store: rename Store to Backend and StoreWrapper to Store

For what's currently called `Store` in the code, I have been using
"backend" in plain text. That probably means that `Backend` is a good
name for it.
Martin von Zweigbergk 2021-09-11 23:52:38 -07:00
parent 1f2ce49e89
commit ce5e95fa80
48 changed files with 932 additions and 958 deletions
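
For orientation, a minimal sketch (editorial, not text from the commit) of how the renamed pieces relate, heavily abridged from the definitions in the diff below:

use std::fmt::Debug;

// Naming after this commit, abridged:
pub trait Backend: Send + Sync + Debug {}   // low-level storage interface; was the `Store` trait
pub struct GitBackend;                      // was `GitStore`; implements `Backend` in the real code
pub struct LocalBackend;                    // was `LocalStore`; implements `Backend` in the real code
pub struct Store {                          // was `StoreWrapper`; caching wrapper held as Arc<Store>
    backend: Box<dyn Backend>,
}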

318 lib/src/backend.rs Normal file
View file

@ -0,0 +1,318 @@
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::fmt::{Debug, Error, Formatter};
use std::io::Read;
use std::result::Result;
use std::vec::Vec;
use thiserror::Error;
use crate::repo_path::{RepoPath, RepoPathComponent};
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct CommitId(pub Vec<u8>);
impl Debug for CommitId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("CommitId").field(&self.hex()).finish()
}
}
impl CommitId {
pub fn from_hex(hex: &str) -> Self {
CommitId(hex::decode(hex).unwrap())
}
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct ChangeId(pub Vec<u8>);
impl Debug for ChangeId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("ChangeId").field(&self.hex()).finish()
}
}
impl ChangeId {
pub fn from_hex(hex: &str) -> Self {
ChangeId(hex::decode(hex).unwrap())
}
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct TreeId(pub Vec<u8>);
impl Debug for TreeId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("TreeId").field(&self.hex()).finish()
}
}
impl TreeId {
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct FileId(pub Vec<u8>);
impl Debug for FileId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("FileId").field(&self.hex()).finish()
}
}
impl FileId {
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct SymlinkId(pub Vec<u8>);
impl Debug for SymlinkId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("SymlinkId").field(&self.hex()).finish()
}
}
impl SymlinkId {
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct ConflictId(pub Vec<u8>);
impl Debug for ConflictId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("ConflictId").field(&self.hex()).finish()
}
}
impl ConflictId {
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
pub enum Phase {
Public,
Draft,
}
#[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
pub struct MillisSinceEpoch(pub u64);
#[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
pub struct Timestamp {
pub timestamp: MillisSinceEpoch,
// time zone offset in minutes
pub tz_offset: i32,
}
impl Timestamp {
pub fn now() -> Self {
let now = chrono::offset::Local::now();
Self {
timestamp: MillisSinceEpoch(now.timestamp_millis() as u64),
tz_offset: now.offset().local_minus_utc() / 60,
}
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Signature {
pub name: String,
pub email: String,
pub timestamp: Timestamp,
}
#[derive(Debug, Clone)]
pub struct Commit {
pub parents: Vec<CommitId>,
pub predecessors: Vec<CommitId>,
pub root_tree: TreeId,
pub change_id: ChangeId,
pub description: String,
pub author: Signature,
pub committer: Signature,
pub is_open: bool,
pub is_pruned: bool,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ConflictPart {
// TODO: Store e.g. CommitId here too? Labels (theirs/ours/base)? Would those still be
// useful e.g. after rebasing this conflict?
pub value: TreeValue,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Conflict {
// A conflict is represented by a list of positive and negative states that need to be applied.
// In a simple 3-way merge of B and C with merge base A, the conflict will be { add: [B, C],
// remove: [A] }. Also note that a conflict of the form { add: [A], remove: [] } is the
// same as non-conflict A.
pub removes: Vec<ConflictPart>,
pub adds: Vec<ConflictPart>,
}
impl Default for Conflict {
fn default() -> Self {
Conflict {
removes: Default::default(),
adds: Default::default(),
}
}
}
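
// An aside, not part of the file above: the simple 3-way merge described in the comment on
// `Conflict`, where merging sides B and C with base A removes A and adds B and C. The function
// name is hypothetical and the single-byte `FileId`s stand in for real hashes.
fn example_three_way_conflict() -> Conflict {
    let part = |byte: u8| ConflictPart {
        value: TreeValue::Normal {
            id: FileId(vec![byte]),
            executable: false,
        },
    };
    Conflict {
        removes: vec![part(0xaa)],          // base A
        adds: vec![part(0xbb), part(0xcc)], // sides B and C
    }
}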
#[derive(Debug, Error, PartialEq, Eq)]
pub enum BackendError {
#[error("Object not found")]
NotFound,
#[error("Error: {0}")]
Other(String),
}
pub type BackendResult<T> = Result<T, BackendError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum TreeValue {
Normal { id: FileId, executable: bool },
Symlink(SymlinkId),
Tree(TreeId),
GitSubmodule(CommitId),
Conflict(ConflictId),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TreeEntry<'a> {
name: &'a RepoPathComponent,
value: &'a TreeValue,
}
impl<'a> TreeEntry<'a> {
pub fn new(name: &'a RepoPathComponent, value: &'a TreeValue) -> Self {
TreeEntry { name, value }
}
pub fn name(&self) -> &'a RepoPathComponent {
self.name
}
pub fn value(&self) -> &'a TreeValue {
self.value
}
}
pub struct TreeEntriesNonRecursiveIter<'a> {
iter: std::collections::btree_map::Iter<'a, RepoPathComponent, TreeValue>,
}
impl<'a> Iterator for TreeEntriesNonRecursiveIter<'a> {
type Item = TreeEntry<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(name, value)| TreeEntry { name, value })
}
}
#[derive(Debug, Clone)]
pub struct Tree {
entries: BTreeMap<RepoPathComponent, TreeValue>,
}
impl Default for Tree {
fn default() -> Self {
Self {
entries: BTreeMap::new(),
}
}
}
impl Tree {
pub fn is_empty(&self) -> bool {
self.entries.is_empty()
}
pub fn entries(&self) -> TreeEntriesNonRecursiveIter {
TreeEntriesNonRecursiveIter {
iter: self.entries.iter(),
}
}
pub fn set(&mut self, name: RepoPathComponent, value: TreeValue) {
self.entries.insert(name, value);
}
pub fn remove(&mut self, name: &RepoPathComponent) {
self.entries.remove(name);
}
pub fn entry(&self, name: &RepoPathComponent) -> Option<TreeEntry> {
self.entries
.get_key_value(name)
.map(|(name, value)| TreeEntry { name, value })
}
pub fn value(&self, name: &RepoPathComponent) -> Option<&TreeValue> {
self.entries.get(name)
}
}
pub trait Backend: Send + Sync + Debug {
fn hash_length(&self) -> usize;
fn git_repo(&self) -> Option<git2::Repository>;
fn read_file(&self, path: &RepoPath, id: &FileId) -> BackendResult<Box<dyn Read>>;
fn write_file(&self, path: &RepoPath, contents: &mut dyn Read) -> BackendResult<FileId>;
fn read_symlink(&self, path: &RepoPath, id: &SymlinkId) -> BackendResult<String>;
fn write_symlink(&self, path: &RepoPath, target: &str) -> BackendResult<SymlinkId>;
fn empty_tree_id(&self) -> &TreeId;
fn read_tree(&self, path: &RepoPath, id: &TreeId) -> BackendResult<Tree>;
fn write_tree(&self, path: &RepoPath, contents: &Tree) -> BackendResult<TreeId>;
fn read_commit(&self, id: &CommitId) -> BackendResult<Commit>;
fn write_commit(&self, contents: &Commit) -> BackendResult<CommitId>;
// TODO: Pass in the paths here too even though they are unused, just like for
// files and trees?
fn read_conflict(&self, id: &ConflictId) -> BackendResult<Conflict>;
fn write_conflict(&self, contents: &Conflict) -> BackendResult<ConflictId>;
}
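
A small sketch of how a caller might consume the trait (editorial, not part of the commit): `read_file` hands back a boxed reader, and the hypothetical helper below drains it into a buffer, mapping I/O failures to `BackendError::Other`.

fn read_file_bytes(backend: &dyn Backend, path: &RepoPath, id: &FileId) -> BackendResult<Vec<u8>> {
    // Ask the backend for a reader over the file contents.
    let mut reader = backend.read_file(path, id)?;
    let mut buf = Vec::new();
    reader
        .read_to_end(&mut buf)
        .map_err(|err| BackendError::Other(err.to_string()))?;
    Ok(buf)
}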

View file

@ -17,17 +17,17 @@ use std::fmt::{Debug, Error, Formatter};
use std::hash::{Hash, Hasher};
use std::sync::Arc;
use crate::backend;
use crate::backend::{ChangeId, CommitId, Signature};
use crate::repo_path::RepoPath;
use crate::store;
use crate::store::{ChangeId, CommitId, Signature};
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
use crate::tree::Tree;
#[derive(Clone)]
pub struct Commit {
store: Arc<StoreWrapper>,
store: Arc<Store>,
id: CommitId,
data: Arc<store::Commit>,
data: Arc<backend::Commit>,
}
impl Debug for Commit {
@ -63,11 +63,11 @@ impl Hash for Commit {
}
impl Commit {
pub fn new(store: Arc<StoreWrapper>, id: CommitId, data: Arc<store::Commit>) -> Self {
pub fn new(store: Arc<Store>, id: CommitId, data: Arc<backend::Commit>) -> Self {
Commit { store, id, data }
}
pub fn store(&self) -> &Arc<StoreWrapper> {
pub fn store(&self) -> &Arc<Store> {
&self.store
}
@ -116,7 +116,7 @@ impl Commit {
&self.data.change_id
}
pub fn store_commit(&self) -> &store::Commit {
pub fn store_commit(&self) -> &backend::Commit {
&self.data
}

View file

@ -16,17 +16,17 @@ use std::sync::Arc;
use uuid::Uuid;
use crate::backend;
use crate::backend::{ChangeId, CommitId, Signature, Timestamp, TreeId};
use crate::commit::Commit;
use crate::repo::MutableRepo;
use crate::settings::UserSettings;
use crate::store;
use crate::store::{ChangeId, CommitId, Signature, Timestamp, TreeId};
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
#[derive(Debug)]
pub struct CommitBuilder {
store: Arc<StoreWrapper>,
commit: store::Commit,
store: Arc<Store>,
commit: backend::Commit,
}
pub fn new_change_id() -> ChangeId {
@ -45,11 +45,11 @@ pub fn signature(settings: &UserSettings) -> Signature {
impl CommitBuilder {
pub fn for_new_commit(
settings: &UserSettings,
store: &Arc<StoreWrapper>,
store: &Arc<Store>,
tree_id: TreeId,
) -> CommitBuilder {
let signature = signature(settings);
let commit = store::Commit {
let commit = backend::Commit {
parents: vec![],
predecessors: vec![],
root_tree: tree_id,
@ -68,7 +68,7 @@ impl CommitBuilder {
pub fn for_rewrite_from(
settings: &UserSettings,
store: &Arc<StoreWrapper>,
store: &Arc<Store>,
predecessor: &Commit,
) -> CommitBuilder {
let mut commit = predecessor.store_commit().clone();
@ -82,12 +82,12 @@ impl CommitBuilder {
pub fn for_open_commit(
settings: &UserSettings,
store: &Arc<StoreWrapper>,
store: &Arc<Store>,
parent_id: CommitId,
tree_id: TreeId,
) -> CommitBuilder {
let signature = signature(settings);
let commit = store::Commit {
let commit = backend::Commit {
parents: vec![parent_id],
predecessors: vec![],
root_tree: tree_id,

View file

@ -17,12 +17,12 @@ use std::io::{Cursor, Write};
use itertools::Itertools;
use crate::backend::{Conflict, ConflictPart, TreeValue};
use crate::diff::{find_line_ranges, Diff, DiffHunk};
use crate::files;
use crate::files::{MergeHunk, MergeResult};
use crate::repo_path::RepoPath;
use crate::store::{Conflict, ConflictPart, TreeValue};
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
fn describe_conflict_part(part: &ConflictPart) -> String {
match &part.value {
@ -79,7 +79,7 @@ fn file_parts(parts: &[ConflictPart]) -> Vec<&ConflictPart> {
.collect_vec()
}
fn get_file_contents(store: &StoreWrapper, path: &RepoPath, part: &ConflictPart) -> Vec<u8> {
fn get_file_contents(store: &Store, path: &RepoPath, part: &ConflictPart) -> Vec<u8> {
if let TreeValue::Normal {
id,
executable: false,
@ -123,7 +123,7 @@ fn write_diff_hunks(left: &[u8], right: &[u8], file: &mut dyn Write) -> std::io:
}
pub fn materialize_conflict(
store: &StoreWrapper,
store: &Store,
path: &RepoPath,
conflict: &Conflict,
file: &mut dyn Write,
@ -190,7 +190,7 @@ pub fn materialize_conflict(
}
pub fn conflict_to_materialized_value(
store: &StoreWrapper,
store: &Store,
path: &RepoPath,
conflict: &Conflict,
) -> TreeValue {

View file

@ -17,6 +17,7 @@ use std::sync::Arc;
use itertools::Itertools;
use crate::backend::{ChangeId, CommitId};
use crate::commit::Commit;
use crate::commit_builder::CommitBuilder;
use crate::dag_walk::{bfs, closest_common_node, leaves};
@ -25,8 +26,7 @@ use crate::repo::{MutableRepo, ReadonlyRepo, RepoRef};
use crate::repo_path::RepoPath;
use crate::rewrite::{merge_commit_trees, rebase_commit};
use crate::settings::UserSettings;
use crate::store::{ChangeId, CommitId};
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
use crate::tree::merge_trees;
// TODO: Combine some maps/sets and use a struct as value instead.
@ -608,7 +608,7 @@ pub enum OrphanResolution {
fn evolve_divergent_change(
user_settings: &UserSettings,
store: &Arc<StoreWrapper>,
store: &Arc<Store>,
mut_repo: &mut MutableRepo,
commits: &HashSet<Commit>,
) -> DivergenceResolution {
@ -657,7 +657,7 @@ fn evolve_divergent_change(
fn evolve_two_divergent_commits(
user_settings: &UserSettings,
store: &Arc<StoreWrapper>,
store: &Arc<Store>,
mut_repo: &mut MutableRepo,
common_predecessor: &Commit,
commit1: &Commit,

View file

@ -18,10 +18,10 @@ use git2::FetchPrune;
use itertools::Itertools;
use thiserror::Error;
use crate::backend::CommitId;
use crate::commit::Commit;
use crate::op_store::RefTarget;
use crate::repo::MutableRepo;
use crate::store::CommitId;
use crate::view::RefName;
#[derive(Error, Debug, PartialEq)]

View file

@ -25,11 +25,12 @@ use itertools::Itertools;
use protobuf::Message;
use uuid::Uuid;
use crate::repo_path::{RepoPath, RepoPathComponent};
use crate::store::{
ChangeId, Commit, CommitId, Conflict, ConflictId, ConflictPart, FileId, MillisSinceEpoch,
Signature, Store, StoreError, StoreResult, SymlinkId, Timestamp, Tree, TreeId, TreeValue,
use crate::backend::{
Backend, BackendError, BackendResult, ChangeId, Commit, CommitId, Conflict, ConflictId,
ConflictPart, FileId, MillisSinceEpoch, Signature, SymlinkId, Timestamp, Tree, TreeId,
TreeValue,
};
use crate::repo_path::{RepoPath, RepoPathComponent};
/// Ref namespace used only for preventing GC.
const NO_GC_REF_NAMESPACE: &str = "refs/jj/keep/";
@ -37,26 +38,26 @@ const NO_GC_REF_NAMESPACE: &str = "refs/jj/keep/";
const COMMITS_NOTES_REF: &str = "refs/notes/jj/commits";
const CONFLICT_SUFFIX: &str = ".jjconflict";
impl From<git2::Error> for StoreError {
impl From<git2::Error> for BackendError {
fn from(err: git2::Error) -> Self {
match err.code() {
git2::ErrorCode::NotFound => StoreError::NotFound,
_other => StoreError::Other(err.to_string()),
git2::ErrorCode::NotFound => BackendError::NotFound,
_other => BackendError::Other(err.to_string()),
}
}
}
pub struct GitStore {
pub struct GitBackend {
repo: Mutex<git2::Repository>,
empty_tree_id: TreeId,
}
impl GitStore {
impl GitBackend {
pub fn load(path: &Path) -> Self {
let repo = Mutex::new(git2::Repository::open(path).unwrap());
let empty_tree_id =
TreeId(hex::decode("4b825dc642cb6eb9a060e54bf8d69288fbee4904").unwrap());
GitStore {
GitBackend {
repo,
empty_tree_id,
}
@ -159,7 +160,7 @@ fn write_note(
Ok(())
}
impl Debug for GitStore {
impl Debug for GitBackend {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_struct("GitStore")
.field("path", &self.repo.lock().unwrap().path())
@ -167,7 +168,7 @@ impl Debug for GitStore {
}
}
impl Store for GitStore {
impl Backend for GitBackend {
fn hash_length(&self) -> usize {
20
}
@ -177,9 +178,9 @@ impl Store for GitStore {
Some(git2::Repository::open(&path).unwrap())
}
fn read_file(&self, _path: &RepoPath, id: &FileId) -> StoreResult<Box<dyn Read>> {
fn read_file(&self, _path: &RepoPath, id: &FileId) -> BackendResult<Box<dyn Read>> {
if id.0.len() != self.hash_length() {
return Err(StoreError::NotFound);
return Err(BackendError::NotFound);
}
let locked_repo = self.repo.lock().unwrap();
let blob = locked_repo
@ -189,7 +190,7 @@ impl Store for GitStore {
Ok(Box::new(Cursor::new(content)))
}
fn write_file(&self, _path: &RepoPath, contents: &mut dyn Read) -> StoreResult<FileId> {
fn write_file(&self, _path: &RepoPath, contents: &mut dyn Read) -> BackendResult<FileId> {
let mut bytes = Vec::new();
contents.read_to_end(&mut bytes).unwrap();
let locked_repo = self.repo.lock().unwrap();
@ -197,9 +198,9 @@ impl Store for GitStore {
Ok(FileId(oid.as_bytes().to_vec()))
}
fn read_symlink(&self, _path: &RepoPath, id: &SymlinkId) -> Result<String, StoreError> {
fn read_symlink(&self, _path: &RepoPath, id: &SymlinkId) -> Result<String, BackendError> {
if id.0.len() != self.hash_length() {
return Err(StoreError::NotFound);
return Err(BackendError::NotFound);
}
let locked_repo = self.repo.lock().unwrap();
let blob = locked_repo
@ -209,7 +210,7 @@ impl Store for GitStore {
Ok(target)
}
fn write_symlink(&self, _path: &RepoPath, target: &str) -> Result<SymlinkId, StoreError> {
fn write_symlink(&self, _path: &RepoPath, target: &str) -> Result<SymlinkId, BackendError> {
let locked_repo = self.repo.lock().unwrap();
let oid = locked_repo.blob(target.as_bytes()).unwrap();
Ok(SymlinkId(oid.as_bytes().to_vec()))
@ -219,12 +220,12 @@ impl Store for GitStore {
&self.empty_tree_id
}
fn read_tree(&self, _path: &RepoPath, id: &TreeId) -> StoreResult<Tree> {
fn read_tree(&self, _path: &RepoPath, id: &TreeId) -> BackendResult<Tree> {
if id == &self.empty_tree_id {
return Ok(Tree::default());
}
if id.0.len() != self.hash_length() {
return Err(StoreError::NotFound);
return Err(BackendError::NotFound);
}
let locked_repo = self.repo.lock().unwrap();
@ -284,7 +285,7 @@ impl Store for GitStore {
Ok(tree)
}
fn write_tree(&self, _path: &RepoPath, contents: &Tree) -> StoreResult<TreeId> {
fn write_tree(&self, _path: &RepoPath, contents: &Tree) -> BackendResult<TreeId> {
let locked_repo = self.repo.lock().unwrap();
let mut builder = locked_repo.treebuilder(None).unwrap();
for entry in contents.entries() {
@ -313,9 +314,9 @@ impl Store for GitStore {
Ok(TreeId(oid.as_bytes().to_vec()))
}
fn read_commit(&self, id: &CommitId) -> StoreResult<Commit> {
fn read_commit(&self, id: &CommitId) -> BackendResult<Commit> {
if id.0.len() != self.hash_length() {
return Err(StoreError::NotFound);
return Err(BackendError::NotFound);
}
let locked_repo = self.repo.lock().unwrap();
@ -365,7 +366,7 @@ impl Store for GitStore {
Ok(commit)
}
fn write_commit(&self, contents: &Commit) -> StoreResult<CommitId> {
fn write_commit(&self, contents: &Commit) -> BackendResult<CommitId> {
// TODO: We shouldn't have to create an in-memory index just to write an
// object...
let locked_repo = self.repo.lock().unwrap();
@ -405,7 +406,7 @@ impl Store for GitStore {
Ok(id)
}
fn read_conflict(&self, id: &ConflictId) -> StoreResult<Conflict> {
fn read_conflict(&self, id: &ConflictId) -> BackendResult<Conflict> {
let mut file = self.read_file(
&RepoPath::from_internal_string("unused"),
&FileId(id.0.clone()),
@ -419,7 +420,7 @@ impl Store for GitStore {
})
}
fn write_conflict(&self, conflict: &Conflict) -> StoreResult<ConflictId> {
fn write_conflict(&self, conflict: &Conflict) -> BackendResult<ConflictId> {
let json = serde_json::json!({
"removes": conflict_part_list_to_json(&conflict.removes),
"adds": conflict_part_list_to_json(&conflict.adds),
@ -507,7 +508,7 @@ fn bytes_vec_from_json(value: &serde_json::Value) -> Vec<u8> {
mod tests {
use super::*;
use crate::store::{FileId, MillisSinceEpoch};
use crate::backend::{FileId, MillisSinceEpoch};
#[test]
fn read_plain_git_commit() {
@ -556,7 +557,7 @@ mod tests {
// Check that the git commit above got the hash we expect
assert_eq!(git_commit_id.as_bytes(), &commit_id.0);
let store = GitStore::load(git_repo_path);
let store = GitBackend::load(git_repo_path);
let commit = store.read_commit(&commit_id).unwrap();
assert_eq!(&commit.change_id, &change_id);
assert_eq!(commit.parents, vec![]);
@ -622,7 +623,7 @@ mod tests {
let temp_dir = tempfile::tempdir().unwrap();
let git_repo_path = temp_dir.path();
let git_repo = git2::Repository::init(git_repo_path).unwrap();
let store = GitStore::load(git_repo_path);
let store = GitBackend::load(git_repo_path);
let signature = Signature {
name: "Someone".to_string(),
email: "someone@example.com".to_string(),
@ -656,7 +657,7 @@ mod tests {
let temp_dir = tempfile::tempdir().unwrap();
let git_repo_path = temp_dir.path();
git2::Repository::init(git_repo_path).unwrap();
let store = GitStore::load(git_repo_path);
let store = GitBackend::load(git_repo_path);
let signature = Signature {
name: "Someone".to_string(),
email: "someone@example.com".to_string(),
@ -684,7 +685,7 @@ mod tests {
Ok(_) => {
panic!("expectedly successfully wrote two commits with the same git commit object")
}
Err(StoreError::Other(message)) if message.contains(&expected_error_message) => {}
Err(BackendError::Other(message)) if message.contains(&expected_error_message) => {}
Err(err) => panic!("unexpected error: {:?}", err),
};
}

View file

@ -30,9 +30,9 @@ use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use itertools::Itertools;
use tempfile::NamedTempFile;
use crate::backend::{ChangeId, CommitId};
use crate::commit::Commit;
use crate::file_util::persist_content_addressed_temp_file;
use crate::store::{ChangeId, CommitId};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub struct IndexPosition(u32);

View file

@ -22,14 +22,14 @@ use std::sync::Arc;
use itertools::Itertools;
use tempfile::NamedTempFile;
use crate::backend::CommitId;
use crate::commit::Commit;
use crate::dag_walk;
use crate::file_util::persist_content_addressed_temp_file;
use crate::index::{MutableIndex, ReadonlyIndex};
use crate::op_store::OperationId;
use crate::operation::Operation;
use crate::store::CommitId;
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
pub struct IndexStore {
dir: PathBuf,
@ -50,7 +50,7 @@ impl IndexStore {
IndexStore { dir }
}
pub fn get_index_at_op(&self, op: &Operation, store: &StoreWrapper) -> Arc<ReadonlyIndex> {
pub fn get_index_at_op(&self, op: &Operation, store: &Store) -> Arc<ReadonlyIndex> {
let op_id_hex = op.id().hex();
let op_id_file = self.dir.join("operations").join(&op_id_hex);
if op_id_file.exists() {
@ -89,7 +89,7 @@ impl IndexStore {
fn index_at_operation(
&self,
store: &StoreWrapper,
store: &Store,
operation: &Operation,
) -> io::Result<Arc<ReadonlyIndex>> {
let view = operation.view();
@ -163,7 +163,7 @@ impl IndexStore {
// Returns the ancestors of heads with parents and predecessors come before the
// commit itself
fn topo_order_earlier_first(
store: &StoreWrapper,
store: &Store,
heads: Vec<CommitId>,
parent_file: Option<Arc<ReadonlyIndex>>,
) -> Vec<Commit> {

View file

@ -24,6 +24,7 @@ extern crate pest_derive;
#[macro_use]
extern crate maplit;
pub mod backend;
pub mod commit;
pub mod commit_builder;
pub mod conflicts;
@ -33,11 +34,11 @@ pub mod evolution;
pub mod file_util;
pub mod files;
pub mod git;
pub mod git_store;
pub mod git_backend;
pub mod gitignore;
pub mod index;
pub mod index_store;
pub mod local_store;
pub mod local_backend;
pub mod lock;
pub mod matchers;
pub mod op_heads_store;
@ -53,7 +54,6 @@ pub mod rewrite;
pub mod settings;
pub mod simple_op_store;
pub mod store;
pub mod store_wrapper;
pub mod testutils;
pub mod transaction;
pub mod tree;

View file

@ -22,55 +22,56 @@ use blake2::{Blake2b, Digest};
use protobuf::{Message, ProtobufError};
use tempfile::{NamedTempFile, PersistError};
use crate::backend::{
Backend, BackendError, BackendResult, ChangeId, Commit, CommitId, Conflict, ConflictId,
ConflictPart, FileId, MillisSinceEpoch, Signature, SymlinkId, Timestamp, Tree, TreeId,
TreeValue,
};
use crate::file_util::persist_content_addressed_temp_file;
use crate::repo_path::{RepoPath, RepoPathComponent};
use crate::store::{
ChangeId, Commit, CommitId, Conflict, ConflictId, ConflictPart, FileId, MillisSinceEpoch,
Signature, Store, StoreError, StoreResult, SymlinkId, Timestamp, Tree, TreeId, TreeValue,
};
impl From<std::io::Error> for StoreError {
impl From<std::io::Error> for BackendError {
fn from(err: std::io::Error) -> Self {
StoreError::Other(err.to_string())
BackendError::Other(err.to_string())
}
}
impl From<PersistError> for StoreError {
impl From<PersistError> for BackendError {
fn from(err: PersistError) -> Self {
StoreError::Other(err.to_string())
BackendError::Other(err.to_string())
}
}
impl From<ProtobufError> for StoreError {
impl From<ProtobufError> for BackendError {
fn from(err: ProtobufError) -> Self {
StoreError::Other(err.to_string())
BackendError::Other(err.to_string())
}
}
#[derive(Debug)]
pub struct LocalStore {
pub struct LocalBackend {
path: PathBuf,
empty_tree_id: TreeId,
}
impl LocalStore {
impl LocalBackend {
pub fn init(store_path: PathBuf) -> Self {
fs::create_dir(store_path.join("commits")).unwrap();
fs::create_dir(store_path.join("trees")).unwrap();
fs::create_dir(store_path.join("files")).unwrap();
fs::create_dir(store_path.join("symlinks")).unwrap();
fs::create_dir(store_path.join("conflicts")).unwrap();
let store = Self::load(store_path);
let empty_tree_id = store
let backend = Self::load(store_path);
let empty_tree_id = backend
.write_tree(&RepoPath::root(), &Tree::default())
.unwrap();
assert_eq!(empty_tree_id, store.empty_tree_id);
store
assert_eq!(empty_tree_id, backend.empty_tree_id);
backend
}
pub fn load(store_path: PathBuf) -> Self {
let empty_tree_id = TreeId(hex::decode("786a02f742015903c6c6fd852552d272912f4740e15847618a86e217f71f5419d25e1031afee585313896444934eb04b903a685b1448b755d56f701afe9be2ce").unwrap());
LocalStore {
LocalBackend {
path: store_path,
empty_tree_id,
}
@ -97,15 +98,15 @@ impl LocalStore {
}
}
fn not_found_to_store_error(err: std::io::Error) -> StoreError {
fn not_found_to_backend_error(err: std::io::Error) -> BackendError {
if err.kind() == ErrorKind::NotFound {
StoreError::NotFound
BackendError::NotFound
} else {
StoreError::from(err)
BackendError::from(err)
}
}
impl Store for LocalStore {
impl Backend for LocalBackend {
fn hash_length(&self) -> usize {
64
}
@ -114,13 +115,13 @@ impl Store for LocalStore {
None
}
fn read_file(&self, _path: &RepoPath, id: &FileId) -> StoreResult<Box<dyn Read>> {
fn read_file(&self, _path: &RepoPath, id: &FileId) -> BackendResult<Box<dyn Read>> {
let path = self.file_path(id);
let file = File::open(path).map_err(not_found_to_store_error)?;
let file = File::open(path).map_err(not_found_to_backend_error)?;
Ok(Box::new(zstd::Decoder::new(file)?))
}
fn write_file(&self, _path: &RepoPath, contents: &mut dyn Read) -> StoreResult<FileId> {
fn write_file(&self, _path: &RepoPath, contents: &mut dyn Read) -> BackendResult<FileId> {
let temp_file = NamedTempFile::new_in(&self.path)?;
let mut encoder = zstd::Encoder::new(temp_file.as_file(), 0)?;
let mut hasher = Blake2b::new();
@ -145,15 +146,15 @@ impl Store for LocalStore {
Ok(id)
}
fn read_symlink(&self, _path: &RepoPath, id: &SymlinkId) -> Result<String, StoreError> {
fn read_symlink(&self, _path: &RepoPath, id: &SymlinkId) -> Result<String, BackendError> {
let path = self.symlink_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let mut file = File::open(path).map_err(not_found_to_backend_error)?;
let mut target = String::new();
file.read_to_string(&mut target).unwrap();
Ok(target)
}
fn write_symlink(&self, _path: &RepoPath, target: &str) -> Result<SymlinkId, StoreError> {
fn write_symlink(&self, _path: &RepoPath, target: &str) -> Result<SymlinkId, BackendError> {
let mut temp_file = NamedTempFile::new_in(&self.path)?;
temp_file.write_all(target.as_bytes())?;
let mut hasher = Blake2b::new();
@ -168,15 +169,15 @@ impl Store for LocalStore {
&self.empty_tree_id
}
fn read_tree(&self, _path: &RepoPath, id: &TreeId) -> StoreResult<Tree> {
fn read_tree(&self, _path: &RepoPath, id: &TreeId) -> BackendResult<Tree> {
let path = self.tree_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let mut file = File::open(path).map_err(not_found_to_backend_error)?;
let proto: crate::protos::store::Tree = Message::parse_from_reader(&mut file)?;
Ok(tree_from_proto(&proto))
}
fn write_tree(&self, _path: &RepoPath, tree: &Tree) -> StoreResult<TreeId> {
fn write_tree(&self, _path: &RepoPath, tree: &Tree) -> BackendResult<TreeId> {
let temp_file = NamedTempFile::new_in(&self.path)?;
let proto = tree_to_proto(tree);
@ -191,15 +192,15 @@ impl Store for LocalStore {
Ok(id)
}
fn read_commit(&self, id: &CommitId) -> StoreResult<Commit> {
fn read_commit(&self, id: &CommitId) -> BackendResult<Commit> {
let path = self.commit_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let mut file = File::open(path).map_err(not_found_to_backend_error)?;
let proto: crate::protos::store::Commit = Message::parse_from_reader(&mut file)?;
Ok(commit_from_proto(&proto))
}
fn write_commit(&self, commit: &Commit) -> StoreResult<CommitId> {
fn write_commit(&self, commit: &Commit) -> BackendResult<CommitId> {
let temp_file = NamedTempFile::new_in(&self.path)?;
let proto = commit_to_proto(commit);
@ -214,15 +215,15 @@ impl Store for LocalStore {
Ok(id)
}
fn read_conflict(&self, id: &ConflictId) -> StoreResult<Conflict> {
fn read_conflict(&self, id: &ConflictId) -> BackendResult<Conflict> {
let path = self.conflict_path(id);
let mut file = File::open(path).map_err(not_found_to_store_error)?;
let mut file = File::open(path).map_err(not_found_to_backend_error)?;
let proto: crate::protos::store::Conflict = Message::parse_from_reader(&mut file)?;
Ok(conflict_from_proto(&proto))
}
fn write_conflict(&self, conflict: &Conflict) -> StoreResult<ConflictId> {
fn write_conflict(&self, conflict: &Conflict) -> BackendResult<ConflictId> {
let temp_file = NamedTempFile::new_in(&self.path)?;
let proto = conflict_to_proto(conflict);

View file

@ -19,11 +19,11 @@ use std::sync::Arc;
use itertools::Itertools;
use thiserror::Error;
use crate::backend::Timestamp;
use crate::lock::FileLock;
use crate::op_store::{OpStore, OperationId, OperationMetadata};
use crate::operation::Operation;
use crate::repo::RepoLoader;
use crate::store::Timestamp;
use crate::transaction::UnpublishedOperation;
use crate::{dag_walk, op_store};

View file

@ -15,7 +15,7 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt::{Debug, Error, Formatter};
use crate::store::{CommitId, Timestamp};
use crate::backend::{CommitId, Timestamp};
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct ViewId(pub Vec<u8>);

View file

@ -18,9 +18,9 @@ use std::fmt::{Debug, Error, Formatter};
use std::hash::{Hash, Hasher};
use std::sync::Arc;
use crate::backend::CommitId;
use crate::op_store;
use crate::op_store::{OpStore, OperationId, ViewId};
use crate::store::CommitId;
#[derive(Clone)]
pub struct Operation {

View file

@ -12,9 +12,9 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::backend::CommitId;
use crate::index::IndexRef;
use crate::op_store::{BranchTarget, RefTarget};
use crate::store::CommitId;
pub fn merge_ref_targets(
index: IndexRef,

View file

@ -21,25 +21,25 @@ use std::sync::{Arc, Mutex, MutexGuard};
use thiserror::Error;
use crate::backend::{Backend, BackendError, CommitId};
use crate::commit::Commit;
use crate::commit_builder::{new_change_id, signature, CommitBuilder};
use crate::dag_walk::topo_order_reverse;
use crate::evolution::{EvolutionRef, MutableEvolution, ReadonlyEvolution};
use crate::git_store::GitStore;
use crate::git_backend::GitBackend;
use crate::index::{IndexRef, MutableIndex, ReadonlyIndex};
use crate::index_store::IndexStore;
use crate::local_store::LocalStore;
use crate::local_backend::LocalBackend;
use crate::op_heads_store::OpHeadsStore;
use crate::op_store::{BranchTarget, OpStore, OperationId, RefTarget};
use crate::operation::Operation;
use crate::settings::{RepoSettings, UserSettings};
use crate::simple_op_store::SimpleOpStore;
use crate::store::{CommitId, Store, StoreError};
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
use crate::transaction::Transaction;
use crate::view::{RefName, View};
use crate::working_copy::WorkingCopy;
use crate::{conflicts, op_store, store};
use crate::{backend, conflicts, op_store};
#[derive(Debug, Error, PartialEq, Eq)]
pub enum RepoError {
@ -49,11 +49,11 @@ pub enum RepoError {
Other(String),
}
impl From<StoreError> for RepoError {
fn from(err: StoreError) -> Self {
impl From<BackendError> for RepoError {
fn from(err: BackendError) -> Self {
match err {
StoreError::NotFound => RepoError::NotFound,
StoreError::Other(description) => RepoError::Other(description),
BackendError::NotFound => RepoError::NotFound,
BackendError::Other(description) => RepoError::Other(description),
}
}
}
@ -69,7 +69,7 @@ pub enum RepoRef<'a> {
}
impl<'a> RepoRef<'a> {
pub fn store(&self) -> &Arc<StoreWrapper> {
pub fn store(&self) -> &Arc<Store> {
match self {
RepoRef::Readonly(repo) => repo.store(),
RepoRef::Mutable(repo) => repo.store(),
@ -108,7 +108,7 @@ impl<'a> RepoRef<'a> {
pub struct ReadonlyRepo {
repo_path: PathBuf,
wc_path: PathBuf,
store: Arc<StoreWrapper>,
store: Arc<Store>,
op_store: Arc<dyn OpStore>,
op_heads_store: Arc<OpHeadsStore>,
operation: Operation,
@ -150,11 +150,11 @@ impl ReadonlyRepo {
let repo_path = ReadonlyRepo::init_repo_dir(&wc_path)?;
let store_path = repo_path.join("store");
fs::create_dir(&store_path).unwrap();
let store = Box::new(LocalStore::init(store_path));
Ok(ReadonlyRepo::init(settings, repo_path, wc_path, store))
let backend = Box::new(LocalBackend::init(store_path));
Ok(ReadonlyRepo::init(settings, repo_path, wc_path, backend))
}
/// Initializes a repo with a new Git store in .jj/git/ (bare Git repo)
/// Initializes a repo with a new Git backend in .jj/git/ (bare Git repo)
pub fn init_internal_git(
settings: &UserSettings,
wc_path: PathBuf,
@ -166,11 +166,11 @@ impl ReadonlyRepo {
let git_store_path = fs::canonicalize(git_store_path).unwrap();
let mut store_file = File::create(store_path).unwrap();
store_file.write_all(b"git: git").unwrap();
let store = Box::new(GitStore::load(&git_store_path));
Ok(ReadonlyRepo::init(settings, repo_path, wc_path, store))
let backend = Box::new(GitBackend::load(&git_store_path));
Ok(ReadonlyRepo::init(settings, repo_path, wc_path, backend))
}
/// Initializes a repo with an existing Git store at the specified path
/// Initializes a repo with an existing Git backend at the specified path
pub fn init_external_git(
settings: &UserSettings,
wc_path: PathBuf,
@ -183,8 +183,8 @@ impl ReadonlyRepo {
store_file
.write_all(format!("git: {}", git_store_path.to_str().unwrap()).as_bytes())
.unwrap();
let store = Box::new(GitStore::load(&git_store_path));
Ok(ReadonlyRepo::init(settings, repo_path, wc_path, store))
let backend = Box::new(GitBackend::load(&git_store_path));
Ok(ReadonlyRepo::init(settings, repo_path, wc_path, backend))
}
fn init_repo_dir(wc_path: &Path) -> Result<PathBuf, RepoInitError> {
@ -201,10 +201,10 @@ impl ReadonlyRepo {
user_settings: &UserSettings,
repo_path: PathBuf,
wc_path: PathBuf,
store: Box<dyn Store>,
backend: Box<dyn Backend>,
) -> Arc<ReadonlyRepo> {
let repo_settings = user_settings.with_repo(&repo_path).unwrap();
let store = StoreWrapper::new(store);
let store = Store::new(backend);
fs::create_dir(repo_path.join("working_copy")).unwrap();
let working_copy = WorkingCopy::init(
@ -215,7 +215,7 @@ impl ReadonlyRepo {
fs::create_dir(repo_path.join("view")).unwrap();
let signature = signature(user_settings);
let checkout_commit = store::Commit {
let checkout_commit = backend::Commit {
parents: vec![],
predecessors: vec![],
root_tree: store.empty_tree_id().clone(),
@ -352,7 +352,7 @@ impl ReadonlyRepo {
self.working_copy.as_ref().lock().unwrap()
}
pub fn store(&self) -> &Arc<StoreWrapper> {
pub fn store(&self) -> &Arc<Store> {
&self.store
}
@ -396,7 +396,7 @@ pub struct RepoLoader {
wc_path: PathBuf,
repo_path: PathBuf,
repo_settings: RepoSettings,
store: Arc<StoreWrapper>,
store: Arc<Store>,
op_store: Arc<dyn OpStore>,
op_heads_store: Arc<OpHeadsStore>,
index_store: Arc<IndexStore>,
@ -423,7 +423,7 @@ impl RepoLoader {
) -> Result<RepoLoader, RepoLoadError> {
let repo_path = find_repo_dir(&wc_path).ok_or(RepoLoadError::NoRepoHere(wc_path))?;
let wc_path = repo_path.parent().unwrap().to_owned();
let store = StoreWrapper::load_store(&repo_path);
let store = Store::load_store(&repo_path);
let repo_settings = user_settings.with_repo(&repo_path).unwrap();
let op_store: Arc<dyn OpStore> = Arc::new(SimpleOpStore::load(repo_path.join("op_store")));
let op_heads_store = Arc::new(OpHeadsStore::load(repo_path.join("op_heads")));
@ -439,7 +439,7 @@ impl RepoLoader {
})
}
pub fn store(&self) -> &Arc<StoreWrapper> {
pub fn store(&self) -> &Arc<Store> {
&self.store
}
@ -548,7 +548,7 @@ impl MutableRepo {
&self.base_repo
}
pub fn store(&self) -> &Arc<StoreWrapper> {
pub fn store(&self) -> &Arc<Store> {
self.base_repo.store()
}
@ -593,7 +593,7 @@ impl MutableRepo {
self.evolution.lock().unwrap().take();
}
pub fn write_commit(&mut self, commit: store::Commit) -> Commit {
pub fn write_commit(&mut self, commit: backend::Commit) -> Commit {
let commit = self.store().write_commit(commit);
self.add_head(&commit);
commit

View file

@ -23,11 +23,11 @@ use pest::iterators::Pairs;
use pest::Parser;
use thiserror::Error;
use crate::backend::{BackendError, CommitId};
use crate::commit::Commit;
use crate::index::{HexPrefix, IndexEntry, IndexPosition, PrefixResolution, RevWalk};
use crate::repo::RepoRef;
use crate::revset_graph_iterator::RevsetGraphIterator;
use crate::store::{CommitId, StoreError};
#[derive(Debug, Error, PartialEq, Eq)]
pub enum RevsetError {
@ -38,7 +38,7 @@ pub enum RevsetError {
#[error("Change id prefix \"{0}\" is ambiguous")]
AmbiguousChangeIdPrefix(String),
#[error("Unexpected error from store: {0}")]
StoreError(#[from] StoreError),
StoreError(#[from] BackendError),
}
fn resolve_git_ref(repo: RepoRef, symbol: &str) -> Result<Vec<CommitId>, RevsetError> {
@ -75,7 +75,7 @@ fn resolve_commit_id(repo: RepoRef, symbol: &str) -> Result<Vec<CommitId>, Revse
let commit_id = CommitId(binary_commit_id);
match repo.store().get_commit(&commit_id) {
Ok(_) => return Ok(vec![commit_id]),
Err(StoreError::NotFound) => {} // fall through
Err(BackendError::NotFound) => {} // fall through
Err(err) => return Err(RevsetError::StoreError(err)),
}
}

View file

@ -16,13 +16,13 @@ use std::collections::{HashMap, HashSet};
use itertools::Itertools;
use crate::backend::CommitId;
use crate::commit::Commit;
use crate::commit_builder::CommitBuilder;
use crate::repo::{MutableRepo, RepoRef};
use crate::repo_path::RepoPath;
use crate::revset::RevsetExpression;
use crate::settings::UserSettings;
use crate::store::CommitId;
use crate::tree::{merge_trees, Tree};
pub fn merge_commit_trees(repo: RepoRef, commits: &[Commit]) -> Tree {

View file

@ -24,12 +24,12 @@ use itertools::Itertools;
use protobuf::{Message, ProtobufError};
use tempfile::{NamedTempFile, PersistError};
use crate::backend::{CommitId, MillisSinceEpoch, Timestamp};
use crate::file_util::persist_content_addressed_temp_file;
use crate::op_store::{
BranchTarget, OpStore, OpStoreError, OpStoreResult, Operation, OperationId, OperationMetadata,
RefTarget, View, ViewId,
};
use crate::store::{CommitId, MillisSinceEpoch, Timestamp};
impl From<std::io::Error> for OpStoreError {
fn from(err: std::io::Error) -> Self {

View file

@ -12,307 +12,217 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use std::fmt::{Debug, Error, Formatter};
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::result::Result;
use std::vec::Vec;
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock, Weak};
use thiserror::Error;
use crate::backend;
use crate::backend::{
Backend, BackendResult, ChangeId, CommitId, Conflict, ConflictId, FileId, MillisSinceEpoch,
Signature, SymlinkId, Timestamp, TreeId,
};
use crate::commit::Commit;
use crate::git_backend::GitBackend;
use crate::local_backend::LocalBackend;
use crate::repo_path::RepoPath;
use crate::tree::Tree;
use crate::tree_builder::TreeBuilder;
use crate::repo_path::{RepoPath, RepoPathComponent};
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct CommitId(pub Vec<u8>);
impl Debug for CommitId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("CommitId").field(&self.hex()).finish()
}
/// Wraps the low-level backend and makes it return more convenient types. Also
/// adds the root commit and adds caching.
#[derive(Debug)]
pub struct Store {
weak_self: Option<Weak<Store>>,
backend: Box<dyn Backend>,
root_commit_id: CommitId,
commit_cache: RwLock<HashMap<CommitId, Arc<backend::Commit>>>,
tree_cache: RwLock<HashMap<(RepoPath, TreeId), Arc<backend::Tree>>>,
}
impl CommitId {
pub fn from_hex(hex: &str) -> Self {
CommitId(hex::decode(hex).unwrap())
impl Store {
pub fn new(backend: Box<dyn Backend>) -> Arc<Self> {
let root_commit_id = CommitId(vec![0; backend.hash_length()]);
let mut wrapper = Arc::new(Store {
weak_self: None,
backend,
root_commit_id,
commit_cache: Default::default(),
tree_cache: Default::default(),
});
let weak_self = Arc::downgrade(&wrapper);
let mut ref_mut = unsafe { Arc::get_mut_unchecked(&mut wrapper) };
ref_mut.weak_self = Some(weak_self);
wrapper
}
pub fn hex(&self) -> String {
hex::encode(&self.0)
pub fn load_store(repo_path: &Path) -> Arc<Store> {
let store_path = repo_path.join("store");
let backend: Box<dyn Backend>;
// TODO: Perhaps .jj/store should always be a directory. Then .jj/git would live
// inside that directory and this function would not need to know the repo path
// (only the store path). Maybe there would be a .jj/store/format file
// indicating which kind of store it is?
if store_path.is_dir() {
backend = Box::new(LocalBackend::load(store_path));
} else {
let mut store_file = File::open(store_path).unwrap();
let mut buf = Vec::new();
store_file.read_to_end(&mut buf).unwrap();
let contents = String::from_utf8(buf).unwrap();
assert!(contents.starts_with("git: "));
let git_backend_path_str = contents[5..].to_string();
let git_backend_path =
std::fs::canonicalize(repo_path.join(PathBuf::from(git_backend_path_str))).unwrap();
backend = Box::new(GitBackend::load(&git_backend_path));
}
Store::new(backend)
}
pub fn hash_length(&self) -> usize {
self.backend.hash_length()
}
pub fn git_repo(&self) -> Option<git2::Repository> {
self.backend.git_repo()
}
pub fn empty_tree_id(&self) -> &TreeId {
self.backend.empty_tree_id()
}
pub fn root_commit_id(&self) -> &CommitId {
&self.root_commit_id
}
pub fn root_commit(&self) -> Commit {
self.get_commit(&self.root_commit_id).unwrap()
}
pub fn get_commit(&self, id: &CommitId) -> BackendResult<Commit> {
let data = self.get_backend_commit(id)?;
Ok(Commit::new(
self.weak_self.as_ref().unwrap().upgrade().unwrap(),
id.clone(),
data,
))
}
fn make_root_commit(&self) -> backend::Commit {
let timestamp = Timestamp {
timestamp: MillisSinceEpoch(0),
tz_offset: 0,
};
let signature = Signature {
name: String::new(),
email: String::new(),
timestamp,
};
let change_id = ChangeId(vec![0; 16]);
backend::Commit {
parents: vec![],
predecessors: vec![],
root_tree: self.backend.empty_tree_id().clone(),
change_id,
description: String::new(),
author: signature.clone(),
committer: signature,
is_open: false,
is_pruned: false,
}
}
fn get_backend_commit(&self, id: &CommitId) -> BackendResult<Arc<backend::Commit>> {
{
let read_locked_cached = self.commit_cache.read().unwrap();
if let Some(data) = read_locked_cached.get(id).cloned() {
return Ok(data);
}
}
let commit = if id == self.root_commit_id() {
self.make_root_commit()
} else {
self.backend.read_commit(id)?
};
let data = Arc::new(commit);
let mut write_locked_cache = self.commit_cache.write().unwrap();
write_locked_cache.insert(id.clone(), data.clone());
Ok(data)
}
pub fn write_commit(&self, commit: backend::Commit) -> Commit {
let commit_id = self.backend.write_commit(&commit).unwrap();
let data = Arc::new(commit);
{
let mut write_locked_cache = self.commit_cache.write().unwrap();
write_locked_cache.insert(commit_id.clone(), data.clone());
}
let commit = Commit::new(
self.weak_self.as_ref().unwrap().upgrade().unwrap(),
commit_id,
data,
);
commit
}
pub fn get_tree(&self, dir: &RepoPath, id: &TreeId) -> BackendResult<Tree> {
let data = self.get_backend_tree(dir, id)?;
Ok(Tree::new(
self.weak_self.as_ref().unwrap().upgrade().unwrap(),
dir.clone(),
id.clone(),
data,
))
}
fn get_backend_tree(&self, dir: &RepoPath, id: &TreeId) -> BackendResult<Arc<backend::Tree>> {
let key = (dir.clone(), id.clone());
{
let read_locked_cache = self.tree_cache.read().unwrap();
if let Some(data) = read_locked_cache.get(&key).cloned() {
return Ok(data);
}
}
let data = Arc::new(self.backend.read_tree(dir, id)?);
let mut write_locked_cache = self.tree_cache.write().unwrap();
write_locked_cache.insert(key, data.clone());
Ok(data)
}
pub fn write_tree(&self, path: &RepoPath, contents: &backend::Tree) -> BackendResult<TreeId> {
// TODO: This should also do caching like write_commit does.
self.backend.write_tree(path, contents)
}
pub fn read_file(&self, path: &RepoPath, id: &FileId) -> BackendResult<Box<dyn Read>> {
self.backend.read_file(path, id)
}
pub fn write_file(&self, path: &RepoPath, contents: &mut dyn Read) -> BackendResult<FileId> {
self.backend.write_file(path, contents)
}
pub fn read_symlink(&self, path: &RepoPath, id: &SymlinkId) -> BackendResult<String> {
self.backend.read_symlink(path, id)
}
pub fn write_symlink(&self, path: &RepoPath, contents: &str) -> BackendResult<SymlinkId> {
self.backend.write_symlink(path, contents)
}
pub fn read_conflict(&self, id: &ConflictId) -> BackendResult<Conflict> {
self.backend.read_conflict(id)
}
pub fn write_conflict(&self, contents: &Conflict) -> BackendResult<ConflictId> {
self.backend.write_conflict(contents)
}
pub fn tree_builder(&self, base_tree_id: TreeId) -> TreeBuilder {
TreeBuilder::new(
self.weak_self.as_ref().unwrap().upgrade().unwrap(),
base_tree_id,
)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct ChangeId(pub Vec<u8>);
impl Debug for ChangeId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("ChangeId").field(&self.hex()).finish()
}
}
impl ChangeId {
pub fn from_hex(hex: &str) -> Self {
ChangeId(hex::decode(hex).unwrap())
}
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct TreeId(pub Vec<u8>);
impl Debug for TreeId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("TreeId").field(&self.hex()).finish()
}
}
impl TreeId {
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct FileId(pub Vec<u8>);
impl Debug for FileId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("FileId").field(&self.hex()).finish()
}
}
impl FileId {
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct SymlinkId(pub Vec<u8>);
impl Debug for SymlinkId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("SymlinkId").field(&self.hex()).finish()
}
}
impl SymlinkId {
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub struct ConflictId(pub Vec<u8>);
impl Debug for ConflictId {
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.debug_tuple("ConflictId").field(&self.hex()).finish()
}
}
impl ConflictId {
pub fn hex(&self) -> String {
hex::encode(&self.0)
}
}
pub enum Phase {
Public,
Draft,
}
#[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
pub struct MillisSinceEpoch(pub u64);
#[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord)]
pub struct Timestamp {
pub timestamp: MillisSinceEpoch,
// time zone offset in minutes
pub tz_offset: i32,
}
impl Timestamp {
pub fn now() -> Self {
let now = chrono::offset::Local::now();
Self {
timestamp: MillisSinceEpoch(now.timestamp_millis() as u64),
tz_offset: now.offset().local_minus_utc() / 60,
}
}
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Signature {
pub name: String,
pub email: String,
pub timestamp: Timestamp,
}
#[derive(Debug, Clone)]
pub struct Commit {
pub parents: Vec<CommitId>,
pub predecessors: Vec<CommitId>,
pub root_tree: TreeId,
pub change_id: ChangeId,
pub description: String,
pub author: Signature,
pub committer: Signature,
pub is_open: bool,
pub is_pruned: bool,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ConflictPart {
// TODO: Store e.g. CommitId here too? Labels (theirs/ours/base)? Would those still be
// useful e.g. after rebasing this conflict?
pub value: TreeValue,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Conflict {
// A conflict is represented by a list of positive and negative states that need to be applied.
// In a simple 3-way merge of B and C with merge base A, the conflict will be { add: [B, C],
// remove: [A] }. Also note that a conflict of the form { add: [A], remove: [] } is the
// same as non-conflict A.
pub removes: Vec<ConflictPart>,
pub adds: Vec<ConflictPart>,
}
impl Default for Conflict {
fn default() -> Self {
Conflict {
removes: Default::default(),
adds: Default::default(),
}
}
}
#[derive(Debug, Error, PartialEq, Eq)]
pub enum StoreError {
#[error("Object not found")]
NotFound,
#[error("Error: {0}")]
Other(String),
}
pub type StoreResult<T> = Result<T, StoreError>;
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum TreeValue {
Normal { id: FileId, executable: bool },
Symlink(SymlinkId),
Tree(TreeId),
GitSubmodule(CommitId),
Conflict(ConflictId),
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct TreeEntry<'a> {
name: &'a RepoPathComponent,
value: &'a TreeValue,
}
impl<'a> TreeEntry<'a> {
pub fn new(name: &'a RepoPathComponent, value: &'a TreeValue) -> Self {
TreeEntry { name, value }
}
pub fn name(&self) -> &'a RepoPathComponent {
self.name
}
pub fn value(&self) -> &'a TreeValue {
self.value
}
}
pub struct TreeEntriesNonRecursiveIter<'a> {
iter: std::collections::btree_map::Iter<'a, RepoPathComponent, TreeValue>,
}
impl<'a> Iterator for TreeEntriesNonRecursiveIter<'a> {
type Item = TreeEntry<'a>;
fn next(&mut self) -> Option<Self::Item> {
self.iter
.next()
.map(|(name, value)| TreeEntry { name, value })
}
}
#[derive(Debug, Clone)]
pub struct Tree {
entries: BTreeMap<RepoPathComponent, TreeValue>,
}
impl Default for Tree {
fn default() -> Self {
Self {
entries: BTreeMap::new(),
}
}
}
impl Tree {
pub fn is_empty(&self) -> bool {
self.entries.is_empty()
}
pub fn entries(&self) -> TreeEntriesNonRecursiveIter {
TreeEntriesNonRecursiveIter {
iter: self.entries.iter(),
}
}
pub fn set(&mut self, name: RepoPathComponent, value: TreeValue) {
self.entries.insert(name, value);
}
pub fn remove(&mut self, name: &RepoPathComponent) {
self.entries.remove(name);
}
pub fn entry(&self, name: &RepoPathComponent) -> Option<TreeEntry> {
self.entries
.get_key_value(name)
.map(|(name, value)| TreeEntry { name, value })
}
pub fn value(&self, name: &RepoPathComponent) -> Option<&TreeValue> {
self.entries.get(name)
}
}
pub trait Store: Send + Sync + Debug {
fn hash_length(&self) -> usize;
fn git_repo(&self) -> Option<git2::Repository>;
fn read_file(&self, path: &RepoPath, id: &FileId) -> StoreResult<Box<dyn Read>>;
fn write_file(&self, path: &RepoPath, contents: &mut dyn Read) -> StoreResult<FileId>;
fn read_symlink(&self, path: &RepoPath, id: &SymlinkId) -> StoreResult<String>;
fn write_symlink(&self, path: &RepoPath, target: &str) -> StoreResult<SymlinkId>;
fn empty_tree_id(&self) -> &TreeId;
fn read_tree(&self, path: &RepoPath, id: &TreeId) -> StoreResult<Tree>;
fn write_tree(&self, path: &RepoPath, contents: &Tree) -> StoreResult<TreeId>;
fn read_commit(&self, id: &CommitId) -> StoreResult<Commit>;
fn write_commit(&self, contents: &Commit) -> StoreResult<CommitId>;
// TODO: Pass in the paths here too even though they are unused, just like for
// files and trees?
fn read_conflict(&self, id: &ConflictId) -> StoreResult<Conflict>;
fn write_conflict(&self, contents: &Conflict) -> StoreResult<ConflictId>;
}
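
To close the loop, a hedged usage sketch (editorial, not from the commit) of the renamed caller-facing type: code that previously held an `Arc<StoreWrapper>` now holds an `Arc<Store>` built from whichever `Backend` the repo uses. The function name is hypothetical; the methods are the ones defined on `Store` above.

use std::path::Path;
use std::sync::Arc;

use crate::store::Store;

fn describe_store(repo_path: &Path) {
    // load_store inspects <repo_path>/store: a directory selects LocalBackend, while a file
    // starting with "git: " points at a GitBackend (see Store::load_store above).
    let store: Arc<Store> = Store::load_store(repo_path);
    println!("hash length: {}", store.hash_length());
    println!("root commit: {}", store.root_commit_id().hex());
}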

View file

@ -1,230 +0,0 @@
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::sync::{Arc, RwLock, Weak};
use crate::commit::Commit;
use crate::git_store::GitStore;
use crate::local_store::LocalStore;
use crate::repo_path::RepoPath;
use crate::store;
use crate::store::{
ChangeId, CommitId, Conflict, ConflictId, FileId, MillisSinceEpoch, Signature, Store,
StoreResult, SymlinkId, Timestamp, TreeId,
};
use crate::tree::Tree;
use crate::tree_builder::TreeBuilder;
/// Wraps the low-level store and makes it return more convenient types. Also
/// adds the root commit and adds caching.
/// TODO: Come up with a better name, possibly by renaming the current Store
/// trait to something else.
#[derive(Debug)]
pub struct StoreWrapper {
weak_self: Option<Weak<StoreWrapper>>,
store: Box<dyn Store>,
root_commit_id: CommitId,
commit_cache: RwLock<HashMap<CommitId, Arc<store::Commit>>>,
tree_cache: RwLock<HashMap<(RepoPath, TreeId), Arc<store::Tree>>>,
}
impl StoreWrapper {
pub fn new(store: Box<dyn Store>) -> Arc<Self> {
let root_commit_id = CommitId(vec![0; store.hash_length()]);
let mut wrapper = Arc::new(StoreWrapper {
weak_self: None,
store,
root_commit_id,
commit_cache: Default::default(),
tree_cache: Default::default(),
});
let weak_self = Arc::downgrade(&wrapper);
let mut ref_mut = unsafe { Arc::get_mut_unchecked(&mut wrapper) };
ref_mut.weak_self = Some(weak_self);
wrapper
}
pub fn load_store(repo_path: &Path) -> Arc<StoreWrapper> {
let store_path = repo_path.join("store");
let store: Box<dyn Store>;
// TODO: Perhaps .jj/store should always be a directory. Then .jj/git would live
// inside that directory and this function would not need to know the repo path
// (only the store path). Maybe there would be a .jj/store/format file
// indicating which kind of store it is?
if store_path.is_dir() {
store = Box::new(LocalStore::load(store_path));
} else {
let mut store_file = File::open(store_path).unwrap();
let mut buf = Vec::new();
store_file.read_to_end(&mut buf).unwrap();
let contents = String::from_utf8(buf).unwrap();
assert!(contents.starts_with("git: "));
let git_store_path_str = contents[5..].to_string();
let git_store_path =
std::fs::canonicalize(repo_path.join(PathBuf::from(git_store_path_str))).unwrap();
store = Box::new(GitStore::load(&git_store_path));
}
StoreWrapper::new(store)
}
pub fn hash_length(&self) -> usize {
self.store.hash_length()
}
pub fn git_repo(&self) -> Option<git2::Repository> {
self.store.git_repo()
}
pub fn empty_tree_id(&self) -> &TreeId {
self.store.empty_tree_id()
}
pub fn root_commit_id(&self) -> &CommitId {
&self.root_commit_id
}
pub fn root_commit(&self) -> Commit {
self.get_commit(&self.root_commit_id).unwrap()
}
pub fn get_commit(&self, id: &CommitId) -> StoreResult<Commit> {
let data = self.get_store_commit(id)?;
Ok(Commit::new(
self.weak_self.as_ref().unwrap().upgrade().unwrap(),
id.clone(),
data,
))
}
fn make_root_commit(&self) -> store::Commit {
let timestamp = Timestamp {
timestamp: MillisSinceEpoch(0),
tz_offset: 0,
};
let signature = Signature {
name: String::new(),
email: String::new(),
timestamp,
};
let change_id = ChangeId(vec![0; 16]);
store::Commit {
parents: vec![],
predecessors: vec![],
root_tree: self.store.empty_tree_id().clone(),
change_id,
description: String::new(),
author: signature.clone(),
committer: signature,
is_open: false,
is_pruned: false,
}
}
fn get_store_commit(&self, id: &CommitId) -> StoreResult<Arc<store::Commit>> {
{
let read_locked_cached = self.commit_cache.read().unwrap();
if let Some(data) = read_locked_cached.get(id).cloned() {
return Ok(data);
}
}
let commit = if id == self.root_commit_id() {
self.make_root_commit()
} else {
self.store.read_commit(id)?
};
let data = Arc::new(commit);
let mut write_locked_cache = self.commit_cache.write().unwrap();
write_locked_cache.insert(id.clone(), data.clone());
Ok(data)
}
pub fn write_commit(&self, commit: store::Commit) -> Commit {
let commit_id = self.store.write_commit(&commit).unwrap();
let data = Arc::new(commit);
{
let mut write_locked_cache = self.commit_cache.write().unwrap();
write_locked_cache.insert(commit_id.clone(), data.clone());
}
let commit = Commit::new(
self.weak_self.as_ref().unwrap().upgrade().unwrap(),
commit_id,
data,
);
commit
}
pub fn get_tree(&self, dir: &RepoPath, id: &TreeId) -> StoreResult<Tree> {
let data = self.get_store_tree(dir, id)?;
Ok(Tree::new(
self.weak_self.as_ref().unwrap().upgrade().unwrap(),
dir.clone(),
id.clone(),
data,
))
}
fn get_store_tree(&self, dir: &RepoPath, id: &TreeId) -> StoreResult<Arc<store::Tree>> {
let key = (dir.clone(), id.clone());
{
let read_locked_cache = self.tree_cache.read().unwrap();
if let Some(data) = read_locked_cache.get(&key).cloned() {
return Ok(data);
}
}
let data = Arc::new(self.store.read_tree(dir, id)?);
let mut write_locked_cache = self.tree_cache.write().unwrap();
write_locked_cache.insert(key, data.clone());
Ok(data)
}
pub fn write_tree(&self, path: &RepoPath, contents: &store::Tree) -> StoreResult<TreeId> {
// TODO: This should also do caching like write_commit does.
self.store.write_tree(path, contents)
}
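    // A hedged sketch, not part of this commit, of the caching the TODO above asks
    // for, mirroring write_commit(): record the newly written tree in tree_cache
    // under its (path, id) key. Assumes the tree type is Clone; the method name is
    // an assumption for illustration only.
    pub fn write_tree_cached(&self, path: &RepoPath, contents: &store::Tree) -> StoreResult<TreeId> {
        let tree_id = self.store.write_tree(path, contents)?;
        let data = Arc::new(contents.clone());
        {
            let mut write_locked_cache = self.tree_cache.write().unwrap();
            write_locked_cache.insert((path.clone(), tree_id.clone()), data);
        }
        Ok(tree_id)
    }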
pub fn read_file(&self, path: &RepoPath, id: &FileId) -> StoreResult<Box<dyn Read>> {
self.store.read_file(path, id)
}
pub fn write_file(&self, path: &RepoPath, contents: &mut dyn Read) -> StoreResult<FileId> {
self.store.write_file(path, contents)
}
pub fn read_symlink(&self, path: &RepoPath, id: &SymlinkId) -> StoreResult<String> {
self.store.read_symlink(path, id)
}
pub fn write_symlink(&self, path: &RepoPath, contents: &str) -> StoreResult<SymlinkId> {
self.store.write_symlink(path, contents)
}
pub fn read_conflict(&self, id: &ConflictId) -> StoreResult<Conflict> {
self.store.read_conflict(id)
}
pub fn write_conflict(&self, contents: &Conflict) -> StoreResult<ConflictId> {
self.store.write_conflict(contents)
}
pub fn tree_builder(&self, base_tree_id: TreeId) -> TreeBuilder {
TreeBuilder::new(
self.weak_self.as_ref().unwrap().upgrade().unwrap(),
base_tree_id,
)
}
}

View file

@ -20,13 +20,13 @@ use std::sync::Arc;
use itertools::Itertools;
use tempfile::TempDir;
use crate::backend::{FileId, TreeId, TreeValue};
use crate::commit::Commit;
use crate::commit_builder::CommitBuilder;
use crate::repo::{MutableRepo, ReadonlyRepo};
use crate::repo_path::RepoPath;
use crate::settings::UserSettings;
use crate::store::{FileId, TreeId, TreeValue};
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
use crate::tree::Tree;
use crate::tree_builder::TreeBuilder;
@ -62,7 +62,7 @@ pub fn init_repo(settings: &UserSettings, use_git: bool) -> (TempDir, Arc<Readon
(temp_dir, repo)
}
pub fn write_file(store: &StoreWrapper, path: &RepoPath, contents: &str) -> FileId {
pub fn write_file(store: &Store, path: &RepoPath, contents: &str) -> FileId {
store.write_file(path, &mut contents.as_bytes()).unwrap()
}

View file

@ -15,13 +15,13 @@
use std::collections::HashMap;
use std::sync::{Arc, Mutex};
use crate::backend::Timestamp;
use crate::evolution::ReadonlyEvolution;
use crate::index::ReadonlyIndex;
use crate::op_store;
use crate::op_store::{OperationId, OperationMetadata};
use crate::operation::Operation;
use crate::repo::{MutableRepo, ReadonlyRepo, RepoLoader};
use crate::store::Timestamp;
use crate::view::View;
use crate::working_copy::WorkingCopy;

View file

@ -19,22 +19,22 @@ use std::iter::Peekable;
use std::pin::Pin;
use std::sync::Arc;
use crate::backend::{
BackendError, Conflict, ConflictId, ConflictPart, TreeEntriesNonRecursiveIter, TreeEntry,
TreeId, TreeValue,
};
use crate::files::MergeResult;
use crate::matchers::{EverythingMatcher, Matcher};
use crate::repo_path::{RepoPath, RepoPathComponent, RepoPathJoin};
use crate::store::{
Conflict, ConflictId, ConflictPart, StoreError, TreeEntriesNonRecursiveIter, TreeEntry, TreeId,
TreeValue,
};
use crate::store_wrapper::StoreWrapper;
use crate::{files, store};
use crate::store::Store;
use crate::{backend, files};
#[derive(Clone)]
pub struct Tree {
store: Arc<StoreWrapper>,
store: Arc<Store>,
dir: RepoPath,
id: TreeId,
data: Arc<store::Tree>,
data: Arc<backend::Tree>,
}
impl Debug for Tree {
@ -60,12 +60,7 @@ impl DiffSummary {
}
impl Tree {
pub fn new(
store: Arc<StoreWrapper>,
dir: RepoPath,
id: TreeId,
data: Arc<store::Tree>,
) -> Self {
pub fn new(store: Arc<Store>, dir: RepoPath, id: TreeId, data: Arc<backend::Tree>) -> Self {
Tree {
store,
dir,
@ -74,16 +69,16 @@ impl Tree {
}
}
pub fn null(store: Arc<StoreWrapper>, dir: RepoPath) -> Self {
pub fn null(store: Arc<Store>, dir: RepoPath) -> Self {
Tree {
store,
dir,
id: TreeId(vec![]),
data: Arc::new(store::Tree::default()),
data: Arc::new(backend::Tree::default()),
}
}
pub fn store(&self) -> &Arc<StoreWrapper> {
pub fn store(&self) -> &Arc<Store> {
&self.store
}
@ -95,7 +90,7 @@ impl Tree {
&self.id
}
pub fn data(&self) -> &store::Tree {
pub fn data(&self) -> &backend::Tree {
&self.data
}
@ -496,7 +491,7 @@ pub fn merge_trees(
side1_tree: &Tree,
base_tree: &Tree,
side2_tree: &Tree,
) -> Result<TreeId, StoreError> {
) -> Result<TreeId, BackendError> {
let store = base_tree.store().as_ref();
let dir = base_tree.dir();
assert_eq!(side1_tree.dir(), dir);
@ -539,13 +534,13 @@ pub fn merge_trees(
}
fn merge_tree_value(
store: &StoreWrapper,
store: &Store,
dir: &RepoPath,
basename: &RepoPathComponent,
maybe_base: Option<&TreeValue>,
maybe_side1: Option<&TreeValue>,
maybe_side2: Option<&TreeValue>,
) -> Result<Option<TreeValue>, StoreError> {
) -> Result<Option<TreeValue>, BackendError> {
// Resolve non-trivial conflicts:
// * resolve tree conflicts by recursing
// * try to resolve file conflicts by merging the file contents
@ -646,10 +641,7 @@ fn merge_tree_value(
})
}
fn conflict_part_to_conflict(
store: &StoreWrapper,
part: &ConflictPart,
) -> Result<Conflict, StoreError> {
fn conflict_part_to_conflict(store: &Store, part: &ConflictPart) -> Result<Conflict, BackendError> {
match &part.value {
TreeValue::Conflict(id) => {
let conflict = store.read_conflict(id)?;
@ -665,9 +657,9 @@ fn conflict_part_to_conflict(
}
fn simplify_conflict(
store: &StoreWrapper,
store: &Store,
conflict: &Conflict,
) -> Result<Option<TreeValue>, StoreError> {
) -> Result<Option<TreeValue>, BackendError> {
// Important cases to simplify:
//
// D

View file

@ -15,10 +15,10 @@
use std::collections::{BTreeMap, HashSet};
use std::sync::Arc;
use crate::backend;
use crate::backend::{TreeId, TreeValue};
use crate::repo_path::{RepoPath, RepoPathJoin};
use crate::store;
use crate::store::{TreeId, TreeValue};
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
use crate::tree::Tree;
#[derive(Debug)]
@ -29,13 +29,13 @@ enum Override {
#[derive(Debug)]
pub struct TreeBuilder {
store: Arc<StoreWrapper>,
store: Arc<Store>,
base_tree_id: TreeId,
overrides: BTreeMap<RepoPath, Override>,
}
impl TreeBuilder {
pub fn new(store: Arc<StoreWrapper>, base_tree_id: TreeId) -> TreeBuilder {
pub fn new(store: Arc<Store>, base_tree_id: TreeId) -> TreeBuilder {
let overrides = BTreeMap::new();
TreeBuilder {
store,
@ -44,7 +44,7 @@ impl TreeBuilder {
}
}
pub fn repo(&self) -> &StoreWrapper {
pub fn repo(&self) -> &Store {
self.store.as_ref()
}
@ -108,7 +108,7 @@ impl TreeBuilder {
}
}
fn get_base_trees(&mut self) -> BTreeMap<RepoPath, store::Tree> {
fn get_base_trees(&mut self) -> BTreeMap<RepoPath, backend::Tree> {
let mut tree_cache = BTreeMap::new();
let mut base_trees = BTreeMap::new();
let store = self.store.clone();

View file

@ -14,11 +14,11 @@
use std::collections::{BTreeMap, HashSet};
use crate::backend::CommitId;
use crate::index::IndexRef;
use crate::op_store;
use crate::op_store::{BranchTarget, RefTarget};
use crate::refs::merge_ref_targets;
use crate::store::CommitId;
#[derive(PartialEq, Eq, Clone, Hash, Debug)]
pub enum RefName {

View file

@ -31,13 +31,15 @@ use protobuf::Message;
use tempfile::NamedTempFile;
use thiserror::Error;
use crate::backend::{
BackendError, CommitId, FileId, MillisSinceEpoch, SymlinkId, TreeId, TreeValue,
};
use crate::commit::Commit;
use crate::gitignore::GitIgnoreFile;
use crate::lock::FileLock;
use crate::matchers::EverythingMatcher;
use crate::repo_path::{RepoPath, RepoPathComponent, RepoPathJoin};
use crate::store::{CommitId, FileId, MillisSinceEpoch, StoreError, SymlinkId, TreeId, TreeValue};
use crate::store_wrapper::StoreWrapper;
use crate::store::Store;
use crate::tree::Diff;
#[derive(Debug, PartialEq, Eq, Clone)]
@ -76,7 +78,7 @@ impl FileState {
}
pub struct TreeState {
store: Arc<StoreWrapper>,
store: Arc<Store>,
working_copy_path: PathBuf,
state_path: PathBuf,
tree_id: TreeId,
@ -146,7 +148,7 @@ pub enum CheckoutError {
#[error("Concurrent checkout")]
ConcurrentCheckout,
#[error("Internal error: {0:?}")]
InternalStoreError(StoreError),
InternalBackendError(BackendError),
}
impl TreeState {
@ -158,21 +160,13 @@ impl TreeState {
&self.file_states
}
pub fn init(
store: Arc<StoreWrapper>,
working_copy_path: PathBuf,
state_path: PathBuf,
) -> TreeState {
pub fn init(store: Arc<Store>, working_copy_path: PathBuf, state_path: PathBuf) -> TreeState {
let mut wc = TreeState::empty(store, working_copy_path, state_path);
wc.save();
wc
}
fn empty(
store: Arc<StoreWrapper>,
working_copy_path: PathBuf,
state_path: PathBuf,
) -> TreeState {
fn empty(store: Arc<Store>, working_copy_path: PathBuf, state_path: PathBuf) -> TreeState {
let tree_id = store.empty_tree_id().clone();
// Canonicalize the working copy path because "repo/." makes libgit2 think that
// everything should be ignored
@ -186,11 +180,7 @@ impl TreeState {
}
}
pub fn load(
store: Arc<StoreWrapper>,
working_copy_path: PathBuf,
state_path: PathBuf,
) -> TreeState {
pub fn load(store: Arc<Store>, working_copy_path: PathBuf, state_path: PathBuf) -> TreeState {
let maybe_file = File::open(state_path.join("tree_state"));
let file = match maybe_file {
Err(ref err) if err.kind() == std::io::ErrorKind::NotFound => {
@ -481,15 +471,15 @@ impl TreeState {
.store
.get_tree(&RepoPath::root(), &self.tree_id)
.map_err(|err| match err {
StoreError::NotFound => CheckoutError::SourceNotFound,
other => CheckoutError::InternalStoreError(other),
BackendError::NotFound => CheckoutError::SourceNotFound,
other => CheckoutError::InternalBackendError(other),
})?;
let new_tree =
self.store
.get_tree(&RepoPath::root(), &tree_id)
.map_err(|err| match err {
StoreError::NotFound => CheckoutError::TargetNotFound,
other => CheckoutError::InternalStoreError(other),
BackendError::NotFound => CheckoutError::TargetNotFound,
other => CheckoutError::InternalBackendError(other),
})?;
let mut stats = CheckoutStats {
@ -587,7 +577,7 @@ impl TreeState {
}
pub struct WorkingCopy {
store: Arc<StoreWrapper>,
store: Arc<Store>,
working_copy_path: PathBuf,
state_path: PathBuf,
commit_id: RefCell<Option<CommitId>>,
@ -597,11 +587,7 @@ pub struct WorkingCopy {
}
impl WorkingCopy {
pub fn init(
store: Arc<StoreWrapper>,
working_copy_path: PathBuf,
state_path: PathBuf,
) -> WorkingCopy {
pub fn init(store: Arc<Store>, working_copy_path: PathBuf, state_path: PathBuf) -> WorkingCopy {
// Leave the commit_id empty so a subsequent call to check out the root revision
// will have an effect.
let proto = crate::protos::working_copy::Checkout::new();
@ -621,11 +607,7 @@ impl WorkingCopy {
}
}
pub fn load(
store: Arc<StoreWrapper>,
working_copy_path: PathBuf,
state_path: PathBuf,
) -> WorkingCopy {
pub fn load(store: Arc<Store>, working_copy_path: PathBuf, state_path: PathBuf) -> WorkingCopy {
WorkingCopy {
store,
working_copy_path,

View file

@ -88,8 +88,8 @@ fn merge_directories(left: &Path, base: &Path, right: &Path, output: &Path) {
}
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_bad_locking_children(use_git: bool) {
// Test that two new commits created on separate machines are both visible (not
// lost due to lack of locking)
@ -139,8 +139,8 @@ fn test_bad_locking_children(use_git: bool) {
assert_eq!(op.parents.len(), 2);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_bad_locking_interrupted(use_git: bool) {
// Test that an interrupted update of the op-heads resulting in one op-head
// that's a descendant of the other is resolved without creating a new

View file

@ -20,8 +20,8 @@ use jujutsu_lib::testutils;
use jujutsu_lib::tree::DiffSummary;
use test_case::test_case;
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_initial(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -64,8 +64,8 @@ fn test_initial(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rewrite(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

View file

@ -36,8 +36,8 @@ fn count_non_merge_operations(repo: &ReadonlyRepo) -> usize {
num_ops
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_commit_parallel(use_git: bool) {
// This loads a Repo instance and creates and commits many concurrent
// transactions from it. It then reloads the repo. That should merge all the
@ -70,8 +70,8 @@ fn test_commit_parallel(use_git: bool) {
assert_eq!(count_non_merge_operations(&repo), num_threads + 1);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_commit_parallel_instances(use_git: bool) {
// Like the test above but creates a new repo instance for every thread, which
// makes it behave very similarly to separate processes.

View file

@ -19,8 +19,8 @@ use jujutsu_lib::tree::DiffSummary;
use maplit::hashset;
use test_case::test_case;
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_types(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -58,8 +58,8 @@ fn test_types(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_tree_file_transition(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -88,8 +88,8 @@ fn test_tree_file_transition(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_sorting(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -153,8 +153,8 @@ fn test_sorting(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_matcher_dir_file_transition(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -220,8 +220,8 @@ fn test_matcher_dir_file_transition(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_matcher_normal_cases(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

View file

@ -33,8 +33,8 @@ fn child_commit(settings: &UserSettings, repo: &ReadonlyRepo, commit: &Commit) -
testutils::create_random_commit(settings, repo).set_parents(vec![commit.id().clone()])
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_obsolete_and_orphan(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -80,8 +80,8 @@ fn test_obsolete_and_orphan(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_divergent(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -107,8 +107,8 @@ fn test_divergent(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_divergent_pruned(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -134,8 +134,8 @@ fn test_divergent_pruned(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_divergent_duplicate(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -163,8 +163,8 @@ fn test_divergent_duplicate(use_git: bool) {
// TODO: Create a #[repo_test] proc macro that injects the `settings` and `repo`
// variables into the test function
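// A hedged sketch, not part of this change, of how the TODO above could be
// approximated today with a declarative macro instead of a proc macro. The
// macro name and its closure-based shape are assumptions; the closure receives
// `settings` and `repo` explicitly to sidestep macro_rules! hygiene.
#[allow(unused_macros)]
macro_rules! repo_test {
    ($name:ident, $use_git:expr, $body:expr) => {
        #[test]
        fn $name() {
            let settings = testutils::user_settings();
            let (_temp_dir, repo) = testutils::init_repo(&settings, $use_git);
            ($body)(&settings, &repo);
        }
    };
}
// Hypothetical usage:
// repo_test!(test_example_local, false, |_settings, repo| {
//     assert!(!repo.view().heads().is_empty());
// });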
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_rewritten(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -187,8 +187,8 @@ fn test_new_parent_rewritten(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_cherry_picked(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -210,8 +210,8 @@ fn test_new_parent_cherry_picked(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_is_pruned(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -237,8 +237,8 @@ fn test_new_parent_is_pruned(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_divergent(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -275,8 +275,8 @@ fn test_new_parent_divergent(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_divergent_one_not_pruned(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -319,8 +319,8 @@ fn test_new_parent_divergent_one_not_pruned(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_divergent_all_pruned(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -365,8 +365,8 @@ fn test_new_parent_divergent_all_pruned(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_split(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -398,8 +398,8 @@ fn test_new_parent_split(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_split_pruned_descendant(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -435,8 +435,8 @@ fn test_new_parent_split_pruned_descendant(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_split_forked(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -474,8 +474,8 @@ fn test_new_parent_split_forked(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_new_parent_split_forked_pruned(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -511,8 +511,8 @@ fn test_new_parent_split_forked_pruned(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_evolve_orphan(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -554,8 +554,8 @@ fn test_evolve_orphan(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
/// When evolving a merge commit, the new commit should not have a parent that
/// is an ancestor of another parent.
fn test_evolve_orphan_merge_ancestor_of_other_parent(use_git: bool) {
@ -589,8 +589,8 @@ fn test_evolve_orphan_merge_ancestor_of_other_parent(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evolve_pruned_orphan(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -625,8 +625,8 @@ fn test_evolve_pruned_orphan(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_evolve_multiple_orphans(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -679,8 +679,8 @@ fn test_evolve_multiple_orphans(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_evolve_divergent(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

View file

@ -16,12 +16,12 @@ use std::path::PathBuf;
use std::sync::Arc;
use git2::Oid;
use jujutsu_lib::backend::CommitId;
use jujutsu_lib::commit::Commit;
use jujutsu_lib::git::{GitFetchError, GitPushError, GitRefUpdate};
use jujutsu_lib::op_store::{BranchTarget, RefTarget};
use jujutsu_lib::repo::ReadonlyRepo;
use jujutsu_lib::settings::UserSettings;
use jujutsu_lib::store::CommitId;
use jujutsu_lib::testutils::create_random_commit;
use jujutsu_lib::{git, testutils};
use maplit::{btreemap, hashset};

View file

@ -14,12 +14,12 @@
use std::sync::Arc;
use jujutsu_lib::backend::CommitId;
use jujutsu_lib::commit::Commit;
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::index::IndexRef;
use jujutsu_lib::repo::ReadonlyRepo;
use jujutsu_lib::settings::UserSettings;
use jujutsu_lib::store::CommitId;
use jujutsu_lib::testutils;
use jujutsu_lib::testutils::{create_random_commit, CommitGraphBuilder};
use test_case::test_case;
@ -38,8 +38,8 @@ fn generation_number<'a>(index: impl Into<IndexRef<'a>>, commit_id: &CommitId) -
.generation_number()
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_empty_repo(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -62,8 +62,8 @@ fn test_index_commits_empty_repo(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_standard_cases(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -132,8 +132,8 @@ fn test_index_commits_standard_cases(use_git: bool) {
assert!(index.is_ancestor(commit_a.id(), commit_h.id()));
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_criss_cross(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -231,8 +231,8 @@ fn test_index_commits_criss_cross(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_previous_operations(use_git: bool) {
// Test that commits visible only in previous operations are indexed.
let settings = testutils::user_settings();
@ -284,8 +284,8 @@ fn test_index_commits_previous_operations(use_git: bool) {
assert_eq!(generation_number(index.as_ref(), commit_c.id()), 3);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_incremental(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, mut repo) = testutils::init_repo(&settings, use_git);
@ -333,8 +333,8 @@ fn test_index_commits_incremental(use_git: bool) {
assert_eq!(generation_number(index.as_ref(), commit_c.id()), 3);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_incremental_empty_transaction(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, mut repo) = testutils::init_repo(&settings, use_git);
@ -375,8 +375,8 @@ fn test_index_commits_incremental_empty_transaction(use_git: bool) {
assert_eq!(generation_number(index.as_ref(), commit_a.id()), 1);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_incremental_already_indexed(use_git: bool) {
// Tests that trying to add a commit that's already been added is a no-op.
let settings = testutils::user_settings();
@ -424,8 +424,8 @@ fn commits_by_level(repo: &ReadonlyRepo) -> Vec<u32> {
.collect()
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_index_commits_incremental_squashed(use_git: bool) {
let settings = testutils::user_settings();

View file

@ -68,8 +68,8 @@ fn test_init_external_git() {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_init_no_config_set(use_git: bool) {
// Test that we can create a repo without setting any config
let settings = UserSettings::from_config(config::Config::new());
@ -90,8 +90,8 @@ fn test_init_no_config_set(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_init_checkout(use_git: bool) {
// Test the contents of the checkout after init
let settings = testutils::user_settings();

View file

@ -26,8 +26,8 @@ fn test_load_bad_path() {
assert_eq!(result.err(), Some(RepoLoadError::NoRepoHere(wc_path)));
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_load_from_subdir(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -41,8 +41,8 @@ fn test_load_from_subdir(use_git: bool) {
assert_eq!(same_repo.working_copy_path(), repo.working_copy_path());
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_load_at_operation(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

View file

@ -13,14 +13,14 @@
// limitations under the License.
use itertools::Itertools;
use jujutsu_lib::backend::{ConflictPart, TreeValue};
use jujutsu_lib::repo_path::{RepoPath, RepoPathComponent};
use jujutsu_lib::store::{ConflictPart, TreeValue};
use jujutsu_lib::tree::Tree;
use jujutsu_lib::{testutils, tree};
use test_case::test_case;
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_same_type(use_git: bool) {
// Tests all possible cases where the entry type is unchanged, specifically
// using only normal files in all trees (no symlinks, no trees, etc.).
@ -219,8 +219,8 @@ fn test_same_type(use_git: bool) {
};
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_subtrees(use_git: bool) {
// Tests that subtrees are merged.
@ -275,8 +275,8 @@ fn test_subtrees(use_git: bool) {
assert_eq!(entries, expected_entries);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_subtree_becomes_empty(use_git: bool) {
// Tests that subtrees that become empty are removed from the parent tree.
@ -306,8 +306,8 @@ fn test_subtree_becomes_empty(use_git: bool) {
assert_eq!(merged_tree.id(), store.empty_tree_id());
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_types(use_git: bool) {
// Tests conflicts between different types. This is mostly to test that the
// conflicts survive the roundtrip to the store.
@ -433,8 +433,8 @@ fn test_types(use_git: bool) {
};
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_simplify_conflict(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

View file

@ -14,11 +14,11 @@
use std::sync::Arc;
use jujutsu_lib::backend::{Conflict, ConflictId, ConflictPart, TreeValue};
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::op_store::RefTarget;
use jujutsu_lib::repo_path::RepoPath;
use jujutsu_lib::store::{Conflict, ConflictId, ConflictPart, TreeValue};
use jujutsu_lib::store_wrapper::StoreWrapper;
use jujutsu_lib::store::Store;
use jujutsu_lib::testutils;
use jujutsu_lib::testutils::CommitGraphBuilder;
use test_case::test_case;
@ -26,8 +26,8 @@ use test_case::test_case;
// TODO Many of the tests here are not run with Git because they end up creating
// two commits with the same contents.
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_checkout_open(use_git: bool) {
// Test that MutableRepo::check_out() uses the requested commit if it's open
let settings = testutils::user_settings();
@ -46,8 +46,8 @@ fn test_checkout_open(use_git: bool) {
assert_eq!(repo.view().checkout(), actual_checkout.id());
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_checkout_closed(use_git: bool) {
// Test that MutableRepo::check_out() creates a child if the requested commit is
// closed
@ -69,8 +69,8 @@ fn test_checkout_closed(use_git: bool) {
assert_eq!(repo.view().checkout(), actual_checkout.id());
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_checkout_open_with_conflict(use_git: bool) {
// Test that MutableRepo::check_out() creates a child if the requested
// commit is open and has conflicts
@ -108,8 +108,8 @@ fn test_checkout_open_with_conflict(use_git: bool) {
assert_eq!(repo.view().checkout(), actual_checkout.id());
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_checkout_closed_with_conflict(use_git: bool) {
// Test that MutableRepo::check_out() creates a child if the requested commit is
// closed and has conflicts
@ -147,7 +147,7 @@ fn test_checkout_closed_with_conflict(use_git: bool) {
assert_eq!(repo.view().checkout(), actual_checkout.id());
}
fn write_conflict(store: &Arc<StoreWrapper>, file_path: &RepoPath) -> ConflictId {
fn write_conflict(store: &Arc<Store>, file_path: &RepoPath) -> ConflictId {
let file_id1 = testutils::write_file(store, file_path, "a\n");
let file_id2 = testutils::write_file(store, file_path, "b\n");
let file_id3 = testutils::write_file(store, file_path, "c\n");
@ -176,8 +176,8 @@ fn write_conflict(store: &Arc<StoreWrapper>, file_path: &RepoPath) -> ConflictId
store.write_conflict(&conflict).unwrap()
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_checkout_previous_not_empty(use_git: bool) {
// Test that MutableRepo::check_out() does not usually prune the previous
// commit.
@ -202,8 +202,8 @@ fn test_checkout_previous_not_empty(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_checkout_previous_empty(use_git: bool) {
// Test that MutableRepo::check_out() prunes the previous commit if it was
// empty.
@ -232,8 +232,8 @@ fn test_checkout_previous_empty(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_checkout_previous_empty_and_obsolete(use_git: bool) {
// Test that MutableRepo::check_out() does not unnecessarily prune the previous
// commit if it was empty but already obsolete.
@ -266,8 +266,8 @@ fn test_checkout_previous_empty_and_obsolete(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_checkout_previous_empty_and_pruned(use_git: bool) {
// Test that MutableRepo::check_out() does not unnecessarily prune the previous
// commit if it was empty but already pruned.
@ -296,8 +296,8 @@ fn test_checkout_previous_empty_and_pruned(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_add_head_success(use_git: bool) {
// Test that MutableRepo::add_head() adds the head, and that it's still there
// after commit. It should also be indexed.
@ -330,8 +330,8 @@ fn test_add_head_success(use_git: bool) {
assert_eq!(index_stats.max_generation_number, 1);
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_add_head_ancestor(use_git: bool) {
// Test that MutableRepo::add_head() does not add a head if it's an ancestor of
// an existing head.
@ -360,8 +360,8 @@ fn test_add_head_ancestor(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_add_head_not_immediate_child(use_git: bool) {
// Test that MutableRepo::add_head() can be used for adding a head that is not
// an immediate child of a current head.
@ -405,8 +405,8 @@ fn test_add_head_not_immediate_child(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_remove_head(use_git: bool) {
// Test that MutableRepo::remove_head() removes the head, and that it's still
// removed after commit. It should remain in the index, since we otherwise would
@ -446,8 +446,8 @@ fn test_remove_head(use_git: bool) {
assert!(repo.index().has_id(commit3.id()));
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_remove_head_ancestor_git_ref(use_git: bool) {
// Test that MutableRepo::remove_head() does not leave the view with a git ref
// pointing to a commit that's not reachable by any head.
@ -490,8 +490,8 @@ fn test_remove_head_ancestor_git_ref(use_git: bool) {
assert!(!heads.contains(commit1.id()));
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_add_public_head(use_git: bool) {
// Test that MutableRepo::add_public_head() adds the head, and that it's still
// there after commit.
@ -511,8 +511,8 @@ fn test_add_public_head(use_git: bool) {
assert!(repo.view().public_heads().contains(commit1.id()));
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_add_public_head_ancestor(use_git: bool) {
// Test that MutableRepo::add_public_head() does not add a public head if it's
// an ancestor of an existing public head.
@ -535,8 +535,8 @@ fn test_add_public_head_ancestor(use_git: bool) {
assert!(!repo.view().public_heads().contains(commit1.id()));
}
#[test_case(false ; "local store")]
// #[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
// #[test_case(true ; "git backend")]
fn test_remove_public_head(use_git: bool) {
// Test that MutableRepo::remove_public_head() removes the head, and that it's
// still removed after commit.

View file

@ -14,9 +14,9 @@
use std::path::Path;
use jujutsu_lib::backend::CommitId;
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::repo::RepoRef;
use jujutsu_lib::store::CommitId;
use jujutsu_lib::testutils;
use test_case::test_case;
@ -27,8 +27,8 @@ fn list_dir(dir: &Path) -> Vec<String> {
.collect()
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_unpublished_operation(use_git: bool) {
// Test that the operation doesn't get published until that's requested.
let settings = testutils::user_settings();
@ -48,8 +48,8 @@ fn test_unpublished_operation(use_git: bool) {
assert_eq!(list_dir(&op_heads_dir), vec![op_id1.hex()]);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_consecutive_operations(use_git: bool) {
// Test that consecutive operations result in a single op-head on disk after
// each operation
@ -80,8 +80,8 @@ fn test_consecutive_operations(use_git: bool) {
assert_eq!(list_dir(&op_heads_dir), vec![op_id2.hex()]);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_concurrent_operations(use_git: bool) {
// Test that concurrent operations result in multiple op-heads on disk until
// the repo has been reloaded (which currently happens right away).
@ -125,8 +125,8 @@ fn assert_heads(repo: RepoRef, expected: Vec<&CommitId>) {
assert_eq!(*repo.view().heads(), expected);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_isolation(use_git: bool) {
// Test that two concurrent transactions don't see each other's changes.
let settings = testutils::user_settings();

View file

@ -12,17 +12,17 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use jujutsu_lib::backend::{CommitId, MillisSinceEpoch, Signature, Timestamp};
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::op_store::RefTarget;
use jujutsu_lib::repo::RepoRef;
use jujutsu_lib::revset::{parse, resolve_symbol, RevsetError};
use jujutsu_lib::store::{CommitId, MillisSinceEpoch, Signature, Timestamp};
use jujutsu_lib::testutils::CommitGraphBuilder;
use jujutsu_lib::{git, testutils};
use test_case::test_case;
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_resolve_symbol_root(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -232,8 +232,8 @@ fn test_resolve_symbol_change_id() {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_resolve_symbol_checkout(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -391,8 +391,8 @@ fn resolve_commit_ids(repo: RepoRef, revset_str: &str) -> Vec<CommitId> {
.collect()
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_root_and_checkout(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -419,8 +419,8 @@ fn test_evaluate_expression_root_and_checkout(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_parents(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -481,8 +481,8 @@ fn test_evaluate_expression_parents(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_children(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -548,8 +548,8 @@ fn test_evaluate_expression_children(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_ancestors(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -585,8 +585,8 @@ fn test_evaluate_expression_ancestors(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_range(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -649,8 +649,8 @@ fn test_evaluate_expression_range(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_dag_range(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -724,8 +724,8 @@ fn test_evaluate_expression_dag_range(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_descendants(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -782,8 +782,8 @@ fn test_evaluate_expression_descendants(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_all_heads(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -803,8 +803,8 @@ fn test_evaluate_expression_all_heads(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_public_heads(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -837,8 +837,8 @@ fn test_evaluate_expression_public_heads(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_git_refs(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -907,8 +907,8 @@ fn test_evaluate_expression_git_refs(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_obsolete(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -954,8 +954,8 @@ fn test_evaluate_expression_obsolete(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_merges(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -986,8 +986,8 @@ fn test_evaluate_expression_merges(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_description(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -1033,8 +1033,8 @@ fn test_evaluate_expression_description(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_union(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -1107,8 +1107,8 @@ fn test_evaluate_expression_union(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_intersection(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -1148,8 +1148,8 @@ fn test_evaluate_expression_intersection(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_evaluate_expression_difference(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

View file

@ -19,8 +19,8 @@ use jujutsu_lib::testutils;
use jujutsu_lib::testutils::CommitGraphBuilder;
use test_case::test_case;
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rebase_descendants_sideways(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -74,8 +74,8 @@ fn test_rebase_descendants_sideways(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rebase_descendants_forward(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -115,8 +115,8 @@ fn test_rebase_descendants_forward(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rebase_descendants_backward(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -153,8 +153,8 @@ fn test_rebase_descendants_backward(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rebase_descendants_internal_merge(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -212,8 +212,8 @@ fn test_rebase_descendants_internal_merge(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rebase_descendants_external_merge(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -260,8 +260,8 @@ fn test_rebase_descendants_external_merge(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rebase_descendants_degenerate_merge(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -300,8 +300,8 @@ fn test_rebase_descendants_degenerate_merge(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rebase_descendants_widen_merge(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -351,8 +351,8 @@ fn test_rebase_descendants_widen_merge(use_git: bool) {
tx.discard();
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_rebase_descendants_contents(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

View file

@ -18,8 +18,8 @@ use jujutsu_lib::testutils::CommitGraphBuilder;
use maplit::{btreemap, hashset};
use test_case::test_case;
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_heads_empty(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -32,8 +32,8 @@ fn test_heads_empty(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_heads_fork(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);
@ -56,8 +56,8 @@ fn test_heads_fork(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_heads_merge(use_git: bool) {
let settings = testutils::user_settings();
let (_temp_dir, repo) = testutils::init_repo(&settings, use_git);

View file

@ -19,17 +19,17 @@ use std::os::unix::fs::PermissionsExt;
use std::sync::Arc;
use itertools::Itertools;
use jujutsu_lib::backend::TreeValue;
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::repo::ReadonlyRepo;
use jujutsu_lib::repo_path::{RepoPath, RepoPathComponent};
use jujutsu_lib::settings::UserSettings;
use jujutsu_lib::store::TreeValue;
use jujutsu_lib::testutils;
use jujutsu_lib::tree_builder::TreeBuilder;
use test_case::test_case;
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_root(use_git: bool) {
// Test that the working copy is clean and empty after init.
let settings = testutils::user_settings();
@ -45,8 +45,8 @@ fn test_root(use_git: bool) {
assert_eq!(&new_tree_id, repo.store().empty_tree_id());
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_checkout_file_transitions(use_git: bool) {
// Tests switching between commits where a certain path is of one type in one
// commit and another type in the other. Includes a "missing" type, so we cover
@ -234,8 +234,8 @@ fn test_checkout_file_transitions(use_git: bool) {
}
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_commit_racy_timestamps(use_git: bool) {
// Tests that file modifications are detected even if they happen in the same
// millisecond as the updated working copy state.
@ -265,8 +265,8 @@ fn test_commit_racy_timestamps(use_git: bool) {
}
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_gitignores(use_git: bool) {
// Tests that .gitignore files are respected.
@ -335,8 +335,8 @@ fn test_gitignores(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_gitignores_checkout_overwrites_ignored(use_git: bool) {
// Tests that a .gitignore'd file gets overwritten if we check out a commit where
// the file is tracked.
@ -391,8 +391,8 @@ fn test_gitignores_checkout_overwrites_ignored(use_git: bool) {
.is_some());
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_gitignores_ignored_directory_already_tracked(use_git: bool) {
// Tests that a .gitignore'd directory that already has a tracked file in it
// does not get removed when committing the working directory.
@ -435,8 +435,8 @@ fn test_gitignores_ignored_directory_already_tracked(use_git: bool) {
assert!(new_tree.path_value(&file_path).is_some());
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_dotgit_ignored(use_git: bool) {
// Tests that .git directories and files are always ignored (we could accept
// them if the backend is not git).

View file

@ -23,8 +23,8 @@ use jujutsu_lib::testutils;
use jujutsu_lib::working_copy::CheckoutError;
use test_case::test_case;
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_concurrent_checkout(use_git: bool) {
// Test that we error out if a concurrent checkout is detected (i.e. if the
// current checkout changed on disk after we read it).
@ -68,8 +68,8 @@ fn test_concurrent_checkout(use_git: bool) {
);
}
#[test_case(false ; "local store")]
#[test_case(true ; "git store")]
#[test_case(false ; "local backend")]
#[test_case(true ; "git backend")]
fn test_checkout_parallel(use_git: bool) {
// Test that concurrent checkouts by different processes (simulated by using
// different repo instances) are safe.

View file

@ -31,6 +31,7 @@ use std::{fs, io};
use clap::{crate_version, App, Arg, ArgMatches, SubCommand};
use criterion::Criterion;
use itertools::Itertools;
use jujutsu_lib::backend::{BackendError, CommitId, Timestamp, TreeValue};
use jujutsu_lib::commit::Commit;
use jujutsu_lib::commit_builder::CommitBuilder;
use jujutsu_lib::dag_walk::topo_order_reverse;
@ -53,8 +54,7 @@ use jujutsu_lib::revset::{RevsetError, RevsetExpression, RevsetParseError};
use jujutsu_lib::revset_graph_iterator::RevsetGraphEdgeType;
use jujutsu_lib::rewrite::{back_out_commit, merge_commit_trees, rebase_commit, DescendantRebaser};
use jujutsu_lib::settings::UserSettings;
use jujutsu_lib::store::{CommitId, StoreError, Timestamp, TreeValue};
use jujutsu_lib::store_wrapper::StoreWrapper;
use jujutsu_lib::store::Store;
use jujutsu_lib::transaction::Transaction;
use jujutsu_lib::tree::{Diff, DiffSummary};
use jujutsu_lib::working_copy::{CheckoutStats, WorkingCopy};
@ -87,8 +87,8 @@ impl From<std::io::Error> for CommandError {
}
}
impl From<StoreError> for CommandError {
fn from(err: StoreError) -> Self {
impl From<BackendError> for CommandError {
fn from(err: BackendError) -> Self {
CommandError::UserError(format!("Unexpected error from store: {}", err))
}
}
@ -3205,7 +3205,7 @@ fn cmd_operation(
Ok(())
}
fn get_git_repo(store: &StoreWrapper) -> Result<git2::Repository, CommandError> {
fn get_git_repo(store: &Store) -> Result<git2::Repository, CommandError> {
match store.git_repo() {
None => Err(CommandError::UserError(
"The repo is not backed by a git repo".to_string(),

View file

@ -18,10 +18,10 @@ use std::path::{Path, PathBuf};
use std::process::Command;
use std::sync::Arc;
use jujutsu_lib::backend::{BackendError, TreeId, TreeValue};
use jujutsu_lib::matchers::EverythingMatcher;
use jujutsu_lib::repo_path::RepoPath;
use jujutsu_lib::store::{StoreError, TreeId, TreeValue};
use jujutsu_lib::store_wrapper::StoreWrapper;
use jujutsu_lib::store::Store;
use jujutsu_lib::tree::{merge_trees, Tree};
use jujutsu_lib::tree_builder::TreeBuilder;
use jujutsu_lib::working_copy::{CheckoutError, TreeState};
@ -37,7 +37,7 @@ pub enum DiffEditError {
#[error("Failed to write directories to diff: {0:?}")]
CheckoutError(CheckoutError),
#[error("Internal error: {0:?}")]
InternalStoreError(StoreError),
InternalBackendError(BackendError),
}
impl From<CheckoutError> for DiffEditError {
@ -46,18 +46,18 @@ impl From<CheckoutError> for DiffEditError {
}
}
impl From<StoreError> for DiffEditError {
fn from(err: StoreError) -> Self {
DiffEditError::InternalStoreError(err)
impl From<BackendError> for DiffEditError {
fn from(err: BackendError) -> Self {
DiffEditError::InternalBackendError(err)
}
}
fn add_to_tree(
store: &StoreWrapper,
store: &Store,
tree_builder: &mut TreeBuilder,
repo_path: &RepoPath,
value: &TreeValue,
) -> Result<(), StoreError> {
) -> Result<(), BackendError> {
match value {
TreeValue::Conflict(conflict_id) => {
let conflict = store.read_conflict(conflict_id)?;
@ -73,7 +73,7 @@ fn add_to_tree(
}
fn check_out(
store: Arc<StoreWrapper>,
store: Arc<Store>,
wc_dir: PathBuf,
state_dir: PathBuf,
tree_id: TreeId,

View file

@ -15,9 +15,9 @@
extern crate pest;
use chrono::{FixedOffset, TimeZone, Utc};
use jujutsu_lib::backend::{CommitId, Signature};
use jujutsu_lib::commit::Commit;
use jujutsu_lib::repo::RepoRef;
use jujutsu_lib::store::{CommitId, Signature};
use pest::iterators::{Pair, Pairs};
use pest::Parser;

View file

@ -17,9 +17,9 @@ use std::io;
use std::ops::Add;
use itertools::Itertools;
use jujutsu_lib::backend::{CommitId, Signature};
use jujutsu_lib::commit::Commit;
use jujutsu_lib::repo::RepoRef;
use jujutsu_lib::store::{CommitId, Signature};
use crate::formatter::Formatter;