refactor: rename client to peer & use efficient ContainerID repr
Zixuan Chen 2023-07-04 12:33:03 +08:00
parent 38ee1e5a78
commit 87887b1b2c
37 changed files with 378 additions and 266 deletions
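In short: the crate-wide rename turns `ClientID` into `PeerID` and `ID.client_id` into `ID.peer`, and `ContainerID::Normal` now stores `peer` and `counter` inline instead of a nested `ID`. Below is a minimal standalone sketch of the resulting shapes; the type and field names follow this diff, but the derives, the single `ContainerType` variant, and the `main` function are illustrative only, not the crate's full definitions.

```rust
use std::fmt::{self, Display};

pub type PeerID = u64; // formerly `ClientID`
pub type Counter = i32;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct ID {
    pub peer: PeerID, // formerly `client_id`
    pub counter: Counter,
}

impl ID {
    pub fn new(peer: PeerID, counter: Counter) -> Self {
        ID { peer, counter }
    }
}

impl Display for ID {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Same "counter@peer" rendering as the Display impl in this diff.
        write!(f, "{}@{}", self.counter, self.peer)
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ContainerType {
    Text, // placeholder variant for the sketch
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ContainerID {
    Root {
        name: String,
        container_type: ContainerType,
    },
    // The "efficient repr": peer + counter stored inline, no nested `ID`.
    Normal {
        peer: PeerID,
        counter: Counter,
        container_type: ContainerType,
    },
}

impl ContainerID {
    pub fn new_normal(id: ID, container_type: ContainerType) -> Self {
        ContainerID::Normal {
            peer: id.peer,
            counter: id.counter,
            container_type,
        }
    }
}

fn main() {
    let cid = ContainerID::new_normal(ID::new(7, 42), ContainerType::Text);
    if let ContainerID::Normal { peer, counter, .. } = cid {
        println!("{}", ID::new(peer, counter)); // prints "42@7"
    }
}
```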

View file

@ -1,9 +1,9 @@
use loro_internal::{id::ClientID, LoroCore}; use loro_internal::{id::PeerID, LoroCore};
use crate::raw_store::RawStore; use crate::raw_store::RawStore;
pub struct Loro { pub struct Loro {
pub this_client_id: ClientID, pub this_client_id: PeerID,
pub raw_store: Option<RawStore>, pub raw_store: Option<RawStore>,
pub log_store: Option<LoroCore>, pub log_store: Option<LoroCore>,
} }

View file

@ -1,6 +1,6 @@
use fxhash::FxHashMap; use fxhash::FxHashMap;
use loro_internal::{ use loro_internal::{
id::{ClientID, Counter}, id::{Counter, PeerID},
version::VersionVector, version::VersionVector,
}; };
use rle::{HasLength, RleVecWithIndex}; use rle::{HasLength, RleVecWithIndex};
@ -10,8 +10,8 @@ use crate::raw_change::{ChangeData, ChangeHash};
pub type Mac = [u8; 32]; pub type Mac = [u8; 32];
pub struct RawStore { pub struct RawStore {
changes: FxHashMap<ClientID, RleVecWithIndex<ChangeData>>, changes: FxHashMap<PeerID, RleVecWithIndex<ChangeData>>,
macs: Option<FxHashMap<ClientID, Mac>>, macs: Option<FxHashMap<PeerID, Mac>>,
} }
impl RawStore { impl RawStore {
@ -47,7 +47,7 @@ impl RawStore {
true true
} }
pub fn get_final_hash(&self, client_id: ClientID) -> ChangeHash { pub fn get_final_hash(&self, client_id: PeerID) -> ChangeHash {
let changes = self.changes.get(&client_id).unwrap(); let changes = self.changes.get(&client_id).unwrap();
let last = changes.vec().last().unwrap(); let last = changes.vec().last().unwrap();
last.hash.unwrap() last.hash.unwrap()

View file

@ -56,6 +56,15 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitmaps"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
dependencies = [
"typenum",
]
[[package]] [[package]]
name = "bumpalo" name = "bumpalo"
version = "3.11.1" version = "3.11.1"
@ -182,6 +191,18 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "enum_dispatch"
version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11f36e95862220b211a6e2aa5eca09b4fa391b13cd52ceb8035a24bf65a79de2"
dependencies = [
"once_cell",
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "fnv" name = "fnv"
version = "1.0.7" version = "1.0.7"
@ -197,6 +218,17 @@ dependencies = [
"byteorder", "byteorder",
] ]
[[package]]
name = "generic-btree"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff012c9d64685dc26c5ebac85b7e56a29799646a945239db2d91fc3a597de6d8"
dependencies = [
"fxhash",
"smallvec",
"thunderdome",
]
[[package]] [[package]]
name = "getrandom" name = "getrandom"
version = "0.2.8" version = "0.2.8"
@ -243,6 +275,20 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "im"
version = "15.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9"
dependencies = [
"bitmaps",
"rand_core",
"rand_xoshiro",
"sized-chunks",
"typenum",
"version_check",
]
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.10.5" version = "0.10.5"
@ -276,6 +322,16 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "jumprope"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "829c74fe88dda0d2a5425b022b44921574a65c4eb78e6e39a61b40eb416a4ef8"
dependencies = [
"rand",
"str_indices",
]
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.138" version = "0.2.138"
@ -322,8 +378,12 @@ dependencies = [
"crdt-list", "crdt-list",
"debug-log", "debug-log",
"enum-as-inner", "enum-as-inner",
"enum_dispatch",
"fxhash", "fxhash",
"generic-btree",
"im",
"itertools", "itertools",
"jumprope",
"num", "num",
"postcard", "postcard",
"rand", "rand",
@ -604,6 +664,15 @@ dependencies = [
"getrandom", "getrandom",
] ]
[[package]]
name = "rand_xoshiro"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa"
dependencies = [
"rand_core",
]
[[package]] [[package]]
name = "redox_syscall" name = "redox_syscall"
version = "0.2.16" version = "0.2.16"
@ -734,6 +803,16 @@ version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
[[package]]
name = "sized-chunks"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
dependencies = [
"bitmaps",
"typenum",
]
[[package]] [[package]]
name = "smallvec" name = "smallvec"
version = "1.10.0" version = "1.10.0"
@ -781,6 +860,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "str_indices"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f026164926842ec52deb1938fae44f83dfdb82d0a5b0270c5bd5935ab74d6dd"
[[package]] [[package]]
name = "string_cache" name = "string_cache"
version = "0.8.4" version = "0.8.4"
@ -856,6 +941,12 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "thunderdome"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e170f93360bf9ae6fe3c31116bbf27adb1d054cedd6bc3d7857e34f2d98d0b"
[[package]] [[package]]
name = "tracing" name = "tracing"
version = "0.1.37" version = "0.1.37"
@ -888,6 +979,12 @@ dependencies = [
"once_cell", "once_cell",
] ]
[[package]]
name = "typenum"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]] [[package]]
name = "unicode-ident" name = "unicode-ident"
version = "1.0.5" version = "1.0.5"

View file

@ -112,7 +112,7 @@ impl<O: Rle + HasIndex> Mergable<ChangeMergeCfg> for Change<O> {
return false; return false;
} }
self.id.client_id == other.id.client_id self.id.peer == other.id.peer
&& self.id.counter + self.content_len() as Counter == other.id.counter && self.id.counter + self.content_len() as Counter == other.id.counter
&& self.lamport + self.content_len() as Lamport == other.lamport && self.lamport + self.content_len() as Lamport == other.lamport
} }
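For context on the hunk above: the merge rule itself is unchanged by the rename; two changes are mergeable only when they come from the same peer and the second continues the first in both counter and Lamport space. A trimmed-down sketch of that predicate follows (the `ChangeHead` struct and `main` are hypothetical stand-ins, not the crate's `Change<O>`).

```rust
type PeerID = u64;
type Counter = i32;
type Lamport = u32;

// Hypothetical stand-in carrying just the fields the merge check reads.
#[derive(Debug, Clone, Copy)]
struct ChangeHead {
    peer: PeerID,
    counter: Counter,
    lamport: Lamport,
    len: usize, // content length of the change
}

// Mirrors the condition in the hunk above: same peer, and the second change
// starts exactly where the first ends in counter space and in Lamport space.
fn is_mergable(a: ChangeHead, b: ChangeHead) -> bool {
    a.peer == b.peer
        && a.counter + a.len as Counter == b.counter
        && a.lamport + a.len as Lamport == b.lamport
}

fn main() {
    let a = ChangeHead { peer: 1, counter: 0, lamport: 10, len: 5 };
    let b = ChangeHead { peer: 1, counter: 5, lamport: 15, len: 3 };
    assert!(is_mergable(a, b));
    assert!(!is_mergable(b, a));
}
```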

View file

@ -7,9 +7,9 @@
use crate::{ use crate::{
event::{Observer, ObserverHandler, SubscriptionID}, event::{Observer, ObserverHandler, SubscriptionID},
hierarchy::Hierarchy, hierarchy::Hierarchy,
id::{Counter, PeerID},
log_store::ImportContext, log_store::ImportContext,
op::{InnerContent, RemoteContent, RichOp}, op::{InnerContent, RemoteContent, RichOp},
version::PatchedVersionVector,
InternalString, LoroError, LoroValue, VersionVector, ID, InternalString, LoroError, LoroValue, VersionVector, ID,
}; };
@ -172,7 +172,8 @@ pub enum ContainerID {
container_type: ContainerType, container_type: ContainerType,
}, },
Normal { Normal {
- id: ID,
+ peer: PeerID,
+ counter: Counter,
container_type: ContainerType, container_type: ContainerType,
}, },
} }
@ -184,9 +185,15 @@ impl Display for ContainerID {
name, name,
container_type, container_type,
} => f.write_fmt(format_args!("/{}:{}", name, container_type))?, } => f.write_fmt(format_args!("/{}:{}", name, container_type))?,
- ContainerID::Normal { id, container_type } => {
-     f.write_fmt(format_args!("{}:{}", id, container_type))?
- }
+ ContainerID::Normal {
+     peer,
+     counter,
+     container_type,
+ } => f.write_fmt(format_args!(
+     "{}:{}",
+     ID::new(*peer, *counter),
+     container_type
+ ))?,
}; };
Ok(()) Ok(())
} }
@ -210,10 +217,8 @@ impl TryFrom<&str> for ContainerID {
let counter = parts.next().ok_or(())?.parse().map_err(|_| ())?; let counter = parts.next().ok_or(())?.parse().map_err(|_| ())?;
let client = parts.next().ok_or(())?.parse().map_err(|_| ())?; let client = parts.next().ok_or(())?.parse().map_err(|_| ())?;
Ok(ContainerID::Normal {
-     id: ID {
-         counter,
-         client_id: client,
-     },
+     counter,
+     peer: client,
container_type, container_type,
}) })
} }
@ -241,7 +246,9 @@ impl From<&ContainerID> for ContainerIdRaw {
fn from(id: &ContainerID) -> Self { fn from(id: &ContainerID) -> Self {
match id { match id {
ContainerID::Root { name, .. } => ContainerIdRaw::Root { name: name.clone() }, ContainerID::Root { name, .. } => ContainerIdRaw::Root { name: name.clone() },
- ContainerID::Normal { id, .. } => ContainerIdRaw::Normal { id: *id },
+ ContainerID::Normal { peer, counter, .. } => ContainerIdRaw::Normal {
+     id: ID::new(*peer, *counter),
+ },
} }
} }
} }
@ -250,7 +257,9 @@ impl From<ContainerID> for ContainerIdRaw {
fn from(id: ContainerID) -> Self { fn from(id: ContainerID) -> Self {
match id { match id {
ContainerID::Root { name, .. } => ContainerIdRaw::Root { name }, ContainerID::Root { name, .. } => ContainerIdRaw::Root { name },
- ContainerID::Normal { id, .. } => ContainerIdRaw::Normal { id },
+ ContainerID::Normal { peer, counter, .. } => ContainerIdRaw::Normal {
+     id: ID::new(peer, counter),
+ },
} }
} }
} }
@ -262,7 +271,11 @@ impl ContainerIdRaw {
name, name,
container_type, container_type,
}, },
- ContainerIdRaw::Normal { id } => ContainerID::Normal { id, container_type },
+ ContainerIdRaw::Normal { id } => ContainerID::Normal {
+     peer: id.peer,
+     counter: id.counter,
+     container_type,
+ },
} }
} }
} }
@ -270,7 +283,11 @@ impl ContainerIdRaw {
impl ContainerID { impl ContainerID {
#[inline] #[inline]
pub fn new_normal(id: ID, container_type: ContainerType) -> Self { pub fn new_normal(id: ID, container_type: ContainerType) -> Self {
- ContainerID::Normal { id, container_type }
+ ContainerID::Normal {
+     peer: id.peer,
+     counter: id.counter,
+     container_type,
+ }
} }
#[inline] #[inline]

View file

@ -21,7 +21,7 @@ use crate::{
delta::Delta, delta::Delta,
event::{Diff, Index}, event::{Diff, Index},
hierarchy::Hierarchy, hierarchy::Hierarchy,
id::{ClientID, Counter}, id::{Counter, PeerID},
log_store::ImportContext, log_store::ImportContext,
op::{InnerContent, Op, RemoteContent, RichOp}, op::{InnerContent, Op, RemoteContent, RichOp},
prelim::Prelim, prelim::Prelim,
@ -560,12 +560,12 @@ impl ContainerTrait for ListContainer {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct List { pub struct List {
container: Weak<Mutex<ContainerInstance>>, container: Weak<Mutex<ContainerInstance>>,
client_id: ClientID, client_id: PeerID,
container_idx: ContainerIdx, container_idx: ContainerIdx,
} }
impl List { impl List {
pub fn from_instance(instance: Weak<Mutex<ContainerInstance>>, client_id: ClientID) -> Self { pub fn from_instance(instance: Weak<Mutex<ContainerInstance>>, client_id: PeerID) -> Self {
let container_idx = { let container_idx = {
let list = instance.upgrade().unwrap(); let list = instance.upgrade().unwrap();
let list = list.try_lock().unwrap(); let list = list.try_lock().unwrap();
@ -741,7 +741,7 @@ impl ContainerWrapper for List {
f(list) f(list)
} }
fn client_id(&self) -> ClientID { fn client_id(&self) -> PeerID {
self.client_id self.client_id
} }

View file

@ -21,7 +21,7 @@ use crate::{
}, },
event::{Diff, Index}, event::{Diff, Index},
hierarchy::Hierarchy, hierarchy::Hierarchy,
id::ClientID, id::PeerID,
log_store::ImportContext, log_store::ImportContext,
op::{InnerContent, Op, RemoteContent, RichOp}, op::{InnerContent, Op, RemoteContent, RichOp},
prelim::Prelim, prelim::Prelim,
@ -426,12 +426,12 @@ impl ContainerTrait for MapContainer {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Map { pub struct Map {
container: Weak<Mutex<ContainerInstance>>, container: Weak<Mutex<ContainerInstance>>,
client_id: ClientID, client_id: PeerID,
container_idx: ContainerIdx, container_idx: ContainerIdx,
} }
impl Map { impl Map {
pub fn from_instance(instance: Weak<Mutex<ContainerInstance>>, client_id: ClientID) -> Self { pub fn from_instance(instance: Weak<Mutex<ContainerInstance>>, client_id: PeerID) -> Self {
let container_idx = instance.upgrade().unwrap().try_lock().unwrap().idx(); let container_idx = instance.upgrade().unwrap().try_lock().unwrap().idx();
Self { Self {
container: instance, container: instance,
@ -506,7 +506,7 @@ impl Map {
impl ContainerWrapper for Map { impl ContainerWrapper for Map {
type Container = MapContainer; type Container = MapContainer;
fn client_id(&self) -> ClientID { fn client_id(&self) -> PeerID {
self.client_id self.client_id
} }

View file

@ -16,7 +16,7 @@ use crate::{
context::Context, context::Context,
event::{Index, ObserverHandler, SubscriptionID}, event::{Index, ObserverHandler, SubscriptionID},
hierarchy::Hierarchy, hierarchy::Hierarchy,
id::ClientID, id::PeerID,
log_store::ImportContext, log_store::ImportContext,
op::{RemoteContent, RichOp}, op::{RemoteContent, RichOp},
transaction::Transaction, transaction::Transaction,
@ -433,7 +433,7 @@ pub trait ContainerWrapper {
ans ans
} }
fn client_id(&self) -> ClientID; fn client_id(&self) -> PeerID;
fn id(&self) -> ContainerID { fn id(&self) -> ContainerID {
self.with_container(|x| x.id().clone()) self.with_container(|x| x.id().clone())

View file

@ -15,7 +15,7 @@ use crate::{
delta::{Delta, DeltaItem}, delta::{Delta, DeltaItem},
event::{Diff, Utf16Meta}, event::{Diff, Utf16Meta},
hierarchy::Hierarchy, hierarchy::Hierarchy,
id::{ClientID, Counter}, id::{Counter, PeerID},
log_store::ImportContext, log_store::ImportContext,
op::{InnerContent, Op, RemoteContent, RichOp}, op::{InnerContent, Op, RemoteContent, RichOp},
transaction::Transaction, transaction::Transaction,
@ -552,12 +552,12 @@ impl ContainerTrait for TextContainer {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Text { pub struct Text {
container: Weak<Mutex<ContainerInstance>>, container: Weak<Mutex<ContainerInstance>>,
client_id: ClientID, client_id: PeerID,
container_idx: ContainerIdx, container_idx: ContainerIdx,
} }
impl Text { impl Text {
pub fn from_instance(instance: Weak<Mutex<ContainerInstance>>, client_id: ClientID) -> Self { pub fn from_instance(instance: Weak<Mutex<ContainerInstance>>, client_id: PeerID) -> Self {
let container_idx = { let container_idx = {
let x = instance.upgrade().unwrap(); let x = instance.upgrade().unwrap();
let x = x.try_lock().unwrap(); let x = x.try_lock().unwrap();
@ -659,7 +659,7 @@ impl Text {
impl ContainerWrapper for Text { impl ContainerWrapper for Text {
type Container = TextContainer; type Container = TextContainer;
fn client_id(&self) -> crate::id::ClientID { fn client_id(&self) -> crate::id::PeerID {
self.client_id self.client_id
} }

View file

@ -13,7 +13,7 @@ use crate::{
}; };
#[allow(unused)] #[allow(unused)]
use crate::ClientID; use crate::PeerID;
use self::{ use self::{
content_map::ContentMap, content_map::ContentMap,
@ -44,7 +44,7 @@ pub mod yata_impl;
#[derive(Debug)] #[derive(Debug)]
pub struct Tracker { pub struct Tracker {
#[cfg(feature = "test_utils")] #[cfg(feature = "test_utils")]
client_id: ClientID, client_id: PeerID,
/// from start_vv to latest vv are applied /// from start_vv to latest vv are applied
start_vv: VersionVector, start_vv: VersionVector,
/// latest applied ops version vector /// latest applied ops version vector
@ -66,7 +66,7 @@ unsafe impl Sync for Tracker {}
impl From<ID> for u128 { impl From<ID> for u128 {
fn from(id: ID) -> Self { fn from(id: ID) -> Self {
((id.client_id as u128) << 64) | id.counter as u128 ((id.peer as u128) << 64) | id.counter as u128
} }
} }
@ -117,7 +117,7 @@ impl Tracker {
for span in self.content.iter() { for span in self.content.iter() {
let yspan = span.as_ref(); let yspan = span.as_ref();
let id_span = IdSpan::new( let id_span = IdSpan::new(
yspan.id.client_id, yspan.id.peer,
yspan.id.counter, yspan.id.counter,
yspan.atom_len() as Counter + yspan.id.counter, yspan.atom_len() as Counter + yspan.id.counter,
); );
@ -210,7 +210,7 @@ impl Tracker {
} }
if self.all_vv().includes_id(id) { if self.all_vv().includes_id(id) {
let this_ctr = self.all_vv().get(&id.client_id).unwrap(); let this_ctr = self.all_vv().get(&id.peer).unwrap();
let shift = this_ctr - id.counter; let shift = this_ctr - id.counter;
self.forward(std::iter::once(id.to_span(shift as usize)), false); self.forward(std::iter::once(id.to_span(shift as usize)), false);
if shift as usize >= content.atom_len() { if shift as usize >= content.atom_len() {
@ -366,8 +366,8 @@ impl Tracker {
/// apply an operation directly to the current tracker /// apply an operation directly to the current tracker
fn apply(&mut self, id: ID, content: &InnerContent) { fn apply(&mut self, id: ID, content: &InnerContent) {
assert!(*self.current_vv.get(&id.client_id).unwrap_or(&0) <= id.counter); assert!(*self.current_vv.get(&id.peer).unwrap_or(&0) <= id.counter);
assert!(*self.all_vv.get(&id.client_id).unwrap_or(&0) <= id.counter); assert!(*self.all_vv.get(&id.peer).unwrap_or(&0) <= id.counter);
self.current_vv self.current_vv
.set_end(id.inc(content.content_len() as i32)); .set_end(id.inc(content.content_len() as i32));
self.all_vv.set_end(id.inc(content.content_len() as i32)); self.all_vv.set_end(id.inc(content.content_len() as i32));

View file

@ -112,7 +112,7 @@ impl ContentMap {
if cursor.as_ref().status.is_activated() { if cursor.as_ref().status.is_activated() {
let cursor = cursor.unwrap(); let cursor = cursor.unwrap();
ans.push(IdSpan::new( ans.push(IdSpan::new(
id.client_id, id.peer,
id.counter + (cursor.offset as i32), id.counter + (cursor.offset as i32),
id.counter + (cursor.offset + cursor.len) as i32, id.counter + (cursor.offset + cursor.len) as i32,
)); ));
@ -129,7 +129,7 @@ impl ContentMap {
let id = cursor.as_ref().id; let id = cursor.as_ref().id;
let cursor = cursor.unwrap(); let cursor = cursor.unwrap();
ans.push(IdSpan::new( ans.push(IdSpan::new(
id.client_id, id.peer,
id.counter + (cursor.offset as i32), id.counter + (cursor.offset as i32),
id.counter + (cursor.offset + cursor.len) as i32, id.counter + (cursor.offset + cursor.len) as i32,
)); ));

View file

@ -116,14 +116,14 @@ impl YSpan {
#[inline] #[inline]
pub fn contain_id(&self, id: ID) -> bool { pub fn contain_id(&self, id: ID) -> bool {
self.id.client_id == id.client_id self.id.peer == id.peer
&& self.id.counter <= id.counter && self.id.counter <= id.counter
&& id.counter < self.id.counter + self.atom_len() as i32 && id.counter < self.id.counter + self.atom_len() as i32
} }
#[inline] #[inline]
pub fn overlap(&self, id: IdSpan) -> bool { pub fn overlap(&self, id: IdSpan) -> bool {
if self.id.client_id != id.client_id { if self.id.peer != id.client_id {
return false; return false;
} }
@ -149,7 +149,7 @@ impl YSpan {
impl Mergable for YSpan { impl Mergable for YSpan {
fn is_mergable(&self, other: &Self, _: &()) -> bool { fn is_mergable(&self, other: &Self, _: &()) -> bool {
other.id.client_id == self.id.client_id other.id.peer == self.id.peer
&& self.status == other.status && self.status == other.status
&& self.after_status == other.after_status && self.after_status == other.after_status
&& self.id.counter + self.atom_len() as Counter == other.id.counter && self.id.counter + self.atom_len() as Counter == other.id.counter

View file

@ -129,7 +129,7 @@ impl ListCrdt for YataImpl {
} }
fn cmp_id(op_a: &Self::OpUnit, op_b: &Self::OpUnit) -> std::cmp::Ordering { fn cmp_id(op_a: &Self::OpUnit, op_b: &Self::OpUnit) -> std::cmp::Ordering {
op_a.id.client_id.cmp(&op_b.id.client_id) op_a.id.peer.cmp(&op_b.id.peer)
} }
fn contains(op: &Self::OpUnit, id: Self::OpId) -> bool { fn contains(op: &Self::OpUnit, id: Self::OpId) -> bool {
@ -263,7 +263,7 @@ pub mod fuzz {
Tracker, Tracker,
}, },
}, },
id::{ClientID, Counter, ID}, id::{Counter, PeerID, ID},
span::IdSpan, span::IdSpan,
}; };
@ -333,7 +333,7 @@ pub mod fuzz {
let mut tracker = Tracker::new(Default::default(), Counter::MAX / 2); let mut tracker = Tracker::new(Default::default(), Counter::MAX / 2);
#[cfg(feature = "test_utils")] #[cfg(feature = "test_utils")]
{ {
tracker.client_id = client_id as ClientID; tracker.client_id = client_id as PeerID;
} }
tracker tracker

View file

@ -22,7 +22,7 @@ mod test;
use crate::{ use crate::{
change::Lamport, change::Lamport,
id::{ClientID, Counter, ID}, id::{Counter, PeerID, ID},
span::{CounterSpan, HasId, HasIdSpan, HasLamport, HasLamportSpan, IdSpan}, span::{CounterSpan, HasId, HasIdSpan, HasLamport, HasLamportSpan, IdSpan},
version::{Frontiers, IdSpanVector, VersionVector, VersionVectorDiff}, version::{Frontiers, IdSpanVector, VersionVector, VersionVectorDiff},
}; };
@ -99,18 +99,18 @@ impl<T: Dag + ?Sized> DagUtils for T {
if from.len() == 1 && to.len() == 1 { if from.len() == 1 && to.len() == 1 {
let from = from[0]; let from = from[0];
let to = to[0]; let to = to[0];
if from.client_id == to.client_id { if from.peer == to.peer {
let from_span = self.get(from).unwrap(); let from_span = self.get(from).unwrap();
let to_span = self.get(to).unwrap(); let to_span = self.get(to).unwrap();
if std::ptr::eq(from_span, to_span) { if std::ptr::eq(from_span, to_span) {
if from.counter < to.counter { if from.counter < to.counter {
ans.right.insert( ans.right.insert(
from.client_id, from.peer,
CounterSpan::new(from.counter + 1, to.counter + 1), CounterSpan::new(from.counter + 1, to.counter + 1),
); );
} else { } else {
ans.left.insert( ans.left.insert(
from.client_id, from.peer,
CounterSpan::new(to.counter + 1, from.counter + 1), CounterSpan::new(to.counter + 1, from.counter + 1),
); );
} }
@ -119,7 +119,7 @@ impl<T: Dag + ?Sized> DagUtils for T {
if from_span.deps().len() == 1 && to_span.contains_id(from_span.deps()[0]) { if from_span.deps().len() == 1 && to_span.contains_id(from_span.deps()[0]) {
ans.left.insert( ans.left.insert(
from.client_id, from.peer,
CounterSpan::new(to.counter + 1, from.counter + 1), CounterSpan::new(to.counter + 1, from.counter + 1),
); );
return ans; return ans;
@ -127,7 +127,7 @@ impl<T: Dag + ?Sized> DagUtils for T {
if to_span.deps().len() == 1 && from_span.contains_id(to_span.deps()[0]) { if to_span.deps().len() == 1 && from_span.contains_id(to_span.deps()[0]) {
ans.right.insert( ans.right.insert(
from.client_id, from.peer,
CounterSpan::new(from.counter + 1, to.counter + 1), CounterSpan::new(from.counter + 1, to.counter + 1),
); );
return ans; return ans;
@ -194,7 +194,7 @@ where
{ {
let mut vv = VersionVector::new(); let mut vv = VersionVector::new();
let mut visited: FxHashSet<ID> = FxHashSet::default(); let mut visited: FxHashSet<ID> = FxHashSet::default();
vv.insert(id.client_id, id.counter + 1); vv.insert(id.peer, id.counter + 1);
let node = get(id).unwrap(); let node = get(id).unwrap();
if node.deps().is_empty() { if node.deps().is_empty() {
@ -324,13 +324,13 @@ fn _find_common_ancestor<'a, F, D, G>(
b_ids: &[ID], b_ids: &[ID],
notify: &mut G, notify: &mut G,
find_path: bool, find_path: bool,
) -> FxHashMap<ClientID, Counter> ) -> FxHashMap<PeerID, Counter>
where where
D: DagNode + 'a, D: DagNode + 'a,
F: Fn(ID) -> Option<&'a D>, F: Fn(ID) -> Option<&'a D>,
G: FnMut(IdSpan, NodeType), G: FnMut(IdSpan, NodeType),
{ {
let mut ans: FxHashMap<ClientID, Counter> = Default::default(); let mut ans: FxHashMap<PeerID, Counter> = Default::default();
let mut queue: BinaryHeap<(OrdIdSpan, NodeType)> = BinaryHeap::new(); let mut queue: BinaryHeap<(OrdIdSpan, NodeType)> = BinaryHeap::new();
for id in a_ids { for id in a_ids {
queue.push((OrdIdSpan::from_dag_node(*id, get).unwrap(), NodeType::A)); queue.push((OrdIdSpan::from_dag_node(*id, get).unwrap(), NodeType::A));
@ -338,7 +338,7 @@ where
for id in b_ids { for id in b_ids {
queue.push((OrdIdSpan::from_dag_node(*id, get).unwrap(), NodeType::B)); queue.push((OrdIdSpan::from_dag_node(*id, get).unwrap(), NodeType::B));
} }
let mut visited: HashMap<ClientID, (Counter, NodeType), _> = FxHashMap::default(); let mut visited: HashMap<PeerID, (Counter, NodeType), _> = FxHashMap::default();
// invariants in this method: // invariants in this method:
// //
// - visited's (client, counters) are subset of max(version_vector_a, version_vector_b) // - visited's (client, counters) are subset of max(version_vector_a, version_vector_b)
@ -380,10 +380,8 @@ where
} }
} else { } else {
if node_type != NodeType::Shared { if node_type != NodeType::Shared {
- if visited.get(&node.id.client_id).map(|(_, t)| *t)
-     != Some(NodeType::Shared)
- {
-     ans.insert(node.id.client_id, other_node.id_last().counter);
+ if visited.get(&node.id.peer).map(|(_, t)| *t) != Some(NodeType::Shared) {
+     ans.insert(node.id.peer, other_node.id_last().counter);
} }
node_type = NodeType::Shared; node_type = NodeType::Shared;
} }
@ -401,20 +399,20 @@ where
} }
// detect whether client is visited by other // detect whether client is visited by other
if let Some((ctr, visited_type)) = visited.get_mut(&node.id.client_id) { if let Some((ctr, visited_type)) = visited.get_mut(&node.id.peer) {
debug_assert!(*ctr >= node.id_last().counter); debug_assert!(*ctr >= node.id_last().counter);
if *visited_type == NodeType::Shared { if *visited_type == NodeType::Shared {
node_type = NodeType::Shared; node_type = NodeType::Shared;
} else if *visited_type != node_type { } else if *visited_type != node_type {
// if node_type is shared, ans should already contains it or its descendance // if node_type is shared, ans should already contains it or its descendance
if node_type != NodeType::Shared { if node_type != NodeType::Shared {
ans.insert(node.id.client_id, node.id_last().counter); ans.insert(node.id.peer, node.id_last().counter);
} }
*visited_type = NodeType::Shared; *visited_type = NodeType::Shared;
node_type = NodeType::Shared; node_type = NodeType::Shared;
} }
} else { } else {
visited.insert(node.id.client_id, (node.id_last().counter, node_type)); visited.insert(node.id.peer, (node.id_last().counter, node_type));
} }
// if this is not shared, the end of the span must be only reachable from A, or only reachable from B. // if this is not shared, the end of the span must be only reachable from A, or only reachable from B.
@ -480,7 +478,7 @@ where
if left.len() == 1 && right.len() == 1 { if left.len() == 1 && right.len() == 1 {
let left = left[0]; let left = left[0];
let right = right[0]; let right = right[0];
if left.client_id == right.client_id { if left.peer == right.peer {
let left_span = get(left).unwrap(); let left_span = get(left).unwrap();
let right_span = get(right).unwrap(); let right_span = get(right).unwrap();
if std::ptr::eq(left_span, right_span) { if std::ptr::eq(left_span, right_span) {
@ -585,9 +583,9 @@ where
pub fn remove_included_frontiers(frontiers: &mut VersionVector, new_change_deps: &[ID]) { pub fn remove_included_frontiers(frontiers: &mut VersionVector, new_change_deps: &[ID]) {
for dep in new_change_deps.iter() { for dep in new_change_deps.iter() {
if let Some(last) = frontiers.get_last(dep.client_id) { if let Some(last) = frontiers.get_last(dep.peer) {
if last <= dep.counter { if last <= dep.counter {
frontiers.remove(&dep.client_id); frontiers.remove(&dep.peer);
} }
} }
} }

View file

@ -225,14 +225,14 @@ impl<'a, T: DagNode, D: Dag<Node = T>> DagCausalIter<'a, D> {
// traverse all nodes // traverse all nodes
while let Some(id) = q.pop() { while let Some(id) = q.pop() {
let client = id.client_id; let client = id.peer;
let node = dag.get(id).unwrap(); let node = dag.get(id).unwrap();
let deps = node.deps(); let deps = node.deps();
if deps.len().is_zero() { if deps.len().is_zero() {
in_degrees.insert(id, 0); in_degrees.insert(id, 0);
} }
for dep in deps.iter() { for dep in deps.iter() {
let filter = if let Some(span) = target.get(&dep.client_id) { let filter = if let Some(span) = target.get(&dep.peer) {
dep.counter < span.min() dep.counter < span.min()
} else { } else {
true true
@ -287,7 +287,7 @@ impl<'a, T: DagNode + 'a, D: Dag<Node = T>> Iterator for DagCausalIter<'a, D> {
} }
let node_id = self.stack.pop().unwrap(); let node_id = self.stack.pop().unwrap();
let target_span = self.target.get_mut(&node_id.client_id).unwrap(); let target_span = self.target.get_mut(&node_id.peer).unwrap();
debug_assert_eq!( debug_assert_eq!(
node_id.counter, node_id.counter,
target_span.min(), target_span.min(),
@ -329,7 +329,7 @@ impl<'a, T: DagNode + 'a, D: Dag<Node = T>> Iterator for DagCausalIter<'a, D> {
// NOTE: we expect user to update the tracker, to apply node, after visiting the node // NOTE: we expect user to update the tracker, to apply node, after visiting the node
self.frontier = Frontiers::from_id(node.id_start().inc(slice_end - 1)); self.frontier = Frontiers::from_id(node.id_start().inc(slice_end - 1));
let current_client = node_id.client_id; let current_client = node_id.peer;
let mut keys = Vec::new(); let mut keys = Vec::new();
let mut heap = BinaryHeap::new(); let mut heap = BinaryHeap::new();
// The in-degree of the successor node minus 1, and if it becomes 0, it is added to the heap // The in-degree of the successor node minus 1, and if it becomes 0, it is added to the heap
@ -339,7 +339,7 @@ impl<'a, T: DagNode + 'a, D: Dag<Node = T>> Iterator for DagCausalIter<'a, D> {
self.in_degrees.entry(*succ_id).and_modify(|i| *i -= 1); self.in_degrees.entry(*succ_id).and_modify(|i| *i -= 1);
if let Some(in_degree) = self.in_degrees.get(succ_id) { if let Some(in_degree) = self.in_degrees.get(succ_id) {
if in_degree.is_zero() { if in_degree.is_zero() {
heap.push((succ_id.client_id != current_client, *succ_id)); heap.push((succ_id.peer != current_client, *succ_id));
self.in_degrees.remove(succ_id); self.in_degrees.remove(succ_id);
} }
} }
@ -432,10 +432,7 @@ mod test {
let end = n.slice.end; let end = n.slice.end;
let change = n.data; let change = n.data;
- vv.set_end(ID::new(
-     change.id.client_id,
-     end as Counter + change.id.counter,
- ));
+ vv.set_end(ID::new(change.id.peer, end as Counter + change.id.counter));
println!("{:?}\n", vv); println!("{:?}\n", vv);
} }
} }
@ -472,11 +469,11 @@ mod test {
let mut from_vv = VersionVector::new(); let mut from_vv = VersionVector::new();
from_vv.set_last(ID { from_vv.set_last(ID {
client_id: 1, peer: 1,
counter: 1, counter: 1,
}); });
from_vv.set_last(ID { from_vv.set_last(ID {
client_id: 2, peer: 2,
counter: 1, counter: 1,
}); });
let mut vv = from_vv.clone(); let mut vv = from_vv.clone();
@ -484,11 +481,11 @@ mod test {
for n in c1_store.iter_causal( for n in c1_store.iter_causal(
&[ &[
ID { ID {
client_id: 1, peer: 1,
counter: 1, counter: 1,
}, },
ID { ID {
client_id: 2, peer: 2,
counter: 1, counter: 1,
}, },
], ],
@ -501,10 +498,7 @@ mod test {
let end = n.slice.end; let end = n.slice.end;
let change = n.data; let change = n.data;
- vv.set_end(ID::new(
-     change.id.client_id,
-     end as Counter + change.id.counter,
- ));
+ vv.set_end(ID::new(change.id.peer, end as Counter + change.id.counter));
println!("{:?}\n", vv); println!("{:?}\n", vv);
} }
} }

View file

@ -1,6 +1,6 @@
use super::*; use super::*;
struct BreakPoints { struct BreakPoints {
break_points: FxHashMap<ClientID, FxHashSet<Counter>>, break_points: FxHashMap<PeerID, FxHashSet<Counter>>,
/// start ID to ID. The target ID may be in the middle of an op. /// start ID to ID. The target ID may be in the middle of an op.
/// ///
/// only includes links across different clients /// only includes links across different clients
@ -8,7 +8,7 @@ struct BreakPoints {
} }
struct Output { struct Output {
clients: FxHashMap<ClientID, Vec<IdSpan>>, clients: FxHashMap<PeerID, Vec<IdSpan>>,
/// start ID to start ID. /// start ID to start ID.
/// ///
/// only includes links across different clients /// only includes links across different clients
@ -60,7 +60,7 @@ fn to_str(output: Output) -> String {
for id_to in id_tos.iter() { for id_to in id_tos.iter() {
s += format!( s += format!(
"{}-{} --> {}-{}", "{}-{} --> {}-{}",
id_from.client_id, id_from.counter, id_to.client_id, id_to.counter id_from.peer, id_from.counter, id_to.peer, id_to.counter
) )
.as_str(); .as_str();
new_line!(); new_line!();
@ -75,7 +75,7 @@ fn break_points_to_output(input: BreakPoints) -> Output {
clients: FxHashMap::default(), clients: FxHashMap::default(),
links: FxHashMap::default(), links: FxHashMap::default(),
}; };
let breaks: FxHashMap<ClientID, Vec<Counter>> = input let breaks: FxHashMap<PeerID, Vec<Counter>> = input
.break_points .break_points
.into_iter() .into_iter()
.map(|(client_id, set)| { .map(|(client_id, set)| {
@ -94,7 +94,7 @@ fn break_points_to_output(input: BreakPoints) -> Output {
for (id_from, id_tos) in input.links.iter() { for (id_from, id_tos) in input.links.iter() {
for id_to in id_tos { for id_to in id_tos {
let client_breaks = breaks.get(&id_to.client_id).unwrap(); let client_breaks = breaks.get(&id_to.peer).unwrap();
match client_breaks.binary_search(&id_to.counter) { match client_breaks.binary_search(&id_to.counter) {
Ok(_) => { Ok(_) => {
output.links.entry(*id_from).or_default().push(*id_to); output.links.entry(*id_from).or_default().push(*id_to);
@ -104,7 +104,7 @@ fn break_points_to_output(input: BreakPoints) -> Output {
.links .links
.entry(*id_from) .entry(*id_from)
.or_default() .or_default()
.push(ID::new(id_to.client_id, client_breaks[index - 1])); .push(ID::new(id_to.peer, client_breaks[index - 1]));
} }
} }
} }
@ -120,17 +120,17 @@ fn get_dag_break_points<T: DagNode>(dag: &impl Dag<Node = T>) -> BreakPoints {
for node in dag.iter() { for node in dag.iter() {
let id = node.id_start(); let id = node.id_start();
let set = break_points.break_points.entry(id.client_id).or_default(); let set = break_points.break_points.entry(id.peer).or_default();
set.insert(id.counter); set.insert(id.counter);
set.insert(id.counter + node.content_len() as Counter); set.insert(id.counter + node.content_len() as Counter);
for dep in node.deps() { for dep in node.deps() {
if dep.client_id == id.client_id { if dep.peer == id.peer {
continue; continue;
} }
break_points break_points
.break_points .break_points
.entry(dep.client_id) .entry(dep.peer)
.or_default() .or_default()
.insert(dep.counter); .insert(dep.counter);
break_points.links.entry(id).or_default().push(*dep); break_points.links.entry(id).or_default().push(*dep);

View file

@ -7,7 +7,7 @@ use super::*;
use crate::{ use crate::{
array_mut_ref, array_mut_ref,
change::Lamport, change::Lamport,
id::{ClientID, Counter, ID}, id::{Counter, PeerID, ID},
span::HasIdSpan, span::HasIdSpan,
}; };
@ -71,11 +71,11 @@ impl HasLength for TestNode {
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
struct TestDag { struct TestDag {
nodes: FxHashMap<ClientID, Vec<TestNode>>, nodes: FxHashMap<PeerID, Vec<TestNode>>,
frontier: Vec<ID>, frontier: Vec<ID>,
version_vec: VersionVector, version_vec: VersionVector,
next_lamport: Lamport, next_lamport: Lamport,
client_id: ClientID, client_id: PeerID,
} }
impl TestDag { impl TestDag {
@ -88,7 +88,7 @@ impl Dag for TestDag {
type Node = TestNode; type Node = TestNode;
fn get(&self, id: ID) -> Option<&Self::Node> { fn get(&self, id: ID) -> Option<&Self::Node> {
let arr = self.nodes.get(&id.client_id)?; let arr = self.nodes.get(&id.peer)?;
arr.binary_search_by(|node| { arr.binary_search_by(|node| {
if node.id.counter > id.counter { if node.id.counter > id.counter {
Ordering::Greater Ordering::Greater
@ -111,7 +111,7 @@ impl Dag for TestDag {
} }
impl TestDag { impl TestDag {
pub fn new(client_id: ClientID) -> Self { pub fn new(client_id: PeerID) -> Self {
Self { Self {
nodes: FxHashMap::default(), nodes: FxHashMap::default(),
frontier: Vec::new(), frontier: Vec::new(),
@ -135,7 +135,7 @@ impl TestDag {
let id = ID::new(client_id, *counter); let id = ID::new(client_id, *counter);
*counter += len as Counter; *counter += len as Counter;
let deps = std::mem::replace(&mut self.frontier, vec![id.inc(len as Counter - 1)]); let deps = std::mem::replace(&mut self.frontier, vec![id.inc(len as Counter - 1)]);
if deps.len() == 1 && deps[0].client_id == client_id { if deps.len() == 1 && deps[0].peer == client_id {
// can merge two op // can merge two op
let arr = self.nodes.get_mut(&client_id).unwrap(); let arr = self.nodes.get_mut(&client_id).unwrap();
let mut last = arr.last_mut().unwrap(); let mut last = arr.last_mut().unwrap();
@ -182,21 +182,18 @@ impl TestDag {
fn update_frontier(frontier: &mut Vec<ID>, new_node_id: ID, new_node_deps: &[ID]) { fn update_frontier(frontier: &mut Vec<ID>, new_node_id: ID, new_node_deps: &[ID]) {
frontier.retain(|x| { frontier.retain(|x| {
if x.client_id == new_node_id.client_id && x.counter <= new_node_id.counter { if x.peer == new_node_id.peer && x.counter <= new_node_id.counter {
return false; return false;
} }
!new_node_deps !new_node_deps
.iter() .iter()
.any(|y| y.client_id == x.client_id && y.counter >= x.counter) .any(|y| y.peer == x.peer && y.counter >= x.counter)
}); });
// nodes from the same client with `counter < new_node_id.counter` // nodes from the same client with `counter < new_node_id.counter`
// are filtered out from frontier. // are filtered out from frontier.
- if frontier
-     .iter()
-     .all(|x| x.client_id != new_node_id.client_id)
- {
+ if frontier.iter().all(|x| x.peer != new_node_id.peer) {
frontier.push(new_node_id); frontier.push(new_node_id);
} }
} }
@ -204,10 +201,10 @@ impl TestDag {
fn _try_push_node( fn _try_push_node(
&mut self, &mut self,
node: &TestNode, node: &TestNode,
pending: &mut Vec<(ClientID, usize)>, pending: &mut Vec<(PeerID, usize)>,
i: usize, i: usize,
) -> bool { ) -> bool {
let client_id = node.id.client_id; let client_id = node.id.peer;
if self.contains(node.id_last()) { if self.contains(node.id_last()) {
return false; return false;
} }
@ -260,7 +257,7 @@ fn test_dag() {
assert_eq!( assert_eq!(
a.frontier()[0], a.frontier()[0],
ID { ID {
client_id: 0, peer: 0,
counter: 1 counter: 1
} }
); );
@ -552,7 +549,7 @@ mod find_path {
preprocess(&mut interactions, dag_num); preprocess(&mut interactions, dag_num);
let mut dags = Vec::new(); let mut dags = Vec::new();
for i in 0..dag_num { for i in 0..dag_num {
dags.push(TestDag::new(i as ClientID)); dags.push(TestDag::new(i as PeerID));
} }
for interaction in interactions.iter_mut() { for interaction in interactions.iter_mut() {
@ -914,7 +911,7 @@ mod find_common_ancestors_proptest {
preprocess(&mut after_merge_insertion, dag_num); preprocess(&mut after_merge_insertion, dag_num);
let mut dags = Vec::new(); let mut dags = Vec::new();
for i in 0..dag_num { for i in 0..dag_num {
dags.push(TestDag::new(i as ClientID)); dags.push(TestDag::new(i as PeerID));
} }
for interaction in before_merge_insertion { for interaction in before_merge_insertion {
@ -981,7 +978,7 @@ mod find_common_ancestors_proptest {
preprocess(&mut after_merge_insertion, dag_num); preprocess(&mut after_merge_insertion, dag_num);
let mut dags = Vec::new(); let mut dags = Vec::new();
for i in 0..dag_num { for i in 0..dag_num {
dags.push(TestDag::new(i as ClientID)); dags.push(TestDag::new(i as PeerID));
} }
for mut interaction in before_merge_insertion { for mut interaction in before_merge_insertion {
@ -1124,7 +1121,7 @@ mod dag_partial_iter {
preprocess(&mut interactions, dag_num); preprocess(&mut interactions, dag_num);
let mut dags = Vec::new(); let mut dags = Vec::new();
for i in 0..dag_num { for i in 0..dag_num {
dags.push(TestDag::new(i as ClientID)); dags.push(TestDag::new(i as PeerID));
} }
for interaction in interactions.iter_mut() { for interaction in interactions.iter_mut() {
@ -1182,19 +1179,19 @@ mod dag_partial_iter {
// dbg!(&sliced, &forward, &retreat, slice); // dbg!(&sliced, &forward, &retreat, slice);
} }
assert!(diff_spans assert!(diff_spans
.get(&data.id.client_id) .get(&data.id.peer)
.unwrap() .unwrap()
.contains(sliced.id.counter)); .contains(sliced.id.counter));
vv.forward(&forward); vv.forward(&forward);
vv.retreat(&retreat); vv.retreat(&retreat);
let mut data_vv = map.get(&data.id).unwrap().clone(); let mut data_vv = map.get(&data.id).unwrap().clone();
data_vv.extend_to_include(IdSpan::new( data_vv.extend_to_include(IdSpan::new(
sliced.id.client_id, sliced.id.peer,
sliced.id.counter, sliced.id.counter,
sliced.id.counter + 1, sliced.id.counter + 1,
)); ));
data_vv.shrink_to_exclude(IdSpan::new( data_vv.shrink_to_exclude(IdSpan::new(
sliced.id.client_id, sliced.id.peer,
sliced.id.counter, sliced.id.counter,
sliced.id_end().counter, sliced.id_end().counter,
)); ));

View file

@ -1,11 +1,11 @@
use thiserror::Error; use thiserror::Error;
use crate::id::ClientID; use crate::id::PeerID;
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum LoroError { pub enum LoroError {
#[error("Context's client_id({found:?}) does not match Container's client_id({expected:?})")] #[error("Context's client_id({found:?}) does not match Container's client_id({expected:?})")]
UnmatchedContext { expected: ClientID, found: ClientID }, UnmatchedContext { expected: PeerID, found: PeerID },
#[error("Decode version vector error. Please provide correct version.")] #[error("Decode version vector error. Please provide correct version.")]
DecodeVersionVectorError, DecodeVersionVectorError,
#[error("Decode error ({0})")] #[error("Decode error ({0})")]

View file

@ -6,7 +6,7 @@ use tabled::{TableIteratorExt, Tabled};
pub mod recursive; pub mod recursive;
pub mod recursive_txn; pub mod recursive_txn;
use crate::{array_mut_ref, id::ClientID, LoroCore, Transact, VersionVector}; use crate::{array_mut_ref, id::PeerID, LoroCore, Transact, VersionVector};
#[derive(arbitrary::Arbitrary, EnumAsInner, Clone, PartialEq, Eq, Debug)] #[derive(arbitrary::Arbitrary, EnumAsInner, Clone, PartialEq, Eq, Debug)]
pub enum Action { pub enum Action {
@ -414,7 +414,7 @@ where
pub fn normalize(site_num: u8, actions: &mut [Action]) -> Vec<Action> { pub fn normalize(site_num: u8, actions: &mut [Action]) -> Vec<Action> {
let mut sites = Vec::new(); let mut sites = Vec::new();
for i in 0..site_num { for i in 0..site_num {
sites.push(LoroCore::new(Default::default(), Some(i as ClientID))); sites.push(LoroCore::new(Default::default(), Some(i as PeerID)));
} }
let mut applied = Vec::new(); let mut applied = Vec::new();
@ -441,7 +441,7 @@ pub fn normalize(site_num: u8, actions: &mut [Action]) -> Vec<Action> {
pub fn test_multi_sites(site_num: u8, actions: &mut [Action]) { pub fn test_multi_sites(site_num: u8, actions: &mut [Action]) {
let mut sites = Vec::new(); let mut sites = Vec::new();
for i in 0..site_num { for i in 0..site_num {
sites.push(LoroCore::new(Default::default(), Some(i as ClientID))); sites.push(LoroCore::new(Default::default(), Some(i as PeerID)));
} }
let mut applied = Vec::new(); let mut applied = Vec::new();

View file

@ -15,7 +15,7 @@ use crate::{
container::ContainerID, container::ContainerID,
delta::DeltaItem, delta::DeltaItem,
event::{Diff, Observer}, event::{Diff, Observer},
id::ClientID, id::PeerID,
ContainerType, List, LoroCore, LoroValue, Map, Text, Transact, ContainerType, List, LoroCore, LoroValue, Map, Text, Transact,
}; };
use crate::{container::registry::ContainerIdx, EncodeMode}; use crate::{container::registry::ContainerIdx, EncodeMode};
@ -60,7 +60,7 @@ struct Actor {
} }
impl Actor { impl Actor {
fn new(id: ClientID) -> Self { fn new(id: PeerID) -> Self {
let mut actor = Actor { let mut actor = Actor {
loro: LoroCore::new(Default::default(), Some(id)), loro: LoroCore::new(Default::default(), Some(id)),
value_tracker: Arc::new(Mutex::new(LoroValue::Map(Default::default()))), value_tracker: Arc::new(Mutex::new(LoroValue::Map(Default::default()))),

View file

@ -14,7 +14,7 @@ use crate::{
container::{registry::ContainerIdx, ContainerID}, container::{registry::ContainerIdx, ContainerID},
delta::DeltaItem, delta::DeltaItem,
event::{Diff, Observer}, event::{Diff, Observer},
id::ClientID, id::PeerID,
transaction::TransactionWrap, transaction::TransactionWrap,
ContainerType, EncodeMode, List, LoroCore, LoroValue, Map, Text, Transact, ContainerType, EncodeMode, List, LoroCore, LoroValue, Map, Text, Transact,
}; };
@ -60,7 +60,7 @@ struct Actor {
} }
impl Actor { impl Actor {
fn new(id: ClientID) -> Self { fn new(id: PeerID) -> Self {
let loro = LoroCore::new(Default::default(), Some(id)); let loro = LoroCore::new(Default::default(), Some(id));
let txn = loro.transact(); let txn = loro.transact();
let mut actor = Actor { let mut actor = Actor {

View file

@ -7,26 +7,26 @@ use crate::{
LoroError, LoroError,
}; };
pub type ClientID = u64; pub type PeerID = u64;
pub type Counter = i32; pub type Counter = i32;
const UNKNOWN: ClientID = 404; const UNKNOWN: PeerID = 404;
// Note: It will be encoded into binary format, so its order should not be changed. // Note: It will be encoded into binary format, so its order should not be changed.
#[derive(PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize)] #[derive(PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize)]
pub struct ID { pub struct ID {
pub client_id: ClientID, pub peer: PeerID,
pub counter: Counter, pub counter: Counter,
} }
impl Debug for ID { impl Debug for ID {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(format!("c{}:{}", self.client_id, self.counter).as_str()) f.write_str(format!("c{}:{}", self.peer, self.counter).as_str())
} }
} }
impl Display for ID { impl Display for ID {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(format!("{}@{}", self.counter, self.client_id).as_str()) f.write_str(format!("{}@{}", self.counter, self.peer).as_str())
} }
} }
@ -43,15 +43,18 @@ impl TryFrom<&str> for ID {
.parse::<Counter>() .parse::<Counter>()
.map_err(|_| LoroError::DecodeError("Invalid ID format".into()))?; .map_err(|_| LoroError::DecodeError("Invalid ID format".into()))?;
let client_id = splitted[1] let client_id = splitted[1]
.parse::<ClientID>() .parse::<PeerID>()
.map_err(|_| LoroError::DecodeError("Invalid ID format".into()))?; .map_err(|_| LoroError::DecodeError("Invalid ID format".into()))?;
- Ok(ID { client_id, counter })
+ Ok(ID {
+     peer: client_id,
+     counter,
+ })
} }
} }
impl PartialOrd for ID { impl PartialOrd for ID {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
match self.client_id.partial_cmp(&other.client_id) { match self.peer.partial_cmp(&other.peer) {
Some(core::cmp::Ordering::Equal) => {} Some(core::cmp::Ordering::Equal) => {}
ord => return ord, ord => return ord,
} }
@ -61,7 +64,7 @@ impl PartialOrd for ID {
impl Ord for ID { impl Ord for ID {
fn cmp(&self, other: &Self) -> std::cmp::Ordering { fn cmp(&self, other: &Self) -> std::cmp::Ordering {
match self.client_id.cmp(&other.client_id) { match self.peer.cmp(&other.peer) {
core::cmp::Ordering::Equal => self.counter.cmp(&other.counter), core::cmp::Ordering::Equal => self.counter.cmp(&other.counter),
ord => ord, ord => ord,
} }
@ -69,14 +72,14 @@ impl Ord for ID {
} }
pub const ROOT_ID: ID = ID { pub const ROOT_ID: ID = ID {
client_id: ClientID::MAX, peer: PeerID::MAX,
counter: i32::MAX, counter: i32::MAX,
}; };
impl From<u128> for ID { impl From<u128> for ID {
fn from(id: u128) -> Self { fn from(id: u128) -> Self {
ID { ID {
client_id: (id >> 64) as ClientID, peer: (id >> 64) as PeerID,
counter: id as Counter, counter: id as Counter,
} }
} }
@ -84,8 +87,8 @@ impl From<u128> for ID {
impl ID { impl ID {
#[inline] #[inline]
pub fn new(client_id: ClientID, counter: Counter) -> Self { pub fn new(peer: PeerID, counter: Counter) -> Self {
ID { client_id, counter } ID { peer, counter }
} }
#[inline] #[inline]
@ -95,13 +98,13 @@ impl ID {
#[inline] #[inline]
pub fn is_null(&self) -> bool { pub fn is_null(&self) -> bool {
self.client_id == ClientID::MAX self.peer == PeerID::MAX
} }
#[inline] #[inline]
pub fn to_span(&self, len: usize) -> IdSpan { pub fn to_span(&self, len: usize) -> IdSpan {
IdSpan { IdSpan {
client_id: self.client_id, client_id: self.peer,
counter: CounterSpan::new(self.counter, self.counter + len as Counter), counter: CounterSpan::new(self.counter, self.counter + len as Counter),
} }
} }
@ -109,33 +112,33 @@ impl ID {
#[inline] #[inline]
pub fn unknown(counter: Counter) -> Self { pub fn unknown(counter: Counter) -> Self {
ID { ID {
client_id: UNKNOWN, peer: UNKNOWN,
counter, counter,
} }
} }
#[inline] #[inline]
pub fn is_unknown(&self) -> bool { pub fn is_unknown(&self) -> bool {
self.client_id == UNKNOWN self.peer == UNKNOWN
} }
#[inline] #[inline]
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) fn is_connected_id(&self, other: &Self, self_len: usize) -> bool { pub(crate) fn is_connected_id(&self, other: &Self, self_len: usize) -> bool {
self.client_id == other.client_id && self.counter + self_len as Counter == other.counter self.peer == other.peer && self.counter + self_len as Counter == other.counter
} }
#[inline] #[inline]
pub fn inc(&self, inc: i32) -> Self { pub fn inc(&self, inc: i32) -> Self {
ID { ID {
client_id: self.client_id, peer: self.peer,
counter: self.counter + inc, counter: self.counter + inc,
} }
} }
#[inline] #[inline]
pub fn contains(&self, len: Counter, target: ID) -> bool { pub fn contains(&self, len: Counter, target: ID) -> bool {
self.client_id == target.client_id self.peer == target.peer
&& self.counter <= target.counter && self.counter <= target.counter
&& target.counter < self.counter + len && target.counter < self.counter + len
} }
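As a side note on this file: `From<u128> for ID` above, together with the `From<ID> for u128` impl earlier in the diff (in the tracker module), packs the renamed `peer` into the high 64 bits and `counter` into the low bits. Below is a small standalone round-trip check of that packing with simplified local types; it is illustrative only and assumes non-negative counters.

```rust
type PeerID = u64;
type Counter = i32;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct ID {
    peer: PeerID,
    counter: Counter,
}

// Matches `From<ID> for u128` in the diff: peer occupies the high 64 bits.
fn pack(id: ID) -> u128 {
    ((id.peer as u128) << 64) | id.counter as u128
}

// Matches `From<u128> for ID` in the diff: shift the peer back out and
// truncate the low bits into the counter.
fn unpack(packed: u128) -> ID {
    ID {
        peer: (packed >> 64) as PeerID,
        counter: packed as Counter,
    }
}

fn main() {
    let id = ID { peer: 3, counter: 17 };
    let packed = pack(id);
    assert_eq!(packed, (3u128 << 64) | 17);
    assert_eq!(unpack(packed), id);
}
```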

View file

@ -37,7 +37,7 @@ mod value;
pub use error::LoroError; pub use error::LoroError;
pub(crate) mod macros; pub(crate) mod macros;
pub(crate) use change::{Lamport, Timestamp}; pub(crate) use change::{Lamport, Timestamp};
pub(crate) use id::{ClientID, ID}; pub(crate) use id::{PeerID, ID};
pub(crate) use op::{ContentType, InsertContentTrait, Op}; pub(crate) use op::{ContentType, InsertContentTrait, Op};
// TODO: rename as Key? // TODO: rename as Key?

View file

@ -26,7 +26,7 @@ use crate::{
ContainerID, ContainerID,
}, },
dag::Dag, dag::Dag,
id::{ClientID, Counter}, id::{Counter, PeerID},
op::RemoteOp, op::RemoteOp,
span::{HasCounterSpan, HasIdSpan, IdSpan}, span::{HasCounterSpan, HasIdSpan, IdSpan},
ContainerType, Lamport, Op, Timestamp, VersionVector, ID, ContainerType, Lamport, Op, Timestamp, VersionVector, ID,
@ -59,8 +59,8 @@ impl GcConfig {
} }
} }
pub(crate) type ClientChanges = FxHashMap<ClientID, RleVecWithIndex<Change, ChangeMergeCfg>>; pub(crate) type ClientChanges = FxHashMap<PeerID, RleVecWithIndex<Change, ChangeMergeCfg>>;
pub(crate) type RemoteClientChanges = FxHashMap<ClientID, Vec<Change<RemoteOp>>>; pub(crate) type RemoteClientChanges = FxHashMap<PeerID, Vec<Change<RemoteOp>>>;
#[derive(Debug)] #[derive(Debug)]
/// LogStore stores the full history of Loro /// LogStore stores the full history of Loro
@ -77,7 +77,7 @@ pub struct LogStore {
latest_lamport: Lamport, latest_lamport: Lamport,
latest_timestamp: Timestamp, latest_timestamp: Timestamp,
frontiers: Frontiers, frontiers: Frontiers,
pub(crate) this_client_id: ClientID, pub(crate) this_client_id: PeerID,
/// CRDT container manager /// CRDT container manager
pub(crate) reg: ContainerRegistry, pub(crate) reg: ContainerRegistry,
pending_changes: RemoteClientChanges, pending_changes: RemoteClientChanges,
@ -87,7 +87,7 @@ pub struct LogStore {
type ContainerGuard<'a> = MutexGuard<'a, ContainerInstance>; type ContainerGuard<'a> = MutexGuard<'a, ContainerInstance>;
impl LogStore { impl LogStore {
pub(crate) fn new(cfg: Configure, client_id: Option<ClientID>) -> Arc<RwLock<Self>> { pub(crate) fn new(cfg: Configure, client_id: Option<PeerID>) -> Arc<RwLock<Self>> {
let this_client_id = client_id.unwrap_or_else(|| cfg.rand.next_u64()); let this_client_id = client_id.unwrap_or_else(|| cfg.rand.next_u64());
Arc::new(RwLock::new(Self { Arc::new(RwLock::new(Self {
cfg, cfg,
@ -105,7 +105,7 @@ impl LogStore {
#[inline] #[inline]
pub fn lookup_change(&self, id: ID) -> Option<&Change> { pub fn lookup_change(&self, id: ID) -> Option<&Change> {
self.changes.get(&id.client_id).and_then(|changes| { self.changes.get(&id.peer).and_then(|changes| {
if id.counter <= changes.last().unwrap().id_last().counter { if id.counter <= changes.last().unwrap().id_last().counter {
Some(changes.get(id.counter as usize).unwrap().element) Some(changes.get(id.counter as usize).unwrap().element)
} else { } else {
@ -114,13 +114,13 @@ impl LogStore {
}) })
} }
pub fn export(&self, remote_vv: &VersionVector) -> FxHashMap<ClientID, Vec<Change<RemoteOp>>> { pub fn export(&self, remote_vv: &VersionVector) -> FxHashMap<PeerID, Vec<Change<RemoteOp>>> {
let mut ans: FxHashMap<ClientID, Vec<Change<RemoteOp>>> = Default::default(); let mut ans: FxHashMap<PeerID, Vec<Change<RemoteOp>>> = Default::default();
let self_vv = self.vv(); let self_vv = self.vv();
for span in self_vv.sub_iter(remote_vv) { for span in self_vv.sub_iter(remote_vv) {
let changes = self.get_changes_slice(span.id_span()); let changes = self.get_changes_slice(span.id_span());
for change in changes.iter() { for change in changes.iter() {
let vec = ans.entry(change.id.client_id).or_insert_with(Vec::new); let vec = ans.entry(change.id.peer).or_insert_with(Vec::new);
vec.push(self.change_to_export_format(change)); vec.push(self.change_to_export_format(change));
} }
} }
@ -207,21 +207,21 @@ impl LogStore {
#[inline(always)] #[inline(always)]
pub fn next_id(&self) -> ID { pub fn next_id(&self) -> ID {
ID { ID {
client_id: self.this_client_id, peer: self.this_client_id,
counter: self.get_next_counter(self.this_client_id), counter: self.get_next_counter(self.this_client_id),
} }
} }
#[inline(always)] #[inline(always)]
pub fn next_id_for(&self, client: ClientID) -> ID { pub fn next_id_for(&self, client: PeerID) -> ID {
ID { ID {
client_id: client, peer: client,
counter: self.get_next_counter(client), counter: self.get_next_counter(client),
} }
} }
#[inline(always)] #[inline(always)]
pub fn this_client_id(&self) -> ClientID { pub fn this_client_id(&self) -> PeerID {
self.this_client_id self.this_client_id
} }
@ -241,7 +241,7 @@ impl LogStore {
} }
pub fn includes_id(&self, id: ID) -> bool { pub fn includes_id(&self, id: ID) -> bool {
let Some(changes) = self.changes.get(&id.client_id) else { let Some(changes) = self.changes.get(&id.peer) else {
return false return false
}; };
changes.last().unwrap().id_last().counter >= id.counter changes.last().unwrap().id_last().counter >= id.counter
@ -256,7 +256,7 @@ impl LogStore {
let lamport = self.next_lamport(); let lamport = self.next_lamport();
let timestamp = (self.cfg.get_time)(); let timestamp = (self.cfg.get_time)();
let id = ID { let id = ID {
client_id: self.this_client_id, peer: self.this_client_id,
counter: self.get_next_counter(self.this_client_id), counter: self.get_next_counter(self.this_client_id),
}; };
let last = ops.last().unwrap(); let last = ops.last().unwrap();
@ -293,13 +293,13 @@ impl LogStore {
#[inline] #[inline]
pub fn contains_id(&self, id: ID) -> bool { pub fn contains_id(&self, id: ID) -> bool {
self.changes self.changes
.get(&id.client_id) .get(&id.peer)
.map_or(0, |changes| changes.atom_len()) .map_or(0, |changes| changes.atom_len())
> id.counter as usize > id.counter as usize
} }
#[inline] #[inline]
fn get_next_counter(&self, client_id: ClientID) -> Counter { fn get_next_counter(&self, client_id: PeerID) -> Counter {
self.changes self.changes
.get(&client_id) .get(&client_id)
.map(|changes| changes.atom_len()) .map(|changes| changes.atom_len())
@ -308,7 +308,7 @@ impl LogStore {
#[inline] #[inline]
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) fn iter_client_op(&self, client_id: ClientID) -> iter::ClientOpIter<'_> { pub(crate) fn iter_client_op(&self, client_id: PeerID) -> iter::ClientOpIter<'_> {
iter::ClientOpIter { iter::ClientOpIter {
change_index: 0, change_index: 0,
op_index: 0, op_index: 0,
@ -414,7 +414,7 @@ impl Dag for LogStore {
fn get(&self, id: ID) -> Option<&Self::Node> { fn get(&self, id: ID) -> Option<&Self::Node> {
self.changes self.changes
.get(&id.client_id) .get(&id.peer)
.and_then(|x| x.get(id.counter as usize).map(|x| x.element)) .and_then(|x| x.get(id.counter as usize).map(|x| x.element))
} }

View file

@ -18,7 +18,7 @@ use crate::{
dag::Dag, dag::Dag,
event::EventDiff, event::EventDiff,
hierarchy::Hierarchy, hierarchy::Hierarchy,
id::{ClientID, Counter, ID}, id::{Counter, PeerID, ID},
log_store::RemoteClientChanges, log_store::RemoteClientChanges,
op::{RemoteContent, RemoteOp}, op::{RemoteContent, RemoteOp},
smstring::SmString, smstring::SmString,
@ -28,7 +28,7 @@ use crate::{
}; };
type ClientIdx = u32; type ClientIdx = u32;
type Clients = Vec<ClientID>; type Clients = Vec<PeerID>;
type Containers = Vec<ContainerID>; type Containers = Vec<ContainerID>;
#[columnar(vec, ser, de)] #[columnar(vec, ser, de)]
@ -96,7 +96,7 @@ struct DocEncoding {
#[instrument(skip_all)] #[instrument(skip_all)]
pub(super) fn encode_changes(store: &LogStore, vv: &VersionVector) -> Result<Vec<u8>, LoroError> { pub(super) fn encode_changes(store: &LogStore, vv: &VersionVector) -> Result<Vec<u8>, LoroError> {
let mut client_id_to_idx: FxHashMap<ClientID, ClientIdx> = FxHashMap::default(); let mut client_id_to_idx: FxHashMap<PeerID, ClientIdx> = FxHashMap::default();
let mut clients = Vec::with_capacity(store.changes.len()); let mut clients = Vec::with_capacity(store.changes.len());
let mut container_indexes = Vec::new(); let mut container_indexes = Vec::new();
let mut container_idx2index = FxHashMap::default(); let mut container_idx2index = FxHashMap::default();
@ -124,9 +124,9 @@ pub(super) fn encode_changes(store: &LogStore, vv: &VersionVector) -> Result<Vec
for change in &diff_changes { for change in &diff_changes {
for deps in change.deps.iter() { for deps in change.deps.iter() {
client_id_to_idx.entry(deps.client_id).or_insert_with(|| { client_id_to_idx.entry(deps.peer).or_insert_with(|| {
let idx = clients.len() as ClientIdx; let idx = clients.len() as ClientIdx;
clients.push(deps.client_id); clients.push(deps.peer);
idx idx
}); });
} }
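
The `client_id_to_idx` / `clients` pair built above is a small interning table: each full-width PeerID is written once into `clients`, and the columnar change encoding then refers to peers by a compact `ClientIdx` (u32). A hedged sketch of the same entry-or-insert pattern, independent of Loro's encoder and with PeerID assumed to be u64:

use std::collections::HashMap;

type PeerID = u64;
type ClientIdx = u32;

// Returns a stable small index for `peer`, appending unseen peers to
// `clients`, just like the entry().or_insert_with() call in the diff above.
fn intern(peer: PeerID, table: &mut HashMap<PeerID, ClientIdx>, clients: &mut Vec<PeerID>) -> ClientIdx {
    *table.entry(peer).or_insert_with(|| {
        let idx = clients.len() as ClientIdx;
        clients.push(peer);
        idx
    })
}

fn main() {
    let (mut table, mut clients) = (HashMap::new(), Vec::new());
    assert_eq!(intern(10, &mut table, &mut clients), 0);
    assert_eq!(intern(42, &mut table, &mut clients), 1);
    assert_eq!(intern(10, &mut table, &mut clients), 0); // already interned
    assert_eq!(clients, vec![10, 42]);
}
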
@ -139,13 +139,13 @@ pub(super) fn encode_changes(store: &LogStore, vv: &VersionVector) -> Result<Vec
let mut deps = Vec::with_capacity(change_num); let mut deps = Vec::with_capacity(change_num);
for change in diff_changes { for change in diff_changes {
let client_idx = client_id_to_idx[&change.id.client_id]; let client_idx = client_id_to_idx[&change.id.peer];
let mut dep_on_self = false; let mut dep_on_self = false;
let mut deps_len = 0; let mut deps_len = 0;
for dep in change.deps.iter() { for dep in change.deps.iter() {
if change.id.client_id != dep.client_id { if change.id.peer != dep.peer {
deps.push(DepsEncoding::new( deps.push(DepsEncoding::new(
*client_id_to_idx.get(&dep.client_id).unwrap(), *client_id_to_idx.get(&dep.peer).unwrap(),
dep.counter, dep.counter,
)); ));
deps_len += 1; deps_len += 1;
@ -329,7 +329,10 @@ pub(super) fn decode_changes_to_inner_format(
} }
let change = Change { let change = Change {
id: ID { client_id, counter }, id: ID {
peer: client_id,
counter,
},
// calc lamport after parsing all changes // calc lamport after parsing all changes
lamport: 0, lamport: 0,
timestamp, timestamp,
@ -402,22 +405,22 @@ pub(super) fn decode_changes_to_inner_format(
pub(crate) fn get_lamport_by_deps( pub(crate) fn get_lamport_by_deps(
deps: &Frontiers, deps: &Frontiers,
lamport_map: &FxHashMap<ClientID, Vec<(Range<Counter>, Lamport)>>, lamport_map: &FxHashMap<PeerID, Vec<(Range<Counter>, Lamport)>>,
store: Option<&LogStore>, store: Option<&LogStore>,
) -> Result<Lamport, ClientID> { ) -> Result<Lamport, PeerID> {
let mut ans = Vec::new(); let mut ans = Vec::new();
for id in deps.iter() { for id in deps.iter() {
if let Some(c) = store.and_then(|x| x.lookup_change(*id)) { if let Some(c) = store.and_then(|x| x.lookup_change(*id)) {
let offset = id.counter - c.id.counter; let offset = id.counter - c.id.counter;
ans.push(c.lamport + offset as u32); ans.push(c.lamport + offset as u32);
} else if let Some(v) = lamport_map.get(&id.client_id) { } else if let Some(v) = lamport_map.get(&id.peer) {
if let Some((lamport, offset)) = get_value_from_range_map(v, id.counter) { if let Some((lamport, offset)) = get_value_from_range_map(v, id.counter) {
ans.push(lamport + offset); ans.push(lamport + offset);
} else { } else {
return Err(id.client_id); return Err(id.peer);
} }
} else { } else {
return Err(id.client_id); return Err(id.peer);
} }
} }
Ok(ans.into_iter().max().unwrap_or(0) + 1) Ok(ans.into_iter().max().unwrap_or(0) + 1)
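
`get_lamport_by_deps` above implements the usual Lamport rule: resolve the lamport of every dependency (from an already-stored change or from `lamport_map`), take the maximum, and add one; if some dependency cannot be resolved yet, bail out with the PeerID whose history is still missing. A hedged sketch of just that rule, with the dependency lamports assumed to be resolved already:

type PeerID = u64;
type Lamport = u32;

// A change's lamport is one past the largest lamport among its deps; an
// unresolved dep short-circuits with Err(peer), as in the diff above.
fn lamport_by_deps(dep_lamports: &[Result<Lamport, PeerID>]) -> Result<Lamport, PeerID> {
    let mut ans = Vec::new();
    for dep in dep_lamports {
        ans.push((*dep)?);
    }
    Ok(ans.into_iter().max().unwrap_or(0) + 1)
}

fn main() {
    assert_eq!(lamport_by_deps(&[Ok(4), Ok(9)]), Ok(10));
    assert_eq!(lamport_by_deps(&[Ok(4), Err(42)]), Err(42));
}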

View file

@ -20,7 +20,7 @@ use crate::{
dag::{remove_included_frontiers, Dag}, dag::{remove_included_frontiers, Dag},
event::EventDiff, event::EventDiff,
hierarchy::Hierarchy, hierarchy::Hierarchy,
id::{ClientID, Counter, ID}, id::{Counter, PeerID, ID},
log_store::{encoding::encode_changes::get_lamport_by_deps, ImportContext}, log_store::{encoding::encode_changes::get_lamport_by_deps, ImportContext},
op::{InnerContent, Op}, op::{InnerContent, Op},
span::HasLamportSpan, span::HasLamportSpan,
@ -32,7 +32,7 @@ use super::encode_changes::{ChangeEncoding, DepsEncoding};
type Containers = Vec<ContainerID>; type Containers = Vec<ContainerID>;
type ClientIdx = u32; type ClientIdx = u32;
type Clients = Vec<ClientID>; type Clients = Vec<PeerID>;
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub enum EncodedStateContent { pub enum EncodedStateContent {
@ -56,7 +56,7 @@ impl StateContent {
fn into_encoded( fn into_encoded(
self, self,
key_to_idx: &FxHashMap<InternalString, usize>, key_to_idx: &FxHashMap<InternalString, usize>,
client_id_to_idx: &FxHashMap<ClientID, ClientIdx>, client_id_to_idx: &FxHashMap<PeerID, ClientIdx>,
) -> EncodedStateContent { ) -> EncodedStateContent {
match self { match self {
StateContent::List { pool, state_len } => EncodedStateContent::List { pool, state_len }, StateContent::List { pool, state_len } => EncodedStateContent::List { pool, state_len },
@ -87,7 +87,7 @@ impl StateContent {
} }
impl EncodedStateContent { impl EncodedStateContent {
pub fn into_state(self, keys: &[InternalString], clients: &[ClientID]) -> StateContent { pub fn into_state(self, keys: &[InternalString], clients: &[PeerID]) -> StateContent {
match self { match self {
EncodedStateContent::List { pool, state_len } => StateContent::List { pool, state_len }, EncodedStateContent::List { pool, state_len } => StateContent::List { pool, state_len },
EncodedStateContent::Map { EncodedStateContent::Map {
@ -194,7 +194,7 @@ fn convert_inner_content(
pub(super) fn encode_snapshot(store: &LogStore, gc: bool) -> Result<Vec<u8>, LoroError> { pub(super) fn encode_snapshot(store: &LogStore, gc: bool) -> Result<Vec<u8>, LoroError> {
debug_log::debug_dbg!(&store.vv); debug_log::debug_dbg!(&store.vv);
debug_log::debug_dbg!(&store.changes); debug_log::debug_dbg!(&store.changes);
let mut client_id_to_idx: FxHashMap<ClientID, ClientIdx> = FxHashMap::default(); let mut client_id_to_idx: FxHashMap<PeerID, ClientIdx> = FxHashMap::default();
let mut clients = Vec::with_capacity(store.changes.len()); let mut clients = Vec::with_capacity(store.changes.len());
let mut change_num = 0; let mut change_num = 0;
for (key, changes) in store.changes.iter() { for (key, changes) in store.changes.iter() {
@ -229,17 +229,17 @@ pub(super) fn encode_snapshot(store: &LogStore, gc: bool) -> Result<Vec<u8>, Lor
let mut deps = Vec::with_capacity(change_num); let mut deps = Vec::with_capacity(change_num);
for (client_idx, (_, change_vec)) in store.changes.iter().enumerate() { for (client_idx, (_, change_vec)) in store.changes.iter().enumerate() {
for change in change_vec.iter() { for change in change_vec.iter() {
let client_id = change.id.client_id; let client_id = change.id.peer;
let mut op_len = 0; let mut op_len = 0;
let mut deps_len = 0; let mut deps_len = 0;
let mut dep_on_self = false; let mut dep_on_self = false;
for dep in change.deps.iter() { for dep in change.deps.iter() {
// the first change will encode the self-client deps // the first change will encode the self-client deps
if dep.client_id == client_id { if dep.peer == client_id {
dep_on_self = true; dep_on_self = true;
} else { } else {
deps.push(DepsEncoding::new( deps.push(DepsEncoding::new(
*client_id_to_idx.get(&dep.client_id).unwrap(), *client_id_to_idx.get(&dep.peer).unwrap(),
dep.counter, dep.counter,
)); ));
deps_len += 1; deps_len += 1;
@ -433,7 +433,10 @@ pub(super) fn decode_snapshot(
deps.push(ID::new(client_id, counter - 1)); deps.push(ID::new(client_id, counter - 1));
} }
let change = Change { let change = Change {
id: ID { client_id, counter }, id: ID {
peer: client_id,
counter,
},
// calc lamport after parsing all changes // calc lamport after parsing all changes
lamport: 0, lamport: 0,
timestamp, timestamp,
@ -528,7 +531,7 @@ fn load_snapshot(
new_store: &mut LogStore, new_store: &mut LogStore,
new_hierarchy: &mut Hierarchy, new_hierarchy: &mut Hierarchy,
vv: VersionVector, vv: VersionVector,
changes: FxHashMap<ClientID, RleVecWithIndex<Change, ChangeMergeCfg>>, changes: FxHashMap<PeerID, RleVecWithIndex<Change, ChangeMergeCfg>>,
containers: Vec<ContainerID>, containers: Vec<ContainerID>,
container_states: Vec<EncodedStateContent>, container_states: Vec<EncodedStateContent>,
keys: &[InternalString], keys: &[InternalString],
@ -579,7 +582,7 @@ fn load_snapshot(
fn calc_vv( fn calc_vv(
changes_encoding: &[ChangeEncoding], changes_encoding: &[ChangeEncoding],
ops_encoding: &[SnapshotOpEncoding], ops_encoding: &[SnapshotOpEncoding],
clients: &[ClientID], clients: &[PeerID],
idx_to_container_type: &FxHashMap<usize, ContainerType>, idx_to_container_type: &FxHashMap<usize, ContainerType>,
) -> VersionVector { ) -> VersionVector {
let mut vv = FxHashMap::default(); let mut vv = FxHashMap::default();

View file

@ -6,7 +6,7 @@ use tracing::instrument;
use crate::{ use crate::{
change::{Change, Lamport, Timestamp}, change::{Change, Lamport, Timestamp},
container::ContainerID, container::ContainerID,
id::{ClientID, Counter, ID}, id::{Counter, PeerID, ID},
log_store::RemoteClientChanges, log_store::RemoteClientChanges,
op::{RemoteContent, RemoteOp}, op::{RemoteContent, RemoteOp},
version::Frontiers, version::Frontiers,
@ -27,7 +27,7 @@ struct EncodedClientChanges {
#[derive(Serialize, Deserialize, Debug)] #[derive(Serialize, Deserialize, Debug)]
struct FirstChangeInfo { struct FirstChangeInfo {
pub(crate) client: ClientID, pub(crate) client: PeerID,
pub(crate) counter: Counter, pub(crate) counter: Counter,
pub(crate) lamport: Lamport, pub(crate) lamport: Lamport,
pub(crate) timestamp: Timestamp, pub(crate) timestamp: Timestamp,
@ -91,7 +91,7 @@ where
I: Iterator<Item = Change<RemoteOp>>, I: Iterator<Item = Change<RemoteOp>>,
{ {
let first_change = changes.next().unwrap(); let first_change = changes.next().unwrap();
let this_client_id = first_change.id.client_id; let this_client_id = first_change.id.peer;
let mut data = Vec::with_capacity(changes.size_hint().0 + 1); let mut data = Vec::with_capacity(changes.size_hint().0 + 1);
let mut last_change = first_change.clone(); let mut last_change = first_change.clone();
data.push(EncodedChange { data.push(EncodedChange {
@ -161,7 +161,7 @@ fn convert_encoded_to_changes(changes: EncodedClientChanges) -> Vec<Change<Remot
} }
let change = Change { let change = Change {
id: ID { id: ID {
client_id: changes.meta.client, peer: changes.meta.client,
counter: start_counter, counter: start_counter,
}, },
lamport: last_lamport + encoded.lamport_delta, lamport: last_lamport + encoded.lamport_delta,

View file

@ -1,7 +1,7 @@
use crate::change::Change; use crate::change::Change;
use crate::event::EventDiff; use crate::event::EventDiff;
use crate::hierarchy::Hierarchy; use crate::hierarchy::Hierarchy;
use crate::id::{ClientID, Counter, ID}; use crate::id::{Counter, PeerID, ID};
use crate::op::RemoteOp; use crate::op::RemoteOp;
use crate::span::{CounterSpan, HasCounter, HasCounterSpan}; use crate::span::{CounterSpan, HasCounter, HasCounterSpan};
use crate::version::PatchedVersionVector; use crate::version::PatchedVersionVector;
@ -292,10 +292,7 @@ impl LogStore {
} }
} }
current_vv.set_end(ID::new( current_vv.set_end(ID::new(change.id.peer, end as Counter + change.id.counter));
change.id.client_id,
end as Counter + change.id.counter,
));
} }
debug_log::group!("apply effects"); debug_log::group!("apply effects");
let mut queue: VecDeque<_> = container_map.into_values().collect(); let mut queue: VecDeque<_> = container_map.into_values().collect();
@ -367,7 +364,7 @@ impl LogStore {
// sort changes by lamport from small to large // sort changes by lamport from small to large
.sorted_by(|a, b| a.lamport.cmp(&b.lamport)) .sorted_by(|a, b| a.lamport.cmp(&b.lamport))
.for_each(|mut c| { .for_each(|mut c| {
let c_client_id = c.id.client_id; let c_client_id = c.id.peer;
if pending_clients.contains(&c_client_id) { if pending_clients.contains(&c_client_id) {
self.pending_changes.get_mut(&c_client_id).unwrap().push(c); self.pending_changes.get_mut(&c_client_id).unwrap().push(c);
return; return;
@ -401,7 +398,7 @@ impl LogStore {
fn try_apply_pending( fn try_apply_pending(
&mut self, &mut self,
client_id: &ClientID, client_id: &PeerID,
latest_vv: &mut VersionVector, latest_vv: &mut VersionVector,
retain_changes: &mut RemoteClientChanges, retain_changes: &mut RemoteClientChanges,
) { ) {
@ -414,7 +411,7 @@ impl LogStore {
match can_remote_change_be_applied(latest_vv, peek_c) { match can_remote_change_be_applied(latest_vv, peek_c) {
ChangeApplyState::Directly => { ChangeApplyState::Directly => {
let c = may_apply_iter.next().unwrap(); let c = may_apply_iter.next().unwrap();
let c_client_id = c.id.client_id; let c_client_id = c.id.peer;
latest_vv.set_end(c.id_end()); latest_vv.set_end(c.id_end());
// other pending // other pending
retain_changes retain_changes
@ -465,14 +462,14 @@ enum ChangeApplyState {
Existing, Existing,
Directly, Directly,
/// The client id of first missing dep /// The client id of first missing dep
Future(ClientID), Future(PeerID),
} }
fn can_remote_change_be_applied( fn can_remote_change_be_applied(
vv: &VersionVector, vv: &VersionVector,
change: &mut Change<RemoteOp>, change: &mut Change<RemoteOp>,
) -> ChangeApplyState { ) -> ChangeApplyState {
let change_client_id = change.id.client_id; let change_client_id = change.id.peer;
let CounterSpan { start, end } = change.ctr_span(); let CounterSpan { start, end } = change.ctr_span();
let vv_latest_ctr = vv.get(&change_client_id).copied().unwrap_or(0); let vv_latest_ctr = vv.get(&change_client_id).copied().unwrap_or(0);
if vv_latest_ctr < start { if vv_latest_ctr < start {
@ -482,9 +479,9 @@ fn can_remote_change_be_applied(
return ChangeApplyState::Existing; return ChangeApplyState::Existing;
} }
for dep in change.deps.iter() { for dep in change.deps.iter() {
let dep_vv_latest_ctr = vv.get(&dep.client_id).copied().unwrap_or(0); let dep_vv_latest_ctr = vv.get(&dep.peer).copied().unwrap_or(0);
if dep_vv_latest_ctr - 1 < dep.counter { if dep_vv_latest_ctr - 1 < dep.counter {
return ChangeApplyState::Future(dep.client_id); return ChangeApplyState::Future(dep.peer);
} }
} }
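
`can_remote_change_be_applied` above is the gatekeeper for remote changes: a change is `Existing` if the local version vector already covers its whole counter span, `Directly` applicable if the author's preceding ops and every dependency are covered, and otherwise `Future(peer)`, naming the first peer whose ops are still missing so the change can be parked in `pending_changes`. A hedged sketch of that decision (the middle of the function is elided by the hunk, so the self-gap branch here is an assumption), using the exclusive-end convention of VersionVector:

use std::collections::HashMap;

type PeerID = u64;
type Counter = i32;

struct RemoteChange {
    peer: PeerID,
    start: Counter,               // first counter covered by the change
    end: Counter,                 // one past the last covered counter
    deps: Vec<(PeerID, Counter)>, // last op of each dependency
}

enum ApplyState {
    Existing,       // everything in the change is already known locally
    Directly,       // all prerequisites present, apply now
    Future(PeerID), // first peer whose ops are still missing
}

fn can_apply(vv: &HashMap<PeerID, Counter>, c: &RemoteChange) -> ApplyState {
    let known = vv.get(&c.peer).copied().unwrap_or(0);
    if known < c.start {
        // assumed branch: earlier ops from the author itself are missing
        return ApplyState::Future(c.peer);
    }
    if known >= c.end {
        return ApplyState::Existing;
    }
    for &(peer, counter) in &c.deps {
        // same test as `dep_vv_latest_ctr - 1 < dep.counter` in the diff
        if vv.get(&peer).copied().unwrap_or(0) <= counter {
            return ApplyState::Future(peer);
        }
    }
    ApplyState::Directly
}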

View file

@ -1,6 +1,6 @@
use crate::Op; use crate::Op;
use crate::id::ClientID; use crate::id::PeerID;
use crate::op::RichOp; use crate::op::RichOp;
@ -51,7 +51,7 @@ pub struct OpSpanIter<'a> {
impl<'a> OpSpanIter<'a> { impl<'a> OpSpanIter<'a> {
pub fn new( pub fn new(
changes: &'a FxHashMap<ClientID, RleVecWithIndex<Change, ChangeMergeCfg>>, changes: &'a FxHashMap<PeerID, RleVecWithIndex<Change, ChangeMergeCfg>>,
target_span: IdSpan, target_span: IdSpan,
) -> Self { ) -> Self {
let rle_changes = changes.get(&target_span.client_id).unwrap(); let rle_changes = changes.get(&target_span.client_id).unwrap();

View file

@ -19,7 +19,7 @@ use crate::{
configure::Configure, configure::Configure,
container::{list::List, map::Map, text::Text, ContainerIdRaw, ContainerType}, container::{list::List, map::Map, text::Text, ContainerIdRaw, ContainerType},
event::{Observer, SubscriptionID}, event::{Observer, SubscriptionID},
id::ClientID, id::PeerID,
op::RemoteOp, op::RemoteOp,
LogStore, VersionVector, LogStore, VersionVector,
}; };
@ -37,7 +37,7 @@ impl Default for LoroCore {
impl LoroCore { impl LoroCore {
#[inline] #[inline]
pub fn new(cfg: Configure, client_id: Option<ClientID>) -> Self { pub fn new(cfg: Configure, client_id: Option<PeerID>) -> Self {
Self { Self {
log_store: LogStore::new(cfg, client_id), log_store: LogStore::new(cfg, client_id),
hierarchy: Default::default(), hierarchy: Default::default(),
@ -45,7 +45,7 @@ impl LoroCore {
} }
#[inline] #[inline]
pub fn client_id(&self) -> ClientID { pub fn client_id(&self) -> PeerID {
self.log_store.read().unwrap().this_client_id() self.log_store.read().unwrap().this_client_id()
} }
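
For callers, the visible effect here is only the type in the signatures: a document can still be created with an explicit id or let the library pick one. A hedged usage sketch against the API shown above, assuming the crate is consumed as `loro_internal` and that `Configure` implements `Default` (the `impl Default for LoroCore` hunk suggests a default configuration exists):

use loro_internal::{id::PeerID, LoroCore};

fn main() {
    // Let the library choose the peer id.
    let doc = LoroCore::default();
    let _peer: PeerID = doc.client_id();

    // Or pin it explicitly, e.g. for deterministic tests.
    let pinned = LoroCore::new(Default::default(), Some(7));
    assert_eq!(pinned.client_id(), 7);
}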

View file

@ -4,7 +4,7 @@ use crate::{
registry::{ContainerIdx, ContainerInstance}, registry::{ContainerIdx, ContainerInstance},
ContainerID, ContainerTrait, ContainerID, ContainerTrait,
}, },
id::{ClientID, Counter, ID}, id::{Counter, PeerID, ID},
span::{HasCounter, HasId, HasLamport}, span::{HasCounter, HasId, HasLamport},
}; };
use rle::{HasIndex, HasLength, Mergable, RleVec, Sliceable}; use rle::{HasIndex, HasLength, Mergable, RleVec, Sliceable};
@ -34,7 +34,7 @@ pub struct RemoteOp {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct RichOp<'a> { pub struct RichOp<'a> {
op: &'a Op, op: &'a Op,
client_id: ClientID, client_id: PeerID,
lamport: Lamport, lamport: Lamport,
timestamp: Timestamp, timestamp: Timestamp,
start: usize, start: usize,
@ -45,7 +45,7 @@ pub struct RichOp<'a> {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct OwnedRichOp { pub struct OwnedRichOp {
pub op: Op, pub op: Op,
pub client_id: ClientID, pub client_id: PeerID,
pub lamport: Lamport, pub lamport: Lamport,
pub timestamp: Timestamp, pub timestamp: Timestamp,
} }
@ -188,7 +188,7 @@ impl HasCounter for RemoteOp {
impl<'a> HasId for RichOp<'a> { impl<'a> HasId for RichOp<'a> {
fn id_start(&self) -> ID { fn id_start(&self) -> ID {
ID { ID {
client_id: self.client_id, peer: self.client_id,
counter: self.op.counter + self.start as Counter, counter: self.op.counter + self.start as Counter,
} }
} }
@ -207,7 +207,7 @@ impl<'a> HasLamport for RichOp<'a> {
} }
impl<'a> RichOp<'a> { impl<'a> RichOp<'a> {
pub fn new(op: &'a Op, client_id: ClientID, lamport: Lamport, timestamp: Timestamp) -> Self { pub fn new(op: &'a Op, client_id: PeerID, lamport: Lamport, timestamp: Timestamp) -> Self {
RichOp { RichOp {
op, op,
client_id, client_id,
@ -222,7 +222,7 @@ impl<'a> RichOp<'a> {
let diff = op.counter - change.id.counter; let diff = op.counter - change.id.counter;
RichOp { RichOp {
op, op,
client_id: change.id.client_id, client_id: change.id.peer,
lamport: change.lamport + diff as Lamport, lamport: change.lamport + diff as Lamport,
timestamp: change.timestamp, timestamp: change.timestamp,
start: 0, start: 0,
@ -240,7 +240,7 @@ impl<'a> RichOp<'a> {
let op_slice_end = (end - op_index_in_change).clamp(0, op.atom_len() as i32); let op_slice_end = (end - op_index_in_change).clamp(0, op.atom_len() as i32);
RichOp { RichOp {
op, op,
client_id: change.id.client_id, client_id: change.id.peer,
lamport: change.lamport + op_index_in_change as Lamport, lamport: change.lamport + op_index_in_change as Lamport,
timestamp: change.timestamp, timestamp: change.timestamp,
start: op_slice_start as usize, start: op_slice_start as usize,
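
`RichOp::new_by_change` above derives an op's metadata from its containing change by a simple offset: the op sits `op.counter - change.id.counter` atomic steps into the change, so its lamport is the change's lamport plus that offset, while its peer and timestamp are inherited unchanged. A small sketch of that arithmetic:

type Counter = i32;
type Lamport = u32;

// Lamport of an op inside a change = change lamport + (op counter - change start).
fn op_lamport(change_start: Counter, change_lamport: Lamport, op_counter: Counter) -> Lamport {
    let diff = op_counter - change_start;
    change_lamport + diff as Lamport
}

fn main() {
    // A change starting at counter 10 with lamport 100: its op at counter 13
    // carries lamport 103.
    assert_eq!(op_lamport(10, 100, 13), 103);
}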

View file

@ -11,7 +11,7 @@ use smallvec::SmallVec;
use crate::change::{Lamport, Timestamp}; use crate::change::{Lamport, Timestamp};
use crate::dag::{Dag, DagNode}; use crate::dag::{Dag, DagNode};
use crate::event::Diff; use crate::event::Diff;
use crate::id::{ClientID, Counter, ID}; use crate::id::{Counter, PeerID, ID};
use crate::log_store::ClientChanges; use crate::log_store::ClientChanges;
use crate::span::{HasId, HasLamport}; use crate::span::{HasId, HasLamport};
use crate::version::{Frontiers, VersionVector}; use crate::version::{Frontiers, VersionVector};
@ -36,14 +36,14 @@ pub struct OpLog {
/// It's faster to answer the question like what's the LCA version /// It's faster to answer the question like what's the LCA version
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone, Default)]
pub struct AppDag { pub struct AppDag {
map: FxHashMap<ClientID, RleVec<[AppDagNode; 1]>>, map: FxHashMap<PeerID, RleVec<[AppDagNode; 1]>>,
frontiers: Frontiers, frontiers: Frontiers,
vv: VersionVector, vv: VersionVector,
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct AppDagNode { pub struct AppDagNode {
client: ClientID, client: PeerID,
cnt: Counter, cnt: Counter,
lamport: Lamport, lamport: Lamport,
parents: SmallVec<[ID; 2]>, parents: SmallVec<[ID; 2]>,

View file

@ -25,7 +25,7 @@ impl Sliceable for AppDagNode {
impl HasId for AppDagNode { impl HasId for AppDagNode {
fn id_start(&self) -> ID { fn id_start(&self) -> ID {
ID { ID {
client_id: self.client, peer: self.client,
counter: self.cnt, counter: self.cnt,
} }
} }
@ -63,7 +63,10 @@ impl Dag for AppDag {
} }
fn get(&self, id: ID) -> Option<&Self::Node> { fn get(&self, id: ID) -> Option<&Self::Node> {
let ID { client_id, counter } = id; let ID {
peer: client_id,
counter,
} = id;
self.map self.map
.get(&client_id) .get(&client_id)
.and_then(|rle| rle.get(counter).map(|x| x.element)) .and_then(|rle| rle.get(counter).map(|x| x.element))

View file

@ -2,7 +2,7 @@ use std::fmt::Debug;
use crate::{ use crate::{
change::Lamport, change::Lamport,
id::{ClientID, Counter, ID}, id::{Counter, PeerID, ID},
version::IdSpanVector, version::IdSpanVector,
}; };
use rle::{HasLength, Mergable, Slice, Sliceable}; use rle::{HasLength, Mergable, Slice, Sliceable};
@ -170,13 +170,13 @@ impl Mergable for CounterSpan {
/// We need this because it'll make merging deletions easier. /// We need this because it'll make merging deletions easier.
#[derive(Clone, Copy, PartialEq, Eq, Debug)] #[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct IdSpan { pub struct IdSpan {
pub client_id: ClientID, pub client_id: PeerID,
pub counter: CounterSpan, pub counter: CounterSpan,
} }
impl IdSpan { impl IdSpan {
#[inline] #[inline]
pub fn new(client_id: ClientID, from: Counter, to: Counter) -> Self { pub fn new(client_id: PeerID, from: Counter, to: Counter) -> Self {
Self { Self {
client_id, client_id,
counter: CounterSpan { counter: CounterSpan {
@ -188,7 +188,7 @@ impl IdSpan {
#[inline] #[inline]
pub fn contains(&self, id: ID) -> bool { pub fn contains(&self, id: ID) -> bool {
self.client_id == id.client_id && self.counter.contains(id.counter) self.client_id == id.peer && self.counter.contains(id.counter)
} }
#[inline(always)] #[inline(always)]
@ -292,7 +292,7 @@ pub trait HasIdSpan: HasId + HasLength {
fn intersect<T: HasIdSpan>(&self, other: &T) -> bool { fn intersect<T: HasIdSpan>(&self, other: &T) -> bool {
let self_start = self.id_start(); let self_start = self.id_start();
let other_start = other.id_start(); let other_start = other.id_start();
if self_start.client_id != other_start.client_id { if self_start.peer != other_start.peer {
false false
} else { } else {
let self_start = self_start.counter; let self_start = self_start.counter;
@ -306,7 +306,7 @@ pub trait HasIdSpan: HasId + HasLength {
fn id_span(&self) -> IdSpan { fn id_span(&self) -> IdSpan {
let id = self.id_start(); let id = self.id_start();
IdSpan::new( IdSpan::new(
id.client_id, id.peer,
id.counter, id.counter,
id.counter + self.content_len() as Counter, id.counter + self.content_len() as Counter,
) )
@ -324,7 +324,7 @@ pub trait HasIdSpan: HasId + HasLength {
fn contains_id(&self, id: ID) -> bool { fn contains_id(&self, id: ID) -> bool {
let id_start = self.id_start(); let id_start = self.id_start();
if id.client_id != id_start.client_id { if id.peer != id_start.peer {
return false; return false;
} }
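
The span helpers above all lean on the same arithmetic: an entity starting at (peer, counter) with `len` atomic items covers the counters from `counter` up to `counter + len` on that one peer, and an ID belongs to it only if the peer matches and the counter falls inside that range. A hedged sketch, assuming the usual half-open convention for CounterSpan:

type PeerID = u64;
type Counter = i32;

// True when (id_peer, id_counter) falls inside the span starting at
// (peer, start) with `len` atomic items, i.e. counters [start, start + len).
fn span_contains(peer: PeerID, start: Counter, len: usize, id_peer: PeerID, id_counter: Counter) -> bool {
    id_peer == peer && id_counter >= start && id_counter < start + len as Counter
}

fn main() {
    assert!(span_contains(1, 5, 3, 1, 7));  // covers counters 5, 6, 7
    assert!(!span_contains(1, 5, 3, 1, 8)); // one past the end
    assert!(!span_contains(1, 5, 3, 2, 6)); // different peer
}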

View file

@ -8,7 +8,7 @@ use crate::{
container::{registry::ContainerIdx, ContainerID}, container::{registry::ContainerIdx, ContainerID},
event::{Diff, EventDiff, RawEvent}, event::{Diff, EventDiff, RawEvent},
hierarchy::Hierarchy, hierarchy::Hierarchy,
id::ClientID, id::PeerID,
log_store::{LoroEncoder, RemoteClientChanges}, log_store::{LoroEncoder, RemoteClientChanges},
version::Frontiers, version::Frontiers,
ContainerType, InternalString, List, LogStore, LoroCore, LoroError, Map, Text, ContainerType, InternalString, List, LogStore, LoroCore, LoroError, Map, Text,
@ -105,7 +105,7 @@ impl<T: AsRef<str>> From<T> for Origin {
} }
pub struct Transaction { pub struct Transaction {
pub(crate) client_id: ClientID, pub(crate) client_id: PeerID,
pub(crate) store: Weak<RwLock<LogStore>>, pub(crate) store: Weak<RwLock<LogStore>>,
pub(crate) hierarchy: Weak<Mutex<Hierarchy>>, pub(crate) hierarchy: Weak<Mutex<Hierarchy>>,
pub(crate) origin: Option<Origin>, pub(crate) origin: Option<Origin>,

View file

@ -15,7 +15,7 @@ use crate::{
change::Lamport, change::Lamport,
id::{Counter, ID}, id::{Counter, ID},
span::{CounterSpan, HasId, HasIdSpan, IdSpan}, span::{CounterSpan, HasId, HasIdSpan, IdSpan},
ClientID, LoroError, LoroError, PeerID,
}; };
/// [VersionVector](https://en.wikipedia.org/wiki/Version_vector) /// [VersionVector](https://en.wikipedia.org/wiki/Version_vector)
@ -32,7 +32,7 @@ use crate::{
/// see also [im]. /// see also [im].
#[repr(transparent)] #[repr(transparent)]
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VersionVector(FxHashMap<ClientID, Counter>); pub struct VersionVector(FxHashMap<PeerID, Counter>);
#[derive(Debug, Clone, Default, Serialize, Deserialize)] #[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Frontiers(SmallVec<[ID; 2]>); pub struct Frontiers(SmallVec<[ID; 2]>);
@ -120,7 +120,7 @@ impl PartialEq for VersionVector {
impl Eq for VersionVector {} impl Eq for VersionVector {}
impl Deref for VersionVector { impl Deref for VersionVector {
type Target = FxHashMap<ClientID, Counter>; type Target = FxHashMap<PeerID, Counter>;
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
&self.0 &self.0
@ -128,21 +128,21 @@ impl Deref for VersionVector {
} }
// TODO: wrap this type? // TODO: wrap this type?
pub type IdSpanVector = FxHashMap<ClientID, CounterSpan>; pub type IdSpanVector = FxHashMap<PeerID, CounterSpan>;
impl HasId for (&ClientID, &CounterSpan) { impl HasId for (&PeerID, &CounterSpan) {
fn id_start(&self) -> ID { fn id_start(&self) -> ID {
ID { ID {
client_id: *self.0, peer: *self.0,
counter: self.1.min(), counter: self.1.min(),
} }
} }
} }
impl HasId for (ClientID, CounterSpan) { impl HasId for (PeerID, CounterSpan) {
fn id_start(&self) -> ID { fn id_start(&self) -> ID {
ID { ID {
client_id: self.0, peer: self.0,
counter: self.1.min(), counter: self.1.min(),
} }
} }
@ -192,7 +192,7 @@ impl VersionVectorDiff {
} }
} }
fn subtract_start(m: &mut FxHashMap<ClientID, CounterSpan>, target: IdSpan) { fn subtract_start(m: &mut FxHashMap<PeerID, CounterSpan>, target: IdSpan) {
if let Some(span) = m.get_mut(&target.client_id) { if let Some(span) = m.get_mut(&target.client_id) {
if span.start < target.counter.end { if span.start < target.counter.end {
span.start = target.counter.end; span.start = target.counter.end;
@ -200,7 +200,7 @@ fn subtract_start(m: &mut FxHashMap<ClientID, CounterSpan>, target: IdSpan) {
} }
} }
fn merge(m: &mut FxHashMap<ClientID, CounterSpan>, mut target: IdSpan) { fn merge(m: &mut FxHashMap<PeerID, CounterSpan>, mut target: IdSpan) {
target.normalize_(); target.normalize_();
if let Some(span) = m.get_mut(&target.client_id) { if let Some(span) = m.get_mut(&target.client_id) {
span.start = span.start.min(target.counter.start); span.start = span.start.min(target.counter.start);
@ -378,7 +378,7 @@ impl VersionVector {
.filter_map(|(client_id, &counter)| { .filter_map(|(client_id, &counter)| {
if counter > 0 { if counter > 0 {
Some(ID { Some(ID {
client_id: *client_id, peer: *client_id,
counter: counter - 1, counter: counter - 1,
}) })
} else { } else {
@ -396,11 +396,11 @@ impl VersionVector {
/// set the inclusive ending point. target id will be included by self /// set the inclusive ending point. target id will be included by self
#[inline] #[inline]
pub fn set_last(&mut self, id: ID) { pub fn set_last(&mut self, id: ID) {
self.0.insert(id.client_id, id.counter + 1); self.0.insert(id.peer, id.counter + 1);
} }
#[inline] #[inline]
pub fn get_last(&self, client_id: ClientID) -> Option<Counter> { pub fn get_last(&self, client_id: PeerID) -> Option<Counter> {
self.0 self.0
.get(&client_id) .get(&client_id)
.and_then(|&x| if x == 0 { None } else { Some(x - 1) }) .and_then(|&x| if x == 0 { None } else { Some(x - 1) })
@ -409,14 +409,14 @@ impl VersionVector {
/// set the exclusive ending point. target id will NOT be included by self /// set the exclusive ending point. target id will NOT be included by self
#[inline] #[inline]
pub fn set_end(&mut self, id: ID) { pub fn set_end(&mut self, id: ID) {
self.0.insert(id.client_id, id.counter); self.0.insert(id.peer, id.counter);
} }
/// Update the end counter of the given client if the end is greater. /// Update the end counter of the given client if the end is greater.
/// Return whether updated /// Return whether updated
#[inline] #[inline]
pub fn try_update_last(&mut self, id: ID) -> bool { pub fn try_update_last(&mut self, id: ID) -> bool {
if let Some(end) = self.0.get_mut(&id.client_id) { if let Some(end) = self.0.get_mut(&id.peer) {
if *end < id.counter + 1 { if *end < id.counter + 1 {
*end = id.counter + 1; *end = id.counter + 1;
true true
@ -424,7 +424,7 @@ impl VersionVector {
false false
} }
} else { } else {
self.0.insert(id.client_id, id.counter + 1); self.0.insert(id.peer, id.counter + 1);
true true
} }
} }
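
The doc comments above pin down the version-vector convention the rest of the diff relies on: the stored counter is an exclusive end, so including ID (peer, c) means storing c + 1 (`set_last`), storing c verbatim excludes it (`set_end`), and `try_update_last` only ever moves the end forward. A tiny sketch of that convention, including the membership test that `includes_id` performs just below:

use std::collections::HashMap;

type PeerID = u64;
type Counter = i32;

// Per peer, store one past the last known counter (exclusive end).
#[derive(Default)]
struct MiniVv(HashMap<PeerID, Counter>);

impl MiniVv {
    fn set_last(&mut self, peer: PeerID, counter: Counter) {
        self.0.insert(peer, counter + 1); // include (peer, counter)
    }
    fn set_end(&mut self, peer: PeerID, counter: Counter) {
        self.0.insert(peer, counter); // exclude (peer, counter)
    }
    fn includes(&self, peer: PeerID, counter: Counter) -> bool {
        self.0.get(&peer).copied().unwrap_or(0) > counter
    }
}

fn main() {
    let mut vv = MiniVv::default();
    vv.set_last(1, 4); // ops 0..=4 of peer 1 are known
    assert!(vv.includes(1, 4));
    vv.set_end(2, 4);  // ops 0..=3 of peer 2 are known
    assert!(!vv.includes(2, 4));
}
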
@ -457,7 +457,7 @@ impl VersionVector {
} }
pub fn includes_id(&self, id: ID) -> bool { pub fn includes_id(&self, id: ID) -> bool {
if let Some(end) = self.get(&id.client_id) { if let Some(end) = self.get(&id.peer) {
if *end > id.counter { if *end > id.counter {
return true; return true;
} }
@ -467,7 +467,7 @@ impl VersionVector {
pub fn intersect_span<S: HasIdSpan>(&self, target: &S) -> Option<CounterSpan> { pub fn intersect_span<S: HasIdSpan>(&self, target: &S) -> Option<CounterSpan> {
let id = target.id_start(); let id = target.id_start();
if let Some(end) = self.get(&id.client_id) { if let Some(end) = self.get(&id.peer) {
if *end > id.counter { if *end > id.counter {
return Some(CounterSpan { return Some(CounterSpan {
start: id.counter, start: id.counter,
@ -492,7 +492,7 @@ impl VersionVector {
} }
pub fn extend_to_include_last_id(&mut self, id: ID) { pub fn extend_to_include_last_id(&mut self, id: ID) {
if let Some(counter) = self.get_mut(&id.client_id) { if let Some(counter) = self.get_mut(&id.peer) {
if *counter <= id.counter { if *counter <= id.counter {
*counter = id.counter + 1; *counter = id.counter + 1;
} }
@ -577,8 +577,8 @@ impl Default for VersionVector {
} }
} }
impl From<FxHashMap<ClientID, Counter>> for VersionVector { impl From<FxHashMap<PeerID, Counter>> for VersionVector {
fn from(map: FxHashMap<ClientID, Counter>) -> Self { fn from(map: FxHashMap<PeerID, Counter>) -> Self {
let mut im_map = FxHashMap::default(); let mut im_map = FxHashMap::default();
for (client_id, counter) in map { for (client_id, counter) in map {
im_map.insert(client_id, counter); im_map.insert(client_id, counter);
@ -613,7 +613,7 @@ impl FromIterator<ID> for VersionVector {
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Serialize, Deserialize)] #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Serialize, Deserialize)]
pub(crate) struct TotalOrderStamp { pub(crate) struct TotalOrderStamp {
pub(crate) lamport: Lamport, pub(crate) lamport: Lamport,
pub(crate) client_id: ClientID, pub(crate) client_id: PeerID,
} }
pub fn are_frontiers_eq(a: &[ID], b: &[ID]) -> bool { pub fn are_frontiers_eq(a: &[ID], b: &[ID]) -> bool {
@ -704,19 +704,19 @@ impl PatchedVersionVector {
#[inline] #[inline]
pub fn extend_to_include_last_id(&mut self, id: ID) { pub fn extend_to_include_last_id(&mut self, id: ID) {
self.patch.extend_to_include_last_id(id); self.patch.extend_to_include_last_id(id);
self.omit_if_needless(id.client_id); self.omit_if_needless(id.peer);
} }
#[inline] #[inline]
pub fn set_end(&mut self, id: ID) { pub fn set_end(&mut self, id: ID) {
self.patch.set_end(id); self.patch.set_end(id);
self.omit_if_needless(id.client_id); self.omit_if_needless(id.peer);
} }
#[inline] #[inline]
pub fn set_last(&mut self, id: ID) { pub fn set_last(&mut self, id: ID) {
self.patch.set_last(id); self.patch.set_last(id);
self.omit_if_needless(id.client_id); self.omit_if_needless(id.peer);
} }
#[inline] #[inline]
@ -773,7 +773,7 @@ impl PatchedVersionVector {
} }
#[inline(always)] #[inline(always)]
fn omit_if_needless(&mut self, client_id: ClientID) { fn omit_if_needless(&mut self, client_id: PeerID) {
if let Some(patch_value) = self.patch.get(&client_id) { if let Some(patch_value) = self.patch.get(&client_id) {
if *patch_value == *self.base.get(&client_id).unwrap_or(&0) { if *patch_value == *self.base.get(&client_id).unwrap_or(&0) {
self.patch.remove(&client_id); self.patch.remove(&client_id);
@ -782,14 +782,14 @@ impl PatchedVersionVector {
} }
#[inline] #[inline]
pub fn get(&self, client_id: &ClientID) -> Option<&Counter> { pub fn get(&self, client_id: &PeerID) -> Option<&Counter> {
self.patch self.patch
.get(client_id) .get(client_id)
.or_else(|| self.base.get(client_id)) .or_else(|| self.base.get(client_id))
} }
#[inline] #[inline]
pub fn insert(&mut self, client_id: ClientID, counter: Counter) { pub fn insert(&mut self, client_id: PeerID, counter: Counter) {
self.patch.insert(client_id, counter); self.patch.insert(client_id, counter);
self.omit_if_needless(client_id); self.omit_if_needless(client_id);
} }
@ -799,7 +799,7 @@ impl PatchedVersionVector {
self.patch.includes_id(id) || self.base.includes_id(id) self.patch.includes_id(id) || self.base.includes_id(id)
} }
pub fn iter(&self) -> impl Iterator<Item = (&ClientID, &Counter)> { pub fn iter(&self) -> impl Iterator<Item = (&PeerID, &Counter)> {
self.patch.iter().chain( self.patch.iter().chain(
self.base self.base
.iter() .iter()
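
PatchedVersionVector, touched in the hunks above, layers a sparse `patch` over a shared `base`: reads prefer the patch and fall back to the base, writes go to the patch, and `omit_if_needless` drops a patch entry as soon as it matches the base again, so only the divergence is stored. A hedged sketch of that overlay with plain HashMaps instead of Loro's types:

use std::collections::HashMap;

type PeerID = u64;
type Counter = i32;

// Only counters that differ from `base` live in `patch`.
struct MiniPatched {
    base: HashMap<PeerID, Counter>,
    patch: HashMap<PeerID, Counter>,
}

impl MiniPatched {
    fn get(&self, peer: &PeerID) -> Option<&Counter> {
        self.patch.get(peer).or_else(|| self.base.get(peer))
    }

    fn insert(&mut self, peer: PeerID, counter: Counter) {
        self.patch.insert(peer, counter);
        self.omit_if_needless(peer);
    }

    // Drop the patch entry once it no longer differs from the base.
    fn omit_if_needless(&mut self, peer: PeerID) {
        if let Some(v) = self.patch.get(&peer) {
            if *v == *self.base.get(&peer).unwrap_or(&0) {
                self.patch.remove(&peer);
            }
        }
    }
}

fn main() {
    let mut pv = MiniPatched {
        base: HashMap::from([(1, 5)]),
        patch: HashMap::new(),
    };
    pv.insert(1, 8); // differs from base -> kept in the patch
    assert_eq!(pv.get(&1), Some(&8));
    pv.insert(1, 5); // equal to base again -> patch entry removed
    assert!(pv.patch.is_empty());
    assert_eq!(pv.get(&1), Some(&5));
}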

View file

@ -6,8 +6,8 @@ use loro_internal::{
}; };
pub use loro_internal::{ pub use loro_internal::{
container::ContainerIdx, event, id::ClientID, EncodeMode, List, LoroError, LoroValue, Map, container::ContainerIdx, event, id::PeerID, EncodeMode, List, LoroError, LoroValue, Map, Text,
Text, VersionVector, VersionVector,
}; };
#[repr(transparent)] #[repr(transparent)]
@ -16,12 +16,12 @@ pub struct Loro(LoroCore);
impl Loro { impl Loro {
#[inline(always)] #[inline(always)]
pub fn new(cfg: Configure, client_id: Option<ClientID>) -> Self { pub fn new(cfg: Configure, client_id: Option<PeerID>) -> Self {
Self(LoroCore::new(cfg, client_id)) Self(LoroCore::new(cfg, client_id))
} }
#[inline(always)] #[inline(always)]
pub fn client_id(&self) -> ClientID { pub fn client_id(&self) -> PeerID {
self.0.client_id() self.0.client_id()
} }