Mirror of https://github.com/loro-dev/loro.git (synced 2025-02-02 02:59:51 +00:00)
fix: dead loop when importing updates (#570)
Fix the dead loop reported here: https://gist.github.com/sunflowerdeath/c0e2b46b6f5d2e32d368f8e04f730237

# Reason for the Dead Loop in `find_common_ancestor`

### Original Assumptions

1. **Dependency Assumption**
   If a DagNode is depended upon by other nodes, those dependencies only point to the end of the node, never to a position inside it. The system design relies on this assumption.
2. **Node Overlap Assumption**
   DagNodes do not overlap with each other. When DagNodes are retrieved from two different positions:
   - they are either completely identical,
   - or they do not overlap at all;
   - any overlap implies they are the same node.

### Current Issues

1. **Issues Caused by Lazy Loading**
   Lazy loading introduces a new situation. Initially:
   - a given DagNode is not depended upon by any other DagNode,
   - so the node retrieved at first is the complete node.
2. **Impact of Subsequent Loading**
   After additional DagNodes are loaded:
   - a newly loaded DagNode may depend on a position inside the previously complete DagNode,
   - which splits the originally complete DagNode into multiple smaller DagNodes.
3. **Violation of the Original Assumptions**
   When the content of the original DagNode is retrieved again:
   - dependencies from other DagNodes may now point into its middle,
   - and the retrieved DagNodes may overlap.
   Both outcomes violate the original design assumptions.

# Fix Approach

Drop the assumption in the original implementation that overlapping DagNodes cannot occur. When overlapping DagNodes are found, retain the shorter one and remove the longer one.
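The sketch below is a minimal illustration of the ordering rule the fix relies on, not Loro's actual implementation: `Span` and its fields are simplified stand-ins for the internal `OrdIdSpan` type. The idea is that when two spans end at the same op, the shorter span sorts greater, so the max-heap used by `find_common_ancestor` yields it first and the overlapping longer span can be discarded instead of looping forever.

```rust
use std::cmp::Ordering;
use std::collections::BinaryHeap;

// Simplified stand-in for a DAG span: `lamport_last` is the Lamport timestamp
// of the span's last op, `len` is how many ops the span covers.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Span {
    peer: u64,
    lamport_last: u32,
    len: usize,
}

impl Ord for Span {
    fn cmp(&self, other: &Self) -> Ordering {
        // Order by the last op first; when two spans end at the same op,
        // rank the shorter one higher so the max-heap pops it before the
        // longer, overlapping span that lazy loading may have left behind.
        self.lamport_last
            .cmp(&other.lamport_last)
            .then(self.peer.cmp(&other.peer))
            .then(other.len.cmp(&self.len))
    }
}

impl PartialOrd for Span {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    // Both spans end at the same op; the longer one was queued before lazy
    // loading split it, the shorter one was fetched afterwards.
    let long = Span { peer: 1, lamport_last: 10, len: 8 };
    let short = Span { peer: 1, lamport_last: 10, len: 3 };
    let mut queue = BinaryHeap::from([long, short]);
    // The shorter span is popped first; the overlapping longer one can then
    // be dropped when it is peeked.
    assert_eq!(queue.pop().unwrap().len, 3);
    assert_eq!(queue.peek().unwrap().len, 8);
}
```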
Parent: a6f3ddfcd9
Commit: 0325061476

35 changed files with 117 additions and 72 deletions
.changeset/olive-coins-wash.md (new file, 5 lines)
---
"loro-crdt": patch
---

Fix a deadloop case when importing updates (#570)
.github/workflows/release_wasm.yml (vendored, 2 lines changed)
@@ -39,7 +39,7 @@ jobs:
         uses: taiki-e/install-action@v1
         with:
           tool: nextest
-      - run: rustup toolchain install stable --profile minimal
+      - run: rustup toolchain install 1.83.0 --profile minimal
       - run: rustup target add wasm32-unknown-unknown
       - uses: jetli/wasm-bindgen-action@v0.2.0
         with:
Cargo.lock (generated, 9 lines changed)
@@ -146,6 +146,12 @@ version = "0.21.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9"
 
+[[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
 [[package]]
 name = "bench-utils"
 version = "0.1.0"
@@ -635,6 +641,7 @@ name = "examples"
 version = "0.1.0"
 dependencies = [
  "arbitrary",
+ "base64 0.22.1",
  "bench-utils",
  "color-backtrace",
  "criterion 0.4.0",
@@ -1328,7 +1335,7 @@ dependencies = [
  "arbitrary",
  "arbtest",
  "arref",
- "base64",
+ "base64 0.21.5",
 "bench-utils",
 "bytes",
 "criterion 0.5.1",
@@ -138,7 +138,7 @@ impl<'a, V: DeltaValue, Attr: DeltaAttr> Iter<'a, V, Attr> {
     }
 }
 
-impl<'a, V: DeltaValue, Attr: DeltaAttr> Iterator for Iter<'a, V, Attr> {
+impl<V: DeltaValue, Attr: DeltaAttr> Iterator for Iter<'_, V, Attr> {
     type Item = DeltaItem<V, Attr>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -16,6 +16,7 @@ tracing = "0.1.40"
 rand = "0.8.5"
 zstd = "0.13.2"
 ensure-cov = { workspace = true }
+base64 = "0.22.1"
 
 [dev-dependencies]
 tabled = "0.15.0"
crates/examples/examples/issue_stuck.rs (new file, 32 lines)

use base64::engine::general_purpose::STANDARD;
use base64::Engine;
use loro::LoroDoc;

// https://gist.github.com/sunflowerdeath/c0e2b46b6f5d2e32d368f8e04f730237
fn main() {
dev_utils::setup_test_log();
let snapshot = "bG9ybwAAAAAAAAAAAAAAAK+fc78AA+8DAABMT1JPAAQiTRhgQIK0AwAA+loC29/O6L2r0tNnOMf35ey/nMj0vwE0AAwAZ6dJW90Tr9sAAAAAAAgACAEQAduvE91bSadnAQEAAAAAAAUBAAABAAYBBAECAAAFBHRleHQADgEEAgEAAgEAAgEFAgEIAAkISGVsbG9BYSBTAIQICA8XDwRJBFMA+m/crNGs++NE18d7mf3jIOm/FF2xSOwSfLEGAwMBAwEBBgMTAQIDAQIDAQIDAQEKAaLNqFRZtQqLNqEAAS4Cos8ABgEAAwAIAAYBBAECAAAFBHRleHQAEwEEAggABQM6HgQYAggFBAEGBgMAEwZBYSBBYSADQWEgA0FhIANBYSDvAHUXFwMpAwEwnAAEjAAMpADPAQEDBQECAwEoAqFPDwEE/gMPAQQCAQADAbgBAgEFAgEDAARvAIQaGgMyAwElAwsBDG8AEQH6AD8BNAFkAAwa0GQAVgIAdnYE0wH1CDaUusXFxN2EvrEBNtzZxua6/7ii1wE2/gHBOgAMALF8EuxIsV0U8wFkAwgDARsCjQAEnQARAQEAHw6LAAQE/gEbEO4AB1kASAMDAw5ZAATxAQFYAC8BClkADB8iWQAHhAYGDxcSBU8EsgAMrgEEwgD1AAMDAwMBBAEBCAMZAQIBAwMA+w0BCgSizaiUGcUSgziiUGcUcAEuA6EABgEABAAKWAL4BhIBBAIKAAYBOgYYAR4CCgUCCgMAFFACBAwAClQBXxUVBiwGVAEGHzL7AAT3BBABBAIEAAQDvgEGAgQFAgQDAAhfAIG/6SDj/Zl7x7MBZAYFCQIpA1IBBPEABBEB+wcDAQEEAQMBAgEIAZ6AAQoABgEAAQAE2wARD24ATwMDChJtAAt1BgYDEQMBJm0ABGUABHYBA7UCPwoBntcABAQrAhsu0gEH0QCECQkPFw8FTwTRAARcAARsAAThAA/SAQjvoU8oM+n1Bn0+oM+n1BDSAQcRENIBXwQBOggY0AEMB6AAjBgYAzIDARsCoAACywIfNPkABAQTBBrKrwOhDADXROP7rNGs3MsBAiUDBPIABGIAAloADyUDHQdZAIwDAxUUFQZVBFkABEsBBFsBEQZUAYkFAQEEAgYDGygD8gcCARAFoM+ndHnc7o87ndHnc7o4ASgEgwU7BQAMUAL/CBQBBAIMAAYFNBgSBhgCDAUEAQYKAwAbhAUAB9UCB7EASBgYAy8KAQShAAIKAR8uZAEMKcQBXgLwFAAWAGkABQF0AdgBCQJiArsCXQO8AykEjQQtBYcF4AWRBhEAAAAAAEcmhCABAAAABQAAAAIAZnIBDADXROP7rNGs3AAAABi9GZA8zAMAANwAAABMT1JPAAQiTRhgQIKjAAAAzwIBAG5IZWxsb0FhIAMAU/clBNuvE91bSadnFF2xSOwSfLHHe5n94yDpv9ys0az740TXAwQmBAAEAg0AAQQBAgUABgIBBQQA9w0DBAINAAUCAAQFBCYzAAoJAAYABQwFCgYJBgUQBAD2DQQGDwUGBAMGAAQDJwQAAxAFBgYjBQwDAAQFBgMEAPAKBAYJCwYAAgoEAAMCBAUDCgNEAAAAAAABAAAAAAAnKLzEAQAAAAUAAAAGAIIEdGV4dAEGAIIEdGV4dLBU8Ua7AAAAAAAAAA==";
let update = "bG9ybwAAAAAAAAAAAAAAAC0ZfV0ABFUbAzUDASUD3KzRrPvjRNcUXbFI7BJ8scd7mf3jIOm/AQECAwECATQBAAAABQEAAAEABgEEAQIAAAUEdGV4dAAPAQQCAQADAdYBAgEFAgEDAAQDQWEg";
let decoded_snapshot = STANDARD.decode(snapshot).unwrap();
let decoded_update = STANDARD.decode(update).unwrap();

println!(
"snapshot vv:{:#?}",
LoroDoc::decode_import_blob_meta(&decoded_snapshot, false)
.unwrap()
.partial_end_vv
);
let update_meta = LoroDoc::decode_import_blob_meta(&decoded_update, false).unwrap();
dbg!(update_meta);
let doc = LoroDoc::new();
doc.import(&decoded_snapshot).unwrap();
// doc.detach();
println!("Imported snapshot");

doc.import(&decoded_update).unwrap();
println!("Imported update");
doc.checkout_to_latest();
doc.check_state_correctness_slow();
let res = doc.export_json_updates(&Default::default(), &doc.oplog_vv());
println!("{:#?}", serde_json::to_value(res).unwrap());
}
@@ -100,7 +100,7 @@ impl PartialEq for ActionWrapper {
 
 impl Eq for ActionWrapper {}
 
-impl<'a> Arbitrary<'a> for ActionWrapper {
+impl Arbitrary<'_> for ActionWrapper {
     fn arbitrary(u: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> {
         Ok(ActionWrapper::Generic(GenericAction::arbitrary(u)?))
     }
@@ -496,7 +496,7 @@ pub struct SsTableIter<'a> {
     back_block_idx: isize,
 }
 
-impl<'a> Debug for SsTableIter<'a> {
+impl Debug for SsTableIter<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("SsTableIter")
             .field("iter", &self.iter)
@@ -736,7 +736,7 @@ impl<'a> SsTableIter<'a> {
     }
 }
 
-impl<'a> KvIterator for SsTableIter<'a> {
+impl KvIterator for SsTableIter<'_> {
     fn peek_next_key(&self) -> Option<Bytes> {
         self.peek_next_key()
     }
@@ -770,7 +770,7 @@ impl<'a> KvIterator for SsTableIter<'a> {
     }
 }
 
-impl<'a> Iterator for SsTableIter<'a> {
+impl Iterator for SsTableIter<'_> {
     type Item = (Bytes, Bytes);
     fn next(&mut self) -> Option<Self::Item> {
         if !self.has_next() {
@@ -783,7 +783,7 @@ impl<'a> Iterator for SsTableIter<'a> {
     }
 }
 
-impl<'a> DoubleEndedIterator for SsTableIter<'a> {
+impl DoubleEndedIterator for SsTableIter<'_> {
     fn next_back(&mut self) -> Option<Self::Item> {
         if !self.has_next_back() {
             return None;
@@ -102,7 +102,6 @@ impl LoroList {
     // self.list.push_container(c)?;
     // Ok(())
     // }
-
     #[inline]
     pub fn insert_list_container(
         &self,
@@ -34,7 +34,6 @@ impl LoroMap {
     // {
     // self.map.for_each(f)
     // }
-
     /// Insert a key-value pair into the map.
     pub fn insert(&self, key: &str, value: Arc<dyn LoroValueLike>) -> LoroResult<()> {
         self.map.insert(key, value.as_loro_value())
@@ -88,14 +88,6 @@ impl LoroMovableList {
         self.list.push(v.as_loro_value())
     }
 
-    /// Push a container to the end of the list.
-    // pub fn push_container<C: ContainerTrait>(&self, child: C) -> LoroResult<C> {
-    // let pos = self.list.len();
-    // Ok(C::from_list(
-    // self.list.insert_container(pos, child.to_list())?,
-    // ))
-    // }
-
     #[inline]
     pub fn insert_list_container(
         &self,
@@ -517,7 +517,6 @@ impl LoroDoc {
     // pub fn export(&self, mode: ExportMode) -> Vec<u8> {
     // self.doc.export(mode.into())
     // }
-
     pub fn export_updates_in_range(&self, spans: &[IdSpan]) -> Result<Vec<u8>, LoroEncodeError> {
         self.doc.export(loro::ExportMode::UpdatesInRange {
             spans: Cow::Borrowed(spans),
@@ -19,7 +19,7 @@ pub struct DiffEvent {
     pub events: Vec<ContainerDiff>,
 }
 
-impl<'a> From<loro::event::DiffEvent<'a>> for DiffEvent {
+impl From<loro::event::DiffEvent<'_>> for DiffEvent {
     fn from(diff_event: loro::event::DiffEvent) -> Self {
         Self {
             triggered_by: diff_event.triggered_by,
@@ -138,7 +138,7 @@ pub enum TreeExternalDiff {
     },
 }
 
-impl<'a, 'b> From<&'b loro::event::ContainerDiff<'a>> for ContainerDiff {
+impl<'a> From<&loro::event::ContainerDiff<'a>> for ContainerDiff {
     fn from(value: &loro::event::ContainerDiff<'a>) -> Self {
         Self {
             target: value.target.into(),
@@ -156,7 +156,7 @@ impl<'a, 'b> From<&'b loro::event::ContainerDiff<'a>> for ContainerDiff {
     }
 }
 
-impl<'a> From<&'a loro::Index> for Index {
+impl From<&loro::Index> for Index {
     fn from(value: &loro::Index) -> Self {
         match value {
             loro::Index::Key(key) => Index::Key {
@@ -180,7 +180,7 @@ impl From<Index> for loro::Index {
     }
 }
 
-impl<'a, 'b> From<&'b loro::event::Diff<'a>> for Diff {
+impl From<&loro::event::Diff<'_>> for Diff {
     fn from(value: &loro::event::Diff) -> Self {
         match value {
             loro::event::Diff::List(l) => {
@@ -116,7 +116,7 @@ impl From<loro::undo::UndoItemMeta> for UndoItemMeta {
     }
 }
 
-impl<'a> From<&'a UndoItemMeta> for loro::undo::UndoItemMeta {
+impl From<&UndoItemMeta> for loro::undo::UndoItemMeta {
     fn from(meta: &UndoItemMeta) -> Self {
         loro::undo::UndoItemMeta {
             value: (&meta.value).into(),
@@ -63,7 +63,7 @@ impl From<LoroValue> for loro::LoroValue {
     }
 }
 
-impl<'a> From<&'a LoroValue> for loro::LoroValue {
+impl From<&LoroValue> for loro::LoroValue {
     fn from(value: &LoroValue) -> loro::LoroValue {
         match value {
             LoroValue::Null => loro::LoroValue::Null,
@@ -164,7 +164,7 @@ impl From<ContainerID> for loro::ContainerID {
     }
 }
 
-impl<'a> From<&'a ContainerID> for loro::ContainerID {
+impl From<&ContainerID> for loro::ContainerID {
     fn from(value: &ContainerID) -> loro::ContainerID {
         match value {
             ContainerID::Root {
@@ -210,7 +210,7 @@ impl From<loro::ContainerID> for ContainerID {
     }
 }
 
-impl<'a> From<&'a loro::ContainerID> for ContainerID {
+impl From<&loro::ContainerID> for ContainerID {
     fn from(value: &loro::ContainerID) -> ContainerID {
         match value {
             loro::ContainerID::Root {
@@ -63,7 +63,7 @@ pub(crate) struct ArenaGuards<'a> {
     root_c_idx: MutexGuard<'a, Vec<ContainerIdx>>,
 }
 
-impl<'a> ArenaGuards<'a> {
+impl ArenaGuards<'_> {
     pub fn register_container(&mut self, id: &ContainerID) -> ContainerIdx {
         if let Some(&idx) = self.container_id_to_idx.get(id) {
             return idx;
@@ -105,6 +105,7 @@ impl SecureRandomGenerator for DefaultRandom {
         #[cfg(test)]
         // SAFETY: this is only used in test
         unsafe {
+            #[allow(static_mut_refs)]
             let bytes = TEST_RANDOM.fetch_add(1, std::sync::atomic::Ordering::Release);
             dest.copy_from_slice(&bytes.to_le_bytes());
         }
@@ -118,7 +118,7 @@ impl IntoContainerId for String {
     }
 }
 
-impl<'a> IntoContainerId for &'a str {
+impl IntoContainerId for &str {
     fn into_container_id(self, _arena: &SharedArena, kind: ContainerType) -> ContainerID {
         ContainerID::Root {
             name: InternalString::from(self),
@@ -80,7 +80,7 @@ pub enum InnerListOp {
     StyleEnd,
 }
 
-impl<'a> ListOp<'a> {
+impl ListOp<'_> {
     pub fn new_del(id_start: ID, pos: usize, len: usize) -> Self {
         assert!(len != 0);
         Self::Delete(DeleteSpanWithId::new(id_start, pos as isize, len as isize))
@@ -433,7 +433,7 @@ impl Sliceable for DeleteSpan {
     }
 }
 
-impl<'a> Mergable for ListOp<'a> {
+impl Mergable for ListOp<'_> {
     fn is_mergable(&self, _other: &Self, _conf: &()) -> bool
     where
         Self: Sized,
@@ -484,7 +484,7 @@ impl<'a> Mergable for ListOp<'a> {
     }
 }
 
-impl<'a> HasLength for ListOp<'a> {
+impl HasLength for ListOp<'_> {
     fn content_len(&self) -> usize {
         match self {
             ListOp::Insert { slice, .. } => slice.content_len(),
@@ -497,7 +497,7 @@ impl<'a> HasLength for ListOp<'a> {
     }
 }
 
-impl<'a> Sliceable for ListOp<'a> {
+impl Sliceable for ListOp<'_> {
     fn slice(&self, from: usize, to: usize) -> Self {
         match self {
             ListOp::Insert { slice, pos } => ListOp::Insert {
@@ -247,7 +247,7 @@ impl StyleRangeMap {
         });
 
         let (target, _) = self.tree.insert_by_path(right, Elem { len, styles });
-        return &self.tree.get_elem(target.leaf).unwrap().styles;
+        &self.tree.get_elem(target.leaf).unwrap().styles
     }
 
     /// Return the style sets beside `index` and get the intersection of them.
@@ -164,7 +164,7 @@ pub(crate) struct BoolRleVecIter<'a> {
     offset: u32,
 }
 
-impl<'a> Iterator for BoolRleVecIter<'a> {
+impl Iterator for BoolRleVecIter<'_> {
     type Item = bool;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -244,35 +244,40 @@ struct OrdIdSpan<'a> {
     deps: Cow<'a, Frontiers>,
 }
 
-impl<'a> HasLength for OrdIdSpan<'a> {
+impl HasLength for OrdIdSpan<'_> {
     fn content_len(&self) -> usize {
         self.len
     }
 }
 
-impl<'a> HasId for OrdIdSpan<'a> {
+impl HasId for OrdIdSpan<'_> {
     fn id_start(&self) -> ID {
         self.id
     }
 }
 
-impl<'a> HasLamport for OrdIdSpan<'a> {
+impl HasLamport for OrdIdSpan<'_> {
     fn lamport(&self) -> Lamport {
         self.lamport
     }
 }
 
-impl<'a> PartialOrd for OrdIdSpan<'a> {
+impl PartialOrd for OrdIdSpan<'_> {
     fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
         Some(self.cmp(other))
     }
 }
 
-impl<'a> Ord for OrdIdSpan<'a> {
+impl Ord for OrdIdSpan<'_> {
     fn cmp(&self, other: &Self) -> std::cmp::Ordering {
         self.lamport_last()
             .cmp(&other.lamport_last())
             .then(self.id.peer.cmp(&other.id.peer))
+            // If they have the same last id, we want the shorter one to be greater;
+            // Otherwise, find_common_ancestor won't work correctly. Because we may
+            // lazily load the dag node, so sometimes the longer one should be broken
+            // into smaller pieces but it's already pushed to the queue.
+            .then(other.len.cmp(&self.len))
     }
 }
 
@@ -285,7 +290,7 @@ enum NodeType {
 
 impl<'a> OrdIdSpan<'a> {
     #[inline]
-    fn from_dag_node<D, F>(id: ID, get: &'a F) -> Option<OrdIdSpan>
+    fn from_dag_node<D, F>(id: ID, get: &'a F) -> Option<OrdIdSpan<'a>>
     where
         D: DagNode + 'a,
         F: Fn(ID) -> Option<D>,
@@ -577,7 +582,11 @@
     );
     while let Some((other_node, other_type)) = queue.peek() {
         trace!("find_common_ancestor_new queue peek {:?}", other_node);
-        if node == *other_node {
+        if node == *other_node
+            || (node.len() == 1
+                && other_node.len() == 1
+                && node[0].id_last() == other_node[0].id_last())
+        {
             if node_type != *other_type {
                 node_type = NodeType::Shared;
             }
@@ -49,7 +49,7 @@ pub struct DagIterator<'a, T> {
 }
 
 /// Should only use it on debug, because it's slow and likely to use lots of mem
-impl<'a, T: DagNode> Iterator for DagIterator<'a, T> {
+impl<T: DagNode> Iterator for DagIterator<'_, T> {
     type Item = T;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -103,7 +103,7 @@ pub(crate) struct DagIteratorVV<'a, T> {
 }
 
 /// Should only use it on debug, because it's slow and likely to use lots of mem
-impl<'a, T: DagNode> Iterator for DagIteratorVV<'a, T> {
+impl<T: DagNode> Iterator for DagIteratorVV<'_, T> {
     type Item = (T, VersionVector);
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -258,7 +258,7 @@ impl<'a, T: DagNode, D: Dag<Node = T> + Debug> DagCausalIter<'a, D> {
     }
 }
 
-impl<'a, T: DagNode, D: Dag<Node = T>> Iterator for DagCausalIter<'a, D> {
+impl<T: DagNode, D: Dag<Node = T>> Iterator for DagCausalIter<'_, D> {
     type Item = IterReturn<T>;
 
     fn next(&mut self) -> Option<Self::Item> {
@@ -66,7 +66,7 @@ pub struct EncodedRegisters<'a> {
     pub(super) position: either::Either<FxHashSet<&'a [u8]>, ValueRegister<&'a [u8]>>,
 }
 
-impl<'a> ValueEncodeRegister for EncodedRegisters<'a> {
+impl ValueEncodeRegister for EncodedRegisters<'_> {
     fn key_mut(&mut self) -> &mut ValueRegister<InternalString> {
         &mut self.key
     }
@@ -83,7 +83,7 @@ impl<'a> ValueEncodeRegister for EncodedRegisters<'a> {
     }
 }
 
-impl<'a> EncodedRegisters<'a> {
+impl EncodedRegisters<'_> {
     pub(crate) fn sort_fractional_index(&mut self) {
         let position_register =
             std::mem::replace(&mut self.position, either::Left(Default::default()))
@@ -105,7 +105,7 @@ pub struct DecodedArenas<'a> {
     pub state_blob_arena: &'a [u8],
 }
 
-impl<'a> ValueDecodedArenasTrait for DecodedArenas<'a> {
+impl ValueDecodedArenasTrait for DecodedArenas<'_> {
     fn keys(&self) -> &[InternalString] {
         &self.keys.keys
     }
@@ -1002,7 +1002,7 @@ mod encode {
         }
     }
 
-    impl<'a> generic_btree::rle::HasLength for TempOp<'a> {
+    impl generic_btree::rle::HasLength for TempOp<'_> {
         #[inline(always)]
         fn rle_len(&self) -> usize {
             self.op.atom_len()
@@ -127,7 +127,7 @@ pub(crate) struct InternalDocDiff<'a> {
     pub(crate) new_version: Cow<'a, Frontiers>,
 }
 
-impl<'a> InternalDocDiff<'a> {
+impl InternalDocDiff<'_> {
     pub fn into_owned(self) -> InternalDocDiff<'static> {
         InternalDocDiff {
             origin: self.origin,
@@ -1705,7 +1705,7 @@ pub struct CommitWhenDrop<'a> {
     options: CommitOptions,
 }
 
-impl<'a> Drop for CommitWhenDrop<'a> {
+impl Drop for CommitWhenDrop<'_> {
     fn drop(&mut self) {
         self.doc.commit_with(std::mem::take(&mut self.options));
     }
@@ -145,7 +145,7 @@ impl Op {
     }
 }
 
-impl<'a> RemoteOp<'a> {
+impl RemoteOp<'_> {
     #[allow(unused)]
     pub(crate) fn into_static(self) -> RemoteOp<'static> {
         RemoteOp {
@@ -187,7 +187,7 @@ impl Sliceable for Op {
     }
 }
 
-impl<'a> Mergable for RemoteOp<'a> {
+impl Mergable for RemoteOp<'_> {
     fn is_mergable(&self, _other: &Self, _cfg: &()) -> bool {
         // don't merge remote op, because it's already merged.
         false
@@ -198,7 +198,7 @@ impl<'a> Mergable for RemoteOp<'a> {
     }
 }
 
-impl<'a> HasLength for RemoteOp<'a> {
+impl HasLength for RemoteOp<'_> {
     fn content_len(&self) -> usize {
         self.content.atom_len()
     }
@@ -212,7 +212,7 @@ impl HasIndex for Op {
     }
 }
 
-impl<'a> HasIndex for RemoteOp<'a> {
+impl HasIndex for RemoteOp<'_> {
     type Int = Counter;
 
     fn get_start_index(&self) -> Self::Int {
@@ -226,13 +226,13 @@ impl HasCounter for Op {
     }
 }
 
-impl<'a> HasCounter for RemoteOp<'a> {
+impl HasCounter for RemoteOp<'_> {
     fn ctr_start(&self) -> Counter {
         self.counter
     }
 }
 
-impl<'a> HasId for RichOp<'a> {
+impl HasId for RichOp<'_> {
     fn id_start(&self) -> ID {
         ID {
             peer: self.peer,
@@ -241,13 +241,13 @@ impl<'a> HasId for RichOp<'a> {
     }
 }
 
-impl<'a> HasLength for RichOp<'a> {
+impl HasLength for RichOp<'_> {
     fn content_len(&self) -> usize {
         self.end - self.start
     }
 }
 
-impl<'a> HasLamport for RichOp<'a> {
+impl HasLamport for RichOp<'_> {
     fn lamport(&self) -> Lamport {
         self.lamport + self.start as Lamport
     }
@@ -487,7 +487,7 @@ impl Mergable for SliceRange {
     }
 }
 
-impl<'a> ListSlice<'a> {
+impl ListSlice<'_> {
     #[inline(always)]
     pub fn unknown_range(len: usize) -> SliceRange {
         let start = UNKNOWN_START;
@@ -511,7 +511,7 @@ impl<'a> ListSlice<'a> {
     }
 }
 
-impl<'a> HasLength for ListSlice<'a> {
+impl HasLength for ListSlice<'_> {
     fn content_len(&self) -> usize {
         match self {
             ListSlice::RawStr { unicode_len, .. } => *unicode_len,
@@ -520,7 +520,7 @@ impl<'a> HasLength for ListSlice<'a> {
     }
 }
 
-impl<'a> Sliceable for ListSlice<'a> {
+impl Sliceable for ListSlice<'_> {
     fn slice(&self, from: usize, to: usize) -> Self {
         match self {
             ListSlice::RawStr {
@@ -541,7 +541,7 @@ impl<'a> Sliceable for ListSlice<'a> {
     }
 }
 
-impl<'a> Mergable for ListSlice<'a> {
+impl Mergable for ListSlice<'_> {
     fn is_mergable(&self, _other: &Self, _: &()) -> bool {
         false
     }
@@ -111,7 +111,7 @@ pub enum RawOpContent<'a> {
     },
 }
 
-impl<'a> Clone for RawOpContent<'a> {
+impl Clone for RawOpContent<'_> {
     fn clone(&self) -> Self {
         match self {
             Self::Map(arg0) => Self::Map(arg0.clone()),
@@ -127,7 +127,7 @@ impl<'a> Clone for RawOpContent<'a> {
     }
 }
 
-impl<'a> RawOpContent<'a> {
+impl RawOpContent<'_> {
     pub fn to_static(&self) -> RawOpContent<'static> {
         match self {
             Self::Map(arg0) => RawOpContent::Map(arg0.clone()),
@@ -176,7 +176,7 @@ impl<'a> RawOpContent<'a> {
     }
 }
 
-impl<'a> HasLength for RawOpContent<'a> {
+impl HasLength for RawOpContent<'_> {
     fn content_len(&self) -> usize {
         match self {
             RawOpContent::Map(x) => x.content_len(),
@@ -189,7 +189,7 @@ impl<'a> HasLength for RawOpContent<'a> {
     }
 }
 
-impl<'a> Mergable for RawOpContent<'a> {
+impl Mergable for RawOpContent<'_> {
     fn is_mergable(&self, other: &Self, _conf: &()) -> bool
     where
         Self: Sized,
@@ -380,6 +380,7 @@ impl OpLog {
     /// You can trim it by the provided counter value. It should start with the counter.
     ///
    /// If frontiers are provided, it will be faster (because we don't need to calculate it from version vector
+    #[allow(clippy::type_complexity)]
    pub(crate) fn iter_from_lca_causally(
        &self,
        from: &VersionVector,
@@ -442,7 +442,7 @@ struct ValueDecodeArena<'a> {
     keys: &'a [InternalString],
 }
 
-impl<'a> ValueDecodedArenasTrait for ValueDecodeArena<'a> {
+impl ValueDecodedArenasTrait for ValueDecodeArena<'_> {
     fn keys(&self) -> &[InternalString] {
         self.keys
     }
@@ -27,7 +27,7 @@ impl fmt::Debug for Frontiers {
 
 struct FrontiersDebugHelper<'a>(&'a Frontiers);
 
-impl<'a> fmt::Debug for FrontiersDebugHelper<'a> {
+impl fmt::Debug for FrontiersDebugHelper<'_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut list = f.debug_list();
         match self.0 {
@@ -2236,7 +2236,7 @@ fn is_deleted() {
     let container_before = map.insert_container("container", LoroMap::new()).unwrap();
     container_before.insert("A", "B").unwrap();
     tree.delete(node).unwrap();
-    let container_after = doc.get_map(&container_before.id());
+    let container_after = doc.get_map(container_before.id());
     assert!(container_after.is_deleted());
 }
 
@@ -45,7 +45,7 @@ pub struct SliceIterator<'a, T> {
     end_offset: Option<usize>,
 }
 
-impl<'a, T> SliceIterator<'a, T> {
+impl<T> SliceIterator<'_, T> {
     fn new_empty() -> Self {
         Self {
             vec: &[],
@@ -68,7 +68,7 @@ impl<T: HasLength + Sliceable, A: Array<Item = T>> Sliceable for SmallVec<A> {
 
         for item in self.iter() {
             if index < to && from < index + item.atom_len() {
-                let start = if index < from { from - index } else { 0 };
+                let start = from.saturating_sub(index);
                 ans.push(item.slice(start, item.atom_len().min(to - index)));
             }
 