Fix warnings (#484)

* fix: warnings

* fix: warnings
Zixuan Chen 2024-09-29 21:15:19 +08:00 committed by GitHub
parent df0e061dbf
commit 5688a017d6
63 changed files with 368 additions and 894 deletions

View file

@ -46,6 +46,7 @@
"unexist",
"unmark",
"unmergeable",
"Unsubscriber",
"yspan"
],
"rust-analyzer.runnableEnv": {

View file

@ -1,4 +1,4 @@
use std::time::Instant;
#![allow(deprecated)]
use bench_utils::SyncKind;
use examples::{draw::DrawActor, run_async_workflow, run_realtime_collab_workflow};
@ -6,6 +6,7 @@ use flate2::write::GzEncoder;
use flate2::Compression;
use loro::{LoroDoc, ToJson};
use std::io::prelude::*;
use std::time::Instant;
use tabled::{settings::Style, Table, Tabled};
#[derive(Tabled)]

View file

@ -8,7 +8,7 @@ pub fn main() {
println!("init_duration {}", init_duration);
let start = Instant::now();
let snapshot = doc.export_snapshot();
let snapshot = doc.export(loro::ExportMode::Snapshot);
let duration = start.elapsed().as_secs_f64() * 1000.;
println!("export duration {} size={}", duration, snapshot.len());
}
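
The hunk above swaps the deprecated `export_snapshot()` for the unified `export` API. A minimal round-trip sketch, assuming the API at this commit where `export` takes an `ExportMode` and returns the encoded bytes directly:

use loro::{ExportMode, LoroDoc};

fn snapshot_roundtrip() {
    let doc = LoroDoc::new();
    doc.get_text("text").insert(0, "hello").unwrap();
    doc.commit();

    // A snapshot encodes both the current state and the full history.
    let bytes = doc.export(ExportMode::Snapshot);

    let doc2 = LoroDoc::new();
    doc2.import(&bytes).unwrap();
}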

View file

@ -65,7 +65,7 @@ pub fn main() {
println!("Time cost {:?}", start.elapsed());
let start = Instant::now();
let updates = doc.export_from(&Default::default());
let updates = doc.export(loro::ExportMode::all_updates());
println!("Export updates time cost {:?}", start.elapsed());
let start = Instant::now();
let doc2 = LoroDoc::new();

View file

@ -60,8 +60,12 @@ pub fn main() {
for i in 0..1000 {
random_insert(&mut list_a, 100, i);
random_insert(&mut list_b, 100, i);
doc_a.import(&doc_b.export_from(&doc_a.oplog_vv())).unwrap();
doc_b.import(&doc_a.export_from(&doc_b.oplog_vv())).unwrap();
doc_a
.import(&doc_b.export(loro::ExportMode::updates(&doc_a.oplog_vv())))
.unwrap();
doc_b
.import(&doc_a.export(loro::ExportMode::updates(&doc_b.oplog_vv())))
.unwrap();
}
doc_a
@ -106,8 +110,12 @@ pub fn main() {
for i in 0..1000 {
random_insert(&mut list_a, 100, i);
random_insert(&mut list_b, 100, i);
doc_a.import(&doc_b.export_from(&doc_a.oplog_vv())).unwrap();
doc_b.import(&doc_a.export_from(&doc_b.oplog_vv())).unwrap();
doc_a
.import(&doc_b.export(loro::ExportMode::updates(&doc_a.oplog_vv())))
.unwrap();
doc_b
.import(&doc_a.export(loro::ExportMode::updates(&doc_b.oplog_vv())))
.unwrap();
}
doc_a
@ -119,13 +127,21 @@ pub fn main() {
let mut list_b = doc_b.get_movable_list("list");
random_insert(&mut list_a, 1000, 0);
random_insert(&mut list_b, 1000, 0);
doc_a.import(&doc_b.export_from(&doc_a.oplog_vv())).unwrap();
doc_b.import(&doc_a.export_from(&doc_b.oplog_vv())).unwrap();
doc_a
.import(&doc_b.export(loro::ExportMode::updates(&doc_a.oplog_vv())))
.unwrap();
doc_b
.import(&doc_a.export(loro::ExportMode::updates(&doc_b.oplog_vv())))
.unwrap();
for i in 0..1000 {
random_set(&mut list_a, 100, i);
random_set(&mut list_b, 100, i);
doc_a.import(&doc_b.export_from(&doc_a.oplog_vv())).unwrap();
doc_b.import(&doc_a.export_from(&doc_b.oplog_vv())).unwrap();
doc_a
.import(&doc_b.export(loro::ExportMode::updates(&doc_a.oplog_vv())))
.unwrap();
doc_b
.import(&doc_a.export(loro::ExportMode::updates(&doc_b.oplog_vv())))
.unwrap();
}
doc_a
@ -137,13 +153,21 @@ pub fn main() {
let mut list_b = doc_b.get_movable_list("list");
random_insert(&mut list_a, 1000, 0);
random_insert(&mut list_b, 1000, 0);
doc_a.import(&doc_b.export_from(&doc_a.oplog_vv())).unwrap();
doc_b.import(&doc_a.export_from(&doc_b.oplog_vv())).unwrap();
doc_a
.import(&doc_b.export(loro::ExportMode::updates(&doc_a.oplog_vv())))
.unwrap();
doc_b
.import(&doc_a.export(loro::ExportMode::updates(&doc_b.oplog_vv())))
.unwrap();
for i in 0..1000 {
random_move(&mut list_a, 100, i);
random_move(&mut list_b, 100, i);
doc_a.import(&doc_b.export_from(&doc_a.oplog_vv())).unwrap();
doc_b.import(&doc_a.export_from(&doc_b.oplog_vv())).unwrap();
doc_a
.import(&doc_b.export(loro::ExportMode::updates(&doc_a.oplog_vv())))
.unwrap();
doc_b
.import(&doc_a.export(loro::ExportMode::updates(&doc_b.oplog_vv())))
.unwrap();
}
doc_a
@ -162,11 +186,11 @@ fn run(name: &'static str, apply_task: impl FnOnce() -> LoroDoc) -> BenchResult
let apply_duration = start.elapsed().as_secs_f64() * 1000.;
let start = Instant::now();
let snapshot = doc.export_snapshot();
let snapshot = doc.export(loro::ExportMode::Snapshot);
let encode_snapshot_duration = start.elapsed().as_secs_f64() * 1000.;
let start = Instant::now();
let updates = doc.export_from(&Default::default());
let updates = doc.export(loro::ExportMode::all_updates());
let encode_update_duration = start.elapsed().as_secs_f64() * 1000.;
let start = Instant::now();
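
The recurring pattern in this file replaces `export_from(&vv)` with `export(ExportMode::updates(&vv))`, which encodes only the ops the given version vector is missing. A sketch of the two-way sync idiom, using exactly the calls shown in the hunks above:

use loro::{ExportMode, LoroDoc};

fn sync(doc_a: &LoroDoc, doc_b: &LoroDoc) {
    // Each side exports precisely what the other is missing, judged by
    // the receiver's op-log version vector.
    doc_a
        .import(&doc_b.export(ExportMode::updates(&doc_a.oplog_vv())))
        .unwrap();
    doc_b
        .import(&doc_a.export(ExportMode::updates(&doc_b.oplog_vv())))
        .unwrap();
}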

View file

@ -42,9 +42,9 @@ pub fn main() {
println!(
"Updates size: {}",
ByteSize(doc.export_from(&Default::default()).len())
ByteSize(doc.export(loro::ExportMode::Snapshot).len())
);
let snapshot = doc.export_snapshot();
let snapshot = doc.export(loro::ExportMode::Snapshot);
println!("Snapshot size: {}", ByteSize(snapshot.len()));
doc.with_oplog(|oplog| {
println!(
@ -72,7 +72,8 @@ pub fn main() {
}
}
doc.import(&new_doc.export_from(&doc.oplog_vv())).unwrap();
doc.import(&new_doc.export(loro::ExportMode::updates(&doc.oplog_vv())))
.unwrap();
}
println!("Time taken to move {} nodes: {:?}", n * k, start.elapsed());
@ -99,9 +100,9 @@ pub fn main() {
println!(
"Updates size: {}",
ByteSize(doc.export_from(&Default::default()).len())
ByteSize(doc.export(loro::ExportMode::all_updates()).len())
);
let snapshot = doc.export_snapshot();
let snapshot = doc.export(loro::ExportMode::Snapshot);
println!("Snapshot size: {}", ByteSize(snapshot.len()));
doc.compact_change_store();
doc.with_oplog(|oplog| {

View file

@ -3,7 +3,7 @@ use loro::{CommitOptions, LoroCounter, LoroDoc, LoroMap};
#[derive(Debug)]
struct NewProject {
m: LoroMap,
// m: LoroMap,
c: LoroCounter,
}
@ -27,7 +27,7 @@ pub fn main() {
.insert_container("used_time", LoroCounter::new())
.unwrap();
all_projects.push(NewProject {
m: project,
// m: project,
c: counter,
});
}
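
Here the never-read `m` field is commented out to silence rustc's `dead_code` warning. An alternative that keeps the handle alive is an underscore prefix, sketched below with hypothetical stand-in types:

struct NewProject {
    // Underscore-prefixed fields are exempt from the dead_code lint,
    // so the value is retained without ever being read.
    _m: String, // stand-in for LoroMap
    c: u32,     // stand-in for LoroCounter
}

fn demo() {
    let p = NewProject {
        _m: String::from("project"),
        c: 0,
    };
    println!("used_time = {}", p.c);
}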

View file

@ -1,3 +1,4 @@
#![allow(deprecated)]
use bench_utils::{
create_seed, gen_async_actions, gen_realtime_actions, make_actions_async, Action, ActionTrait,
};

View file

@ -14,7 +14,7 @@ use loro::{
};
use pretty_assertions::assert_eq;
use rand::{rngs::StdRng, Rng, SeedableRng};
use tracing::{info_span, trace};
use tracing::info_span;
use crate::{
container::{CounterActor, ListActor, MovableListActor, TextActor, TreeActor},
@ -285,78 +285,108 @@ impl Actor {
pub fn register(&mut self, target: ContainerType) {
match target {
ContainerType::Map => {
self.tracker.try_lock().unwrap().as_map_mut().unwrap().insert(
"map".to_string(),
Value::empty_container(
ContainerType::Map,
ContainerID::new_root("map", ContainerType::Map),
),
);
self.tracker
.try_lock()
.unwrap()
.as_map_mut()
.unwrap()
.insert(
"map".to_string(),
Value::empty_container(
ContainerType::Map,
ContainerID::new_root("map", ContainerType::Map),
),
);
self.targets.insert(
target,
ActionExecutor::MapActor(MapActor::new(self.loro.clone())),
);
}
ContainerType::List => {
self.tracker.try_lock().unwrap().as_map_mut().unwrap().insert(
"list".to_string(),
Value::empty_container(
ContainerType::List,
ContainerID::new_root("list", ContainerType::List),
),
);
self.tracker
.try_lock()
.unwrap()
.as_map_mut()
.unwrap()
.insert(
"list".to_string(),
Value::empty_container(
ContainerType::List,
ContainerID::new_root("list", ContainerType::List),
),
);
self.targets.insert(
target,
ActionExecutor::ListActor(ListActor::new(self.loro.clone())),
);
}
ContainerType::MovableList => {
self.tracker.try_lock().unwrap().as_map_mut().unwrap().insert(
"movable_list".to_string(),
Value::empty_container(
ContainerType::MovableList,
ContainerID::new_root("movable_list", ContainerType::MovableList),
),
);
self.tracker
.try_lock()
.unwrap()
.as_map_mut()
.unwrap()
.insert(
"movable_list".to_string(),
Value::empty_container(
ContainerType::MovableList,
ContainerID::new_root("movable_list", ContainerType::MovableList),
),
);
self.targets.insert(
target,
ActionExecutor::MovableListActor(MovableListActor::new(self.loro.clone())),
);
}
ContainerType::Text => {
self.tracker.try_lock().unwrap().as_map_mut().unwrap().insert(
"text".to_string(),
Value::empty_container(
ContainerType::Text,
ContainerID::new_root("text", ContainerType::Text),
),
);
self.tracker
.try_lock()
.unwrap()
.as_map_mut()
.unwrap()
.insert(
"text".to_string(),
Value::empty_container(
ContainerType::Text,
ContainerID::new_root("text", ContainerType::Text),
),
);
self.targets.insert(
target,
ActionExecutor::TextActor(TextActor::new(self.loro.clone())),
);
}
ContainerType::Tree => {
self.tracker.try_lock().unwrap().as_map_mut().unwrap().insert(
"tree".to_string(),
Value::empty_container(
ContainerType::Tree,
ContainerID::new_root("tree", ContainerType::Tree),
),
);
self.tracker
.try_lock()
.unwrap()
.as_map_mut()
.unwrap()
.insert(
"tree".to_string(),
Value::empty_container(
ContainerType::Tree,
ContainerID::new_root("tree", ContainerType::Tree),
),
);
self.targets.insert(
target,
ActionExecutor::TreeActor(TreeActor::new(self.loro.clone())),
);
}
ContainerType::Counter => {
self.tracker.try_lock().unwrap().as_map_mut().unwrap().insert(
"counter".to_string(),
Value::empty_container(
ContainerType::Counter,
ContainerID::new_root("counter", ContainerType::Counter),
),
);
self.tracker
.try_lock()
.unwrap()
.as_map_mut()
.unwrap()
.insert(
"counter".to_string(),
Value::empty_container(
ContainerType::Counter,
ContainerID::new_root("counter", ContainerType::Counter),
),
);
self.targets.insert(
target,
ActionExecutor::CounterActor(CounterActor::new(self.loro.clone())),

View file

@ -5,7 +5,6 @@ use std::{
};
use fxhash::FxHashMap;
use itertools::Itertools;
use loro::{
event::Diff, Container, ContainerID, ContainerType, LoroDoc, LoroError, LoroTree, LoroValue,
TreeExternalDiff, TreeID,

View file

@ -10,7 +10,7 @@ use arbitrary::Arbitrary;
use fxhash::FxHashSet;
use loro::{ContainerType, Frontiers, LoroError, LoroResult};
use tabled::TableIteratorExt;
use tracing::{debug, info, info_span, trace};
use tracing::{info, info_span, trace};
use crate::{actions::ActionWrapper, array_mut_ref};

View file

@ -1,3 +1,4 @@
#![allow(deprecated)]
pub mod actions;
pub mod actor;
pub mod container;

View file

@ -39,12 +39,12 @@ impl OneDocFuzzer {
}
}
Action::SyncAll => {}
Action::Checkout { site, to } => {}
Action::Checkout { .. } => {}
Action::Handle {
site,
target,
container,
action,
..
} => {
if matches!(action, ActionWrapper::Action(_)) {
return;
@ -63,9 +63,9 @@ impl OneDocFuzzer {
self.doc.checkout(&branch.frontiers).unwrap();
if let Some(action) = action.as_action_mut() {
match action {
crate::actions::ActionInner::Map(map_action) => {}
crate::actions::ActionInner::Map(..) => {}
crate::actions::ActionInner::List(list_action) => match list_action {
crate::container::list::ListAction::Insert { pos, value } => {
crate::container::list::ListAction::Insert { pos, .. } => {
let len = self.doc.get_list("list").len();
*pos %= (len as u8).saturating_add(1);
}
@ -87,7 +87,7 @@ impl OneDocFuzzer {
},
crate::actions::ActionInner::MovableList(movable_list_action) => {
match movable_list_action {
crate::actions::MovableListAction::Insert { pos, value } => {
crate::actions::MovableListAction::Insert { pos, .. } => {
let len = self.doc.get_movable_list("movable_list").len();
*pos %= (len as u8).saturating_add(1);
}
@ -236,19 +236,14 @@ impl OneDocFuzzer {
}
}
}
Action::Undo { site, op_len } => {}
Action::SyncAllUndo { site, op_len } => {}
Action::Undo { .. } => {}
Action::SyncAllUndo { .. } => {}
}
}
fn apply_action(&mut self, action: &mut Action) {
match action {
Action::Handle {
site,
target,
container,
action,
} => {
Action::Handle { site, action, .. } => {
let doc = &mut self.doc;
let branch = &mut self.branches[*site as usize];
doc.checkout(&branch.frontiers).unwrap();
@ -302,8 +297,7 @@ impl OneDocFuzzer {
.unwrap();
}
crate::container::TextActionInner::Delete => {
text.delete(text_action.pos as usize, text_action.len)
.unwrap();
text.delete(text_action.pos, text_action.len).unwrap();
}
crate::container::TextActionInner::Mark(_) => {}
}
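
These fuzzer hunks fix `unused_variables` warnings by binding only the fields an arm actually uses and discarding the rest with `..`. A self-contained illustration (the `Event` enum is hypothetical, not from the codebase):

#[allow(dead_code)] // variants are never constructed in this sketch
enum Event {
    Handle { site: u8, target: u8, action: u8 },
    Checkout { site: u8, to: u8 },
}

fn dispatch(e: &Event) {
    match e {
        // Bind only what the arm needs; `..` covers the rest silently.
        Event::Handle { site, action, .. } => println!("site={site} action={action}"),
        // A no-op arm can ignore every field.
        Event::Checkout { .. } => {}
    }
}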

View file

@ -1,3 +1,4 @@
#![allow(deprecated)]
use std::sync::Arc;
use loro::{ToJson as _, ID};

View file

@ -1,3 +1,4 @@
#![allow(deprecated)]
use fuzz::{
actions::{ActionWrapper::*, GenericAction},
crdt_fuzzer::{test_multi_sites, Action::*, FuzzTarget, FuzzValue::*},

View file

@ -1,4 +1,4 @@
use std::sync::Arc;
#![allow(deprecated)]
use arbitrary::Unstructured;
use fuzz::{
@ -17,6 +17,7 @@ use fuzz::{
test_multi_sites_on_one_doc, test_multi_sites_with_gc,
};
use loro::{ContainerType::*, LoroCounter, LoroDoc};
use std::sync::Arc;
#[ctor::ctor]
fn init() {

View file

@ -291,22 +291,19 @@ where
}
let ans = match (&self.current_mem, &self.current_sstable) {
(Some((mem_key, _)), Some((iter_key, _))) => match mem_key.cmp(iter_key) {
Ordering::Less => self.current_mem.take().map(|kv| {
Ordering::Less => self.current_mem.take().inspect(|_kv| {
self.current_mem = self.mem.next();
kv
}),
Ordering::Equal => {
self.current_sstable.take();
self.current_mem.take().map(|kv| {
self.current_mem.take().inspect(|_kv| {
self.current_mem = self.mem.next();
kv
})
}
Ordering::Greater => self.current_sstable.take(),
},
(Some(_), None) => self.current_mem.take().map(|kv| {
(Some(_), None) => self.current_mem.take().inspect(|_kv| {
self.current_mem = self.mem.next();
kv
}),
(None, Some(_)) => self.current_sstable.take(),
(None, None) => None,
@ -341,22 +338,19 @@ where
let ans = match (&self.back_mem, &self.back_sstable) {
(Some((mem_key, _)), Some((iter_key, _))) => match mem_key.cmp(iter_key) {
Ordering::Greater => self.back_mem.take().map(|kv| {
Ordering::Greater => self.back_mem.take().inspect(|_kv| {
self.back_mem = self.mem.next_back();
kv
}),
Ordering::Equal => {
self.back_sstable.take();
self.back_mem.take().map(|kv| {
self.back_mem.take().inspect(|_kv| {
self.back_mem = self.mem.next_back();
kv
})
}
Ordering::Less => self.back_sstable.take(),
},
(Some(_), None) => self.back_mem.take().map(|kv| {
(Some(_), None) => self.back_mem.take().inspect(|_kv| {
self.back_mem = self.mem.next_back();
kv
}),
(None, Some(_)) => self.back_sstable.take(),
(None, None) => None,
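
`Option::inspect` (stable since Rust 1.76) runs a side effect on the contained value and returns the option unchanged, so the `map` closures above, which only advanced the iterator and handed `kv` back untouched, can drop the explicit return. A minimal sketch:

fn demo() {
    let mut slot = Some(41);

    // Before: `map` must return the value explicitly.
    let a = slot.take().map(|v| {
        // side effect here
        v
    });

    slot = Some(42);
    // After: `inspect` borrows the value, runs the side effect,
    // and yields the original Option untouched.
    let b = slot.take().inspect(|_v| {
        // side effect here
    });

    assert_eq!(a, Some(41));
    assert_eq!(b, Some(42));
}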

View file

@ -254,7 +254,7 @@ impl LoroDoc {
/// Import updates/snapshot exported by [`LoroDoc::export_snapshot`] or [`LoroDoc::export_from`].
#[inline]
pub fn import(&self, bytes: &[u8]) -> Result<(), LoroError> {
self.doc.import_with(bytes, "".into())
self.doc.import_with(bytes, "")
}
/// Import updates/snapshot exported by [`LoroDoc::export_snapshot`] or [`LoroDoc::export_from`].
@ -263,7 +263,7 @@ impl LoroDoc {
/// in the generated events.
#[inline]
pub fn import_with(&self, bytes: &[u8], origin: &str) -> Result<(), LoroError> {
self.doc.import_with(bytes, origin.into())
self.doc.import_with(bytes, origin)
}
pub fn import_json_updates(&self, json: &str) -> Result<(), LoroError> {
@ -279,8 +279,14 @@ impl LoroDoc {
serde_json::to_string(&json).unwrap()
}
// TODO: add export method
/// Export all the ops not included in the given `VersionVector`
#[inline]
#[allow(deprecated)]
#[deprecated(
since = "1.0.0",
note = "Use `export` with `ExportMode::Updates` instead"
)]
pub fn export_from(&self, vv: &VersionVector) -> Vec<u8> {
self.doc.export_from(&vv.into())
}
@ -288,6 +294,7 @@ impl LoroDoc {
/// Export the current state and history of the document.
#[inline]
pub fn export_snapshot(&self) -> Vec<u8> {
#[allow(deprecated)]
self.doc.export_snapshot()
}
@ -636,11 +643,11 @@ impl From<CommitOptions> for loro::CommitOptions {
}
pub trait JsonSchemaLike {
fn into_json_schema(&self) -> LoroResult<JsonSchema>;
fn to_json_schema(&self) -> LoroResult<JsonSchema>;
}
impl<T: TryInto<JsonSchema> + Clone> JsonSchemaLike for T {
fn into_json_schema(&self) -> LoroResult<JsonSchema> {
fn to_json_schema(&self) -> LoroResult<JsonSchema> {
self.clone()
.try_into()
.map_err(|_| LoroError::InvalidJsonSchema)
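
Two things happen in this file: `export_from` gains a `#[deprecated]` attribute steering callers toward `export`, with `#[allow(deprecated)]` suppressing the warning on the remaining internal calls, and `into_json_schema` is renamed `to_json_schema`, presumably for clippy's `wrong_self_convention` lint, since `into_` implies taking `self` by value while this method borrows `&self`. A sketch of the attribute pairing, with hypothetical free functions standing in for the methods:

#[deprecated(since = "1.0.0", note = "Use `export` with `ExportMode::Updates` instead")]
pub fn export_from(vv: &[u8]) -> Vec<u8> {
    vv.to_vec() // stand-in body
}

pub fn export_snapshot() -> Vec<u8> {
    // The wrapper itself compiles warning-free while the old entry
    // point keeps working for downstream users.
    #[allow(deprecated)]
    export_from(&[])
}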

View file

@ -4,6 +4,12 @@ use loro::{CounterSpan, IdSpan, LoroResult, PeerID, ID};
pub struct VersionVector(RwLock<loro::VersionVector>);
impl Default for VersionVector {
fn default() -> Self {
Self::new()
}
}
impl VersionVector {
pub fn new() -> Self {
Self(RwLock::new(loro::VersionVector::default()))
@ -59,10 +65,6 @@ impl VersionVector {
self.0.read().unwrap().partial_cmp(&other.0.read().unwrap())
}
pub fn eq(&self, other: &VersionVector) -> bool {
self.0.read().unwrap().eq(&other.0.read().unwrap())
}
pub fn encode(&self) -> Vec<u8> {
self.0.read().unwrap().encode()
}
@ -73,6 +75,14 @@ impl VersionVector {
}
}
impl PartialEq for VersionVector {
fn eq(&self, other: &Self) -> bool {
self.0.read().unwrap().eq(&other.0.read().unwrap())
}
}
impl Eq for VersionVector {}
#[derive(Debug)]
pub struct Frontiers(loro::Frontiers);
@ -81,10 +91,6 @@ impl Frontiers {
Self(loro::Frontiers::default())
}
pub fn eq(&self, other: &Frontiers) -> bool {
self.0.eq(&other.0)
}
pub fn from_id(id: ID) -> Self {
Self(loro::Frontiers::from(id))
}
@ -103,6 +109,20 @@ impl Frontiers {
}
}
impl PartialEq for Frontiers {
fn eq(&self, other: &Self) -> bool {
self.0.eq(&other.0)
}
}
impl Eq for Frontiers {}
impl Default for Frontiers {
fn default() -> Self {
Self::new()
}
}
pub struct VersionVectorDiff {
/// need to add these spans to move from right to left
pub left: HashMap<PeerID, CounterSpan>,
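
The inherent `eq` methods on `VersionVector` and `Frontiers` shadowed the trait method (likely clippy's `should_implement_trait`); moving them into proper `PartialEq`/`Eq` impls enables the `==` operator and generic bounds. A sketch with a hypothetical lock-guarded wrapper:

use std::sync::RwLock;

pub struct Version(RwLock<Vec<u64>>); // hypothetical stand-in

impl PartialEq for Version {
    fn eq(&self, other: &Self) -> bool {
        // Compare the guarded payloads, mirroring the impls above.
        self.0.read().unwrap().eq(&*other.0.read().unwrap())
    }
}

impl Eq for Version {}

fn same(a: &Version, b: &Version) -> bool {
    a == b // available via the operator now, not just a.eq(b)
}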

View file

@ -1,3 +1,4 @@
#![allow(dead_code)]
use crate::{
dag::{Dag, DagNode},
id::ID,
@ -14,7 +15,7 @@ struct SortBase {
impl PartialOrd for SortBase {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
self.lamport.partial_cmp(&other.lamport)
Some(self.cmp(other))
}
}
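
The old `partial_cmp` compared only `lamport`, which can disagree with the type's total order and trips clippy's `non_canonical_partial_ord_impl` lint; delegating to `cmp` is the canonical fix for any `Ord` type. A minimal sketch with assumed fields:

use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct SortBase {
    id: u64, // assumed field layout, for illustration only
    lamport: u32,
}

impl PartialOrd for SortBase {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Defer to the total order so the two impls can never diverge.
        Some(self.cmp(other))
    }
}

impl Ord for SortBase {
    fn cmp(&self, other: &Self) -> Ordering {
        self.lamport
            .cmp(&other.lamport)
            .then(self.id.cmp(&other.id))
    }
}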

View file

@ -3,7 +3,5 @@ pub(crate) use bfs::calc_critical_version_bfs as calc_critical_version;
// Only for testing
mod dfs;
mod view;
pub(crate) use dfs::calc_critical_version_dfs;
pub(crate) use dfs::get_end_list;
pub(crate) use view::allocation_mermaid;

View file

@ -1,90 +0,0 @@
use crate::{
dag::{Dag, DagNode},
id::ID,
};
use fxhash::{FxHashMap, FxHashSet};
pub fn get_all_points<T: DagNode, D: Dag<Node = T>>(
graph: &D,
points: &mut FxHashSet<ID>,
current: &ID,
) {
points.insert(*current);
for to_id in graph.get(*current).unwrap().deps() {
get_all_points(graph, points, to_id);
}
}
pub(crate) fn allocation_mermaid<T: DagNode, D: Dag<Node = T>>(
graph: &D,
start_id_list: &[ID],
end_id_list: &[ID],
) -> String {
let mut s = String::new();
s.push_str("graph TD\n");
let mut counter: u32 = 2;
let mut points: FxHashSet<ID> = FxHashSet::default();
for start in start_id_list {
get_all_points(graph, &mut points, &start);
}
let mut counter_map: FxHashMap<ID, u32> = FxHashMap::default();
for x in points {
counter_map.insert(x, counter);
counter += 1;
}
for start in start_id_list {
s.push_str(&format!(
"\t1(virtual_start) --> {}(id:{} lamport:{})\n",
counter_map.get(&start).unwrap(),
start,
graph.get(*start).unwrap().lamport()
));
}
for end in end_id_list {
s.push_str(&format!(
"\t{}(id:{} lamport:{}) --> 114514(virtual_end)\n",
counter_map.get(&end).unwrap(),
end,
graph.get(*end).unwrap().lamport()
));
counter += 1;
}
let mut edge: FxHashSet<(ID, ID)> = FxHashSet::default();
for start in start_id_list {
dfs(
graph,
ID {
peer: 0,
counter: -1,
},
*start,
&mut edge,
);
}
for (from, to) in edge {
s.push_str(&format!(
"\t{}(id:{} lamport:{}) --> {}(id:{} lamport:{})\n",
counter_map.get(&from).unwrap(),
from,
graph.get(from).unwrap().lamport(),
counter_map.get(&to).unwrap(),
to,
graph.get(to).unwrap().lamport()
));
}
s
}
pub(crate) fn dfs<T: DagNode, D: Dag<Node = T>>(
graph: &D,
from: ID,
id: ID,
edge: &mut FxHashSet<(ID, ID)>,
) {
if from.counter != -1 {
edge.insert((from, id));
}
for dep in graph.get(id).unwrap().deps() {
dfs(graph, id, *dep, edge);
}
}

View file

@ -9,7 +9,6 @@ use std::{
use append_only_bytes::BytesSlice;
use fxhash::FxHashMap;
use loro_common::PeerID;
use tracing::trace;
use crate::{
change::Lamport,
@ -96,7 +95,11 @@ impl SharedArena {
if id.is_root() {
self.inner.root_c_idx.try_lock().unwrap().push(idx);
self.inner.parents.try_lock().unwrap().insert(idx, None);
self.inner.depth.try_lock().unwrap().push(NonZeroU16::new(1));
self.inner
.depth
.try_lock()
.unwrap()
.push(NonZeroU16::new(1));
} else {
self.inner.depth.try_lock().unwrap().push(None);
}
@ -266,7 +269,12 @@ impl SharedArena {
#[inline]
pub fn with_text_slice(&self, range: Range<usize>, mut f: impl FnMut(&str)) {
f(self.inner.str.try_lock().unwrap().slice_str_by_unicode(range))
f(self
.inner
.str
.try_lock()
.unwrap()
.slice_str_by_unicode(range))
}
#[inline]
@ -292,7 +300,8 @@ impl SharedArena {
}
pub fn can_import_snapshot(&self) -> bool {
self.inner.str.try_lock().unwrap().is_empty() && self.inner.values.try_lock().unwrap().is_empty()
self.inner.str.try_lock().unwrap().is_empty()
&& self.inner.values.try_lock().unwrap().is_empty()
}
fn inner_convert_op(
@ -475,6 +484,7 @@ impl SharedArena {
None
}
#[allow(unused)]
pub(crate) fn log_all_values(&self) {
let values = self.inner.values.try_lock().unwrap();
for (i, v) in values.iter().enumerate() {

View file

@ -26,12 +26,9 @@ use crate::{
utils::query_by_len::{EntityIndexQueryWithEventIndex, IndexQueryWithEntityIndex, QueryByLen},
};
use self::{
cursor_cache::CursorCache,
query::{
EntityQuery, EntityQueryT, EventIndexQuery, EventIndexQueryT, UnicodeQuery, UnicodeQueryT,
Utf16Query, Utf16QueryT,
},
use self::query::{
EntityQuery, EntityQueryT, EventIndexQuery, EventIndexQueryT, UnicodeQuery, UnicodeQueryT,
Utf16Query, Utf16QueryT,
};
use super::{
@ -729,22 +726,6 @@ pub(crate) fn utf8_to_unicode_index(s: &str, utf8_index: usize) -> Result<usize,
}
}
fn pos_to_unicode_index(s: &str, pos: usize, kind: PosType) -> Option<usize> {
match kind {
PosType::Bytes => utf8_to_unicode_index(s, pos).ok(),
PosType::Unicode => Some(pos),
PosType::Utf16 => utf16_to_unicode_index(s, pos).ok(),
PosType::Entity => Some(pos),
PosType::Event => {
if cfg!(feature = "wasm") {
utf16_to_unicode_index(s, pos).ok()
} else {
Some(pos)
}
}
}
}
#[derive(Clone, Debug, Copy, PartialEq, Eq, Default)]
pub(crate) struct PosCache {
pub(super) unicode_len: i32,
@ -910,6 +891,7 @@ mod query {
Unicode,
#[allow(unused)]
Utf16,
#[allow(unused)]
Entity,
Event,
}
@ -1081,272 +1063,6 @@ mod query {
}
}
mod cursor_cache {
use std::sync::atomic::AtomicUsize;
use super::{
pos_to_unicode_index, unicode_to_utf16_index, unicode_to_utf8_index, PosType,
RichtextTreeTrait,
};
use generic_btree::{rle::HasLength, BTree, Cursor, LeafIndex};
#[derive(Debug, Clone)]
struct CursorCacheItem {
pos: usize,
pos_type: PosType,
leaf: LeafIndex,
}
#[derive(Debug, Clone)]
struct EntityIndexCacheItem {
pos: usize,
pos_type: PosType,
entity_index: usize,
leaf: LeafIndex,
}
#[derive(Debug, Clone, Default)]
pub(super) struct CursorCache {
cursor: Option<CursorCacheItem>,
entity: Option<EntityIndexCacheItem>,
}
static CACHE_HIT: AtomicUsize = AtomicUsize::new(0);
static CACHE_MISS: AtomicUsize = AtomicUsize::new(0);
impl CursorCache {
// TODO: some of the invalidation can be changed into shifting pos
pub fn invalidate(&mut self) {
self.cursor.take();
self.entity.take();
}
pub fn invalidate_entity_cache_after(&mut self, entity_index: usize) {
if let Some(c) = self.entity.as_mut() {
if entity_index < c.entity_index {
self.entity = None;
}
}
}
pub fn record_cursor(
&mut self,
pos: usize,
kind: PosType,
cursor: Cursor,
_tree: &BTree<RichtextTreeTrait>,
) {
match kind {
PosType::Unicode | PosType::Entity => {
self.cursor = Some(CursorCacheItem {
pos: pos - cursor.offset,
pos_type: kind,
leaf: cursor.leaf,
});
}
PosType::Utf16 => todo!(),
PosType::Event => todo!(),
PosType::Bytes => todo!(),
}
}
pub fn record_entity_index(
&mut self,
pos: usize,
kind: PosType,
entity_index: usize,
cursor: Cursor,
tree: &BTree<RichtextTreeTrait>,
) -> Result<(), usize> {
match kind {
PosType::Bytes => {
if cursor.offset == 0 {
self.entity = Some(EntityIndexCacheItem {
pos,
pos_type: kind,
entity_index,
leaf: cursor.leaf,
});
} else {
let elem = tree.get_elem(cursor.leaf).unwrap();
let Some(s) = elem.as_str() else {
return Ok(());
};
let utf8offset = unicode_to_utf8_index(s, cursor.offset).unwrap();
if pos < utf8offset {
return Err(pos);
}
self.entity = Some(EntityIndexCacheItem {
pos: pos - utf8offset,
pos_type: kind,
entity_index: entity_index - cursor.offset,
leaf: cursor.leaf,
});
}
Ok(())
}
PosType::Unicode | PosType::Entity => {
self.entity = Some(EntityIndexCacheItem {
pos: pos - cursor.offset,
pos_type: kind,
entity_index: entity_index - cursor.offset,
leaf: cursor.leaf,
});
Ok(())
}
PosType::Event if cfg!(not(feature = "wasm")) => {
self.entity = Some(EntityIndexCacheItem {
pos: pos - cursor.offset,
pos_type: kind,
entity_index: entity_index - cursor.offset,
leaf: cursor.leaf,
});
Ok(())
}
_ => {
// utf16
if cursor.offset == 0 {
self.entity = Some(EntityIndexCacheItem {
pos,
pos_type: kind,
entity_index,
leaf: cursor.leaf,
});
} else {
let elem = tree.get_elem(cursor.leaf).unwrap();
let Some(s) = elem.as_str() else {
return Ok(());
};
let utf16offset = unicode_to_utf16_index(s, cursor.offset).unwrap();
if pos < utf16offset {
return Err(pos);
}
self.entity = Some(EntityIndexCacheItem {
pos: pos - utf16offset,
pos_type: kind,
entity_index: entity_index - cursor.offset,
leaf: cursor.leaf,
});
}
Ok(())
}
}
}
pub fn get_cursor(
&self,
pos: usize,
pos_type: PosType,
tree: &BTree<RichtextTreeTrait>,
) -> Option<Cursor> {
for c in self.cursor.iter() {
if c.pos_type != pos_type {
continue;
}
let elem = tree.get_elem(c.leaf).unwrap();
let Some(s) = elem.as_str() else { continue };
if pos < c.pos {
continue;
}
let offset = pos - c.pos;
let Some(offset) = pos_to_unicode_index(s, offset, pos_type) else {
continue;
};
if offset <= elem.rle_len() {
cache_hit();
return Some(Cursor {
leaf: c.leaf,
offset,
});
}
}
cache_miss();
None
}
pub fn get_entity_index(
&self,
pos: usize,
pos_type: PosType,
tree: &BTree<RichtextTreeTrait>,
has_style: bool,
) -> Option<(usize, Cursor)> {
if has_style {
return None;
}
for c in self.entity.iter() {
if c.pos_type != pos_type {
continue;
}
if pos < c.pos {
continue;
}
let offset = pos - c.pos;
let leaf = tree.get_leaf(c.leaf.into());
let s = leaf.elem().as_str()?;
let Some(offset) = pos_to_unicode_index(s, offset, pos_type) else {
continue;
};
if offset < leaf.elem().rle_len() {
cache_hit();
return Some((
offset + c.entity_index,
Cursor {
leaf: c.leaf,
offset,
},
));
}
cache_hit();
return Some((
offset + c.entity_index,
Cursor {
leaf: c.leaf,
offset,
},
));
}
cache_miss();
None
}
#[allow(unused)]
pub fn diagnose() {
let hit = CACHE_HIT.load(std::sync::atomic::Ordering::Relaxed);
let miss = CACHE_MISS.load(std::sync::atomic::Ordering::Relaxed);
println!(
"hit: {}, miss: {}, hit rate: {}",
hit,
miss,
hit as f64 / (hit + miss) as f64
);
}
}
fn cache_hit() {
#[cfg(debug_assertions)]
{
CACHE_HIT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
}
}
fn cache_miss() {
#[cfg(debug_assertions)]
{
CACHE_MISS.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
}
}
}
impl RichtextState {
pub(crate) fn from_chunks<I: Iterator<Item = impl Into<RichtextStateChunk>>>(i: I) -> Self {
Self {
@ -2263,7 +1979,6 @@ impl RichtextState {
}
pub fn diagnose(&self) {
CursorCache::diagnose();
println!(
"rope_nodes: {}, style_nodes: {}, text_len: {}",
self.tree.node_len(),

View file

@ -19,9 +19,9 @@ use smallvec::{smallvec, SmallVec};
use tracing::trace;
mod iter;
mod mermaid;
#[cfg(any(test, feature = "test_utils"))]
#[cfg(feature = "test_utils")]
mod test;
#[cfg(any(test, feature = "test_utils"))]
#[cfg(feature = "test_utils")]
pub use test::{fuzz_alloc_tree, Interaction};
use crate::{
@ -65,6 +65,7 @@ pub(crate) trait DagUtils: Dag {
/// Slow, should probably only use on dev
#[allow(unused)]
fn get_vv(&self, id: ID) -> VersionVector;
#[allow(unused)]
fn find_path(&self, from: &[ID], to: &[ID]) -> VersionVectorDiff;
fn contains(&self, id: ID) -> bool;
fn iter_causal(&self, from: &[ID], target: IdSpanVector) -> DagCausalIter<'_, Self>

View file

@ -1,3 +1,4 @@
#![allow(unused)]
use arbitrary::Arbitrary;
use im::HashSet;
use loro_common::HasCounter;
@ -84,12 +85,6 @@ struct TestDag {
client_id: PeerID,
}
impl TestDag {
fn is_first(&self) -> bool {
*self.version_vec.get(&self.client_id).unwrap_or(&0) == 0
}
}
impl Dag for TestDag {
type Node = TestNode;
@ -383,6 +378,8 @@ mod iter {
}
mod dfs {
#[allow(unused)]
#[allow(dead_code)]
use super::*;
use crate::{allocation::calc_critical_version_dfs, delta::DeltaValue};
use rand::{rngs::StdRng, SeedableRng};
@ -449,10 +446,19 @@ mod dfs {
}
mod bfs {
use super::*;
use crate::{allocation::calc_critical_version, delta::DeltaValue};
use arref::array_mut_ref;
use loro_common::ID;
use rand::{rngs::StdRng, SeedableRng};
use crate::{
allocation::calc_critical_version,
dag::{
test::{TestDag, TestNode},
Dag, Interaction,
},
delta::DeltaValue as _,
};
#[test]
fn test_bfs_small() {
let mut a = TestDag::new(0);
@ -560,6 +566,8 @@ mod mermaid {
}
mod get_version_vector {
#[allow(unused)]
#[allow(dead_code)]
use super::*;
#[test]

View file

@ -20,7 +20,7 @@ use std::ops::{Index, IndexMut};
/// Utility function to check if a range is empty that works on older rust versions
#[inline(always)]
fn is_empty_range(start: usize, end: usize) -> bool {
!(start < end)
start >= end
}
#[inline(always)]
@ -123,6 +123,7 @@ impl IndexMut<isize> for OffsetVec {
}
}
#[allow(clippy::too_many_arguments)]
fn find_middle_snake(
old: &[u32],
old_start: usize,
@ -186,6 +187,7 @@ fn find_middle_snake(
None
}
#[allow(clippy::too_many_arguments)]
fn conquer<D: DiffHandler>(
proxy: &mut OperateProxy<D>,
old: &[u32],

View file

@ -1,4 +1,4 @@
pub mod diff;
pub(crate) use diff::myers_diff;
pub(crate) use diff::DiffHandler;
pub(crate) use diff::OperateProxy;
pub mod diff_impl;
pub(crate) use diff_impl::myers_diff;
pub(crate) use diff_impl::DiffHandler;
pub(crate) use diff_impl::OperateProxy;

View file

@ -13,8 +13,8 @@ use itertools::Itertools;
use enum_dispatch::enum_dispatch;
use fxhash::{FxHashMap, FxHashSet};
use loro_common::{
CompactIdLp, ContainerID, Counter, HasCounter, HasCounterSpan, HasIdSpan, IdFull, IdLp, IdSpan,
LoroValue, PeerID, ID,
CompactIdLp, ContainerID, Counter, HasCounter, HasCounterSpan, IdFull, IdLp, IdSpan, LoroValue,
PeerID, ID,
};
use loro_delta::DeltaRope;
use smallvec::SmallVec;

View file

@ -108,7 +108,7 @@ impl DiffCalculatorTrait for TreeDiffCalculator {
fn calculate_diff(
&mut self,
idx: ContainerIdx,
_idx: ContainerIdx,
oplog: &OpLog,
info: DiffCalcVersionInfo,
mut on_new_container: impl FnMut(&ContainerID),
@ -172,7 +172,7 @@ impl TreeDiffCalculator {
tracing::info!("checkout: to == current_vv");
return;
}
let min_lamport = self.get_min_lamport_by_frontiers(&to_frontiers, oplog);
let min_lamport = self.get_min_lamport_by_frontiers(to_frontiers, oplog);
// retreat
let mut retreat_ops = vec![];
for (_target, ops) in tree_cache.tree.iter() {
@ -191,7 +191,7 @@ impl TreeDiffCalculator {
}
// forward and apply
let max_lamport = self.get_max_lamport_by_frontiers(&to_frontiers, oplog);
let max_lamport = self.get_max_lamport_by_frontiers(to_frontiers, oplog);
let mut forward_ops = vec![];
let group = h
.get_importing_cache(&self.container, mark)

View file

@ -2,7 +2,7 @@ use std::{char, sync::Arc};
use fxhash::FxHashMap;
use itertools::Itertools;
use tracing::{instrument, trace};
use tracing::trace;
use crate::diff::DiffHandler;

View file

@ -694,7 +694,7 @@ impl TreeHandler {
match &self.inner {
MaybeDetached::Detached(d) => {
let d = d.try_lock().unwrap();
d.value.map.get(target).is_none()
!d.value.map.contains_key(target)
}
MaybeDetached::Attached(a) => a.with_state(|state| {
let a = state.as_tree_state().unwrap();
@ -904,7 +904,7 @@ impl TreeHandler {
};
a.with_state(|state| {
let a = state.as_tree_state_mut().unwrap();
a.delete_position(parent, &target)
a.delete_position(parent, target)
})
}

View file

@ -791,7 +791,7 @@ impl MovableListHistoryCache {
.find(|e| {
let counter = match &e.counter_or_value {
Either::Left(c) => *c,
Either::Right(v) => -1,
Either::Right(_) => -1,
};
vv.get(&e.peer).copied().unwrap_or(0) > counter
})

View file

@ -109,7 +109,7 @@ fn parse_jsonpath(path: &str) -> Result<Vec<JSONPathToken>, JsonPathError> {
// Handle array index, slice, filter, or wildcard
let mut content = String::new();
let mut in_quotes = false;
while let Some(&c) = iter.next() {
for &c in iter.by_ref() {
if c == ']' && !in_quotes {
break;
}
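
`while let Some(&c) = iter.next()` triggers clippy's `while_let_on_iterator`; the `for` form over `iter.by_ref()` is equivalent and keeps the iterator usable after the loop, which matters here because parsing continues past the `]`. A minimal sketch:

fn take_until_bracket(s: &str) -> (String, String) {
    let mut iter = s.chars();
    let mut content = String::new();
    // by_ref() lends the iterator to the loop instead of consuming it...
    for c in iter.by_ref() {
        if c == ']' {
            break;
        }
        content.push(c);
    }
    // ...so iteration can resume here, after the closing bracket.
    (content, iter.collect())
}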
@ -535,7 +535,7 @@ impl PathValue for ListHandler {
self.len()
}
fn get_child_by_id(&self, id: ContainerID) -> Option<Handler> {
fn get_child_by_id(&self, _id: ContainerID) -> Option<Handler> {
unimplemented!()
}
@ -577,7 +577,7 @@ impl PathValue for MovableListHandler {
self.len()
}
fn get_child_by_id(&self, id: ContainerID) -> Option<Handler> {
fn get_child_by_id(&self, _id: ContainerID) -> Option<Handler> {
unimplemented!()
}
@ -613,7 +613,7 @@ impl PathValue for TextHandler {
}
impl PathValue for TreeHandler {
fn get_by_key(&self, key: &str) -> Option<ValueOrHandler> {
fn get_by_key(&self, _key: &str) -> Option<ValueOrHandler> {
None
}
@ -621,7 +621,7 @@ impl PathValue for TreeHandler {
None
}
fn for_each_for_path(&self, f: &mut dyn FnMut(ValueOrHandler) -> ControlFlow<()>) {
fn for_each_for_path(&self, _f: &mut dyn FnMut(ValueOrHandler) -> ControlFlow<()>) {
unimplemented!()
}
@ -629,7 +629,7 @@ impl PathValue for TreeHandler {
unimplemented!()
}
fn get_child_by_id(&self, id: ContainerID) -> Option<Handler> {
fn get_child_by_id(&self, _id: ContainerID) -> Option<Handler> {
unimplemented!()
}

View file

@ -11,7 +11,7 @@ use either::Either;
use enum_as_inner::EnumAsInner;
use loro_common::{CompactIdLp, ContainerType, CounterSpan, IdFull, IdLp, IdSpan};
use rle::{HasIndex, HasLength, Mergable, Sliceable};
use serde::{ser::SerializeSeq, Deserialize, Serialize};
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, ops::Range};
mod content;

View file

@ -220,6 +220,7 @@ impl OpLog {
Some(change.slice(offset, change.atom_len()))
}
#[allow(unused)]
fn check_id_is_not_duplicated(&self, id: ID) -> Result<(), LoroError> {
let cur_end = self.dag.vv().get(&id.peer).cloned().unwrap_or(0);
if cur_end > id.counter {
@ -264,16 +265,6 @@ impl OpLog {
Ok(())
}
fn check_deps(&self, deps: &Frontiers) -> Result<(), ID> {
for dep in deps.iter() {
if !self.dag.vv().includes_id(*dep) {
return Err(*dep);
}
}
Ok(())
}
pub(crate) fn next_id(&self, peer: PeerID) -> ID {
let cnt = self.dag.vv().get(&peer).copied().unwrap_or(0);
ID::new(peer, cnt)

View file

@ -2,7 +2,7 @@ use self::block_encode::{decode_block, decode_header, encode_block, ChangesBlock
use super::{loro_dag::AppDagNodeInner, AppDagNode};
use crate::{
arena::SharedArena,
change::{Change, Timestamp},
change::Change,
estimated_size::EstimatedSize,
kv_store::KvStore,
op::Op,
@ -12,7 +12,6 @@ use crate::{
};
use block_encode::decode_block_range;
use bytes::Bytes;
use fxhash::FxHashMap;
use itertools::Itertools;
use loro_common::{
Counter, HasCounterSpan, HasId, HasIdSpan, HasLamportSpan, IdLp, IdSpan, Lamport, LoroError,
@ -31,7 +30,6 @@ use tracing::{debug, info_span, trace, warn};
mod block_encode;
mod block_meta_encode;
mod delta_rle_encode;
pub(super) mod iter;
#[cfg(not(test))]
@ -289,7 +287,7 @@ impl ChangeStore {
}
}
pub fn iter_blocks(&self, id_span: IdSpan) -> Vec<(Arc<ChangesBlock>, usize, usize)> {
pub(crate) fn iter_blocks(&self, id_span: IdSpan) -> Vec<(Arc<ChangesBlock>, usize, usize)> {
if id_span.counter.start == id_span.counter.end {
return vec![];
}
@ -552,7 +550,7 @@ fn encode_blocks_in_store<W: std::io::Write>(
) {
let mut inner = new_store.inner.try_lock().unwrap();
for (_id, block) in inner.mem_parsed_kv.iter_mut() {
let bytes = block.to_bytes(&arena);
let bytes = block.to_bytes(arena);
leb128::write::unsigned(w, bytes.bytes.len() as u64).unwrap();
w.write_all(&bytes.bytes).unwrap();
}
@ -607,11 +605,6 @@ mod mut_external_kv {
let mut max_lamport = None;
let mut max_timestamp = 0;
drop(kv_store);
trace!(
"frontiers = {:#?}\n start_frontiers={:#?}",
&frontiers,
&start_frontiers
);
for id in frontiers.iter() {
let c = self.get_change(*id).unwrap();
debug_assert_ne!(c.atom_len(), 0);
@ -633,11 +626,6 @@ mod mut_external_kv {
Ok(BatchDecodeInfo {
vv,
frontiers,
next_lamport: match max_lamport {
Some(l) => l + 1,
None => 0,
},
max_timestamp,
start_version: if start_vv.is_empty() {
None
} else {
@ -647,19 +635,6 @@ mod mut_external_kv {
Some((start_vv, start_frontiers))
},
})
// todo!("replace with kv store");
// let mut kv = self.mem_kv.try_lock().unwrap();
// assert!(kv.is_empty());
// let mut reader = blocks;
// while !reader.is_empty() {
// let size = leb128::read::unsigned(&mut reader).unwrap();
// let block_bytes = &reader[0..size as usize];
// let block = ChangesBlock::from_bytes(Bytes::copy_from_slice(block_bytes))?;
// kv.insert(block.id(), Arc::new(block));
// reader = &reader[size as usize..];
// }
// Ok(())
}
/// Flush the cached change to kv_store
@ -1136,8 +1111,6 @@ mod mut_inner_kv {
pub(crate) struct BatchDecodeInfo {
pub vv: VersionVector,
pub frontiers: Frontiers,
pub next_lamport: Lamport,
pub max_timestamp: Timestamp,
pub start_version: Option<(VersionVector, Frontiers)>,
}

View file

@ -402,7 +402,7 @@ fn decode_header_from_doc(doc: &EncodedBlock) -> Result<ChangesBlockHeader, Loro
..
} = doc;
let ans: ChangesBlockHeader = decode_changes_header(
&header,
header,
*n_changes as usize,
*counter_start as Counter,
*counter_len as Counter,

View file

@ -28,7 +28,6 @@ pub(crate) fn encode_changes(
// First Counter + Change Len
let mut lengths_bytes = Vec::new();
let mut counter = vec![];
let mut n = 0;
for (i, c) in block.iter().enumerate() {
counter.push(c.id.counter);
@ -53,7 +52,6 @@ pub(crate) fn encode_changes(
let peer_idx = peer_register.register(&dep.peer);
encoded_deps.peer_idx.append(peer_idx as u32).unwrap();
encoded_deps.counter.append(dep.counter as i64).unwrap();
n += 1;
}
}
@ -151,9 +149,9 @@ pub(crate) fn decode_changes_header(
}
let mut counters = Vec::with_capacity(n_changes);
let mut last = first_counter;
for i in 0..n_changes {
for len in lengths.iter() {
counters.push(last);
last += lengths[i];
last += *len;
}
let lamport_decoder = DeltaOfDeltaDecoder::new(bytes).unwrap();

View file

@ -1,59 +0,0 @@
pub struct UnsignedDeltaEncoder {
v: Vec<u8>,
last: u64,
count: usize,
}
impl UnsignedDeltaEncoder {
pub fn new(estimate_bytes: usize) -> Self {
Self {
v: Vec::with_capacity(estimate_bytes),
last: 0,
count: 0,
}
}
pub fn push(&mut self, value: u64) {
let delta = value - self.last;
self.last = value;
leb128::write::unsigned(&mut self.v, delta).unwrap();
self.count += 1;
}
pub fn finish(self) -> (Vec<u8>, usize) {
(self.v, self.count)
}
}
pub struct UnsignedDeltaDecoder<'a> {
v: &'a [u8],
count: usize,
last: u64,
}
impl<'a> UnsignedDeltaDecoder<'a> {
pub fn new(v: &'a [u8], count: usize) -> Self {
Self { v, count, last: 0 }
}
#[allow(unused)]
pub fn rest(mut self) -> &'a [u8] {
while self.next().is_some() {}
self.v
}
}
impl<'a> Iterator for UnsignedDeltaDecoder<'a> {
type Item = u64;
fn next(&mut self) -> Option<Self::Item> {
if self.count == 0 {
return None;
}
self.count -= 1;
let delta = leb128::read::unsigned(&mut self.v).unwrap();
self.last += delta;
Some(self.last)
}
}

View file

@ -3,7 +3,6 @@ use std::{collections::BTreeMap, ops::Deref};
use crate::{change::Change, version::ImVersionVector, OpLog, VersionVector};
use fxhash::FxHashMap;
use loro_common::{Counter, CounterSpan, HasCounterSpan, HasIdSpan, LoroResult, PeerID, ID};
use smallvec::SmallVec;
#[derive(Debug)]
pub enum PendingChange {
@ -41,7 +40,6 @@ impl OpLog {
&mut self,
remote_changes: Vec<Change>,
) -> LoroResult<()> {
let mut result = Ok(());
for change in remote_changes {
let local_change = PendingChange::Unknown(change);
match remote_change_apply_state(self.vv(), self.trimmed_vv(), &local_change) {
@ -53,17 +51,12 @@ impl OpLog {
.entry(miss_dep.counter)
.or_default()
.push(local_change),
ChangeState::DependingOnTrimmedHistory(_ids) => {
result = LoroResult::Err(
loro_common::LoroError::ImportUpdatesThatDependsOnOutdatedVersion,
);
}
ChangeState::Applied => unreachable!("already applied"),
ChangeState::CanApplyDirectly => unreachable!("can apply directly"),
}
}
result
Ok(())
}
}
@ -111,9 +104,6 @@ impl OpLog {
.entry(miss_dep.counter)
.or_default()
.push(pending_change),
ChangeState::DependingOnTrimmedHistory(_) => {
unreachable!()
}
}
}
}
@ -145,12 +135,11 @@ enum ChangeState {
CanApplyDirectly,
// The id of first missing dep
AwaitingMissingDependency(ID),
DependingOnTrimmedHistory(Box<Vec<ID>>),
}
fn remote_change_apply_state(
vv: &VersionVector,
trimmed_vv: &ImVersionVector,
_trimmed_vv: &ImVersionVector,
change: &Change,
) -> ChangeState {
let peer = change.id.peer;

View file

@ -1548,7 +1548,7 @@ impl DocState {
Some(value)
}
pub fn gc_store(&self) -> Option<&Arc<GcStore>> {
pub(crate) fn gc_store(&self) -> Option<&Arc<GcStore>> {
self.store.gc_store()
}
}

View file

@ -59,10 +59,17 @@ impl DocAnalysis {
Self { containers }
}
#[allow(unused)]
pub fn len(&self) -> usize {
self.containers.len()
}
#[allow(unused)]
#[must_use]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
pub fn dropped_len(&self) -> usize {
self.containers
.iter()

View file

@ -298,13 +298,8 @@ impl ContainerStore {
}
mod encode {
use loro_common::{ContainerID, ContainerType, Counter, InternalString, LoroError, LoroResult};
use serde::{Deserialize, Serialize};
use serde_columnar::{
AnyRleDecoder, AnyRleEncoder, BoolRleDecoder, BoolRleEncoder, DeltaRleDecoder,
DeltaRleEncoder,
};
use std::{borrow::Cow, io::Write};
use std::borrow::Cow;
#[derive(Serialize, Deserialize)]
struct EncodedStateStore<'a> {
@ -336,15 +331,6 @@ mod encode {
/// ┌───────────────────────────────────────────────────┐
/// │ Offsets │
/// └───────────────────────────────────────────────────┘
#[derive(Default)]
pub(super) struct CidOffsetEncoder {
types: AnyRleEncoder<u8>,
is_root_bools: BoolRleEncoder,
strings: Vec<InternalString>,
peer_ids: AnyRleEncoder<u64>,
counters: DeltaRleEncoder,
offsets: DeltaRleEncoder,
}
#[derive(Serialize, Deserialize)]
struct EncodedCid<'a> {
@ -356,157 +342,6 @@ mod encode {
counters: Cow<'a, [u8]>,
offsets: Cow<'a, [u8]>,
}
impl CidOffsetEncoder {
pub fn new() -> Self {
Self::default()
}
pub fn push(&mut self, cid: &ContainerID, offset: usize) {
self.types.append(cid.container_type().to_u8()).unwrap();
self.is_root_bools.append(cid.is_root()).unwrap();
match cid {
ContainerID::Root { name, .. } => {
self.strings.push(name.clone());
}
ContainerID::Normal { peer, counter, .. } => {
self.peer_ids.append(*peer).unwrap();
self.counters.append(*counter).unwrap();
}
}
self.offsets.append(offset).unwrap();
}
pub fn write_to_io<W: Write>(self, w: W) {
let mut strings = Vec::with_capacity(self.strings.iter().map(|s| s.len() + 4).sum());
for s in self.strings {
leb128::write::unsigned(&mut strings, s.len() as u64).unwrap();
strings.extend(s.as_bytes());
}
let t = EncodedCid {
types: self.types.finish().unwrap().into(),
is_root_bools: self.is_root_bools.finish().unwrap().into(),
strings: strings.into(),
peer_ids: self.peer_ids.finish().unwrap().into(),
counters: self.counters.finish().unwrap().into(),
offsets: self.offsets.finish().unwrap().into(),
};
postcard::to_io(&t, w).unwrap();
}
}
pub(super) fn decode_cids(bytes: &[u8]) -> LoroResult<Vec<(ContainerID, usize)>> {
let EncodedCid {
types,
is_root_bools,
strings: strings_bytes,
peer_ids: peers_bytes,
counters,
offsets,
} = postcard::from_bytes(bytes).map_err(|e| {
LoroError::DecodeError(format!("Decode cids error {}", e).into_boxed_str())
})?;
let mut ans = Vec::new();
let types: AnyRleDecoder<u8> = AnyRleDecoder::new(&types);
let is_root_iter = BoolRleDecoder::new(&is_root_bools);
let mut strings = Vec::new();
{
// decode strings
let mut strings_bytes: &[u8] = &strings_bytes;
while !strings_bytes.is_empty() {
let len = leb128::read::unsigned(&mut strings_bytes).unwrap();
let s = std::str::from_utf8(&strings_bytes[..len as usize]).unwrap();
strings.push(InternalString::from(s));
strings_bytes = &strings_bytes[len as usize..];
}
}
let mut counters: DeltaRleDecoder<i32> = DeltaRleDecoder::new(&counters);
let mut offsets: DeltaRleDecoder<usize> = DeltaRleDecoder::new(&offsets);
let mut peer_iter: AnyRleDecoder<u64> = AnyRleDecoder::new(&peers_bytes);
let mut s_iter = strings.into_iter();
for (t, is_root) in types.zip(is_root_iter) {
let ty = ContainerType::try_from_u8(t.unwrap()).unwrap();
let offset = offsets.next().unwrap().unwrap();
match is_root.unwrap() {
true => {
let s = s_iter.next();
ans.push((
ContainerID::Root {
name: s.unwrap(),
container_type: ty,
},
offset,
))
}
false => ans.push((
ContainerID::Normal {
peer: peer_iter.next().unwrap().unwrap(),
counter: counters.next().unwrap().unwrap() as Counter,
container_type: ty,
},
offset,
)),
}
}
Ok(ans)
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_container_store() {
let mut encoder = CidOffsetEncoder::new();
let input = [
(
ContainerID::Root {
name: "map".into(),
container_type: ContainerType::Map,
},
0,
),
(
ContainerID::Root {
name: "list".into(),
container_type: ContainerType::List,
},
1,
),
(
ContainerID::Normal {
peer: 1,
counter: 0,
container_type: ContainerType::Map,
},
2,
),
(
ContainerID::Normal {
peer: 2,
counter: 1,
container_type: ContainerType::List,
},
4,
),
];
for (cid, offset) in input.iter() {
encoder.push(cid, *offset);
}
let mut bytes = Vec::new();
encoder.write_to_io(&mut bytes);
let cids = decode_cids(&bytes).unwrap();
assert_eq!(&cids, &input)
}
}
}
#[cfg(test)]

View file

@ -273,6 +273,7 @@ impl ContainerWrapper {
self.flushed = flushed;
}
#[allow(unused)]
pub(crate) fn parent(&self) -> Option<&ContainerID> {
self.parent.as_ref()
}

View file

@ -384,7 +384,7 @@ impl ContainerState for ListState {
match &value.values {
either::Either::Left(range) => {
for i in range.to_range() {
let value = arena.get_value(i as usize).unwrap();
let value = arena.get_value(i).unwrap();
arr.push(value);
}
}
@ -637,10 +637,6 @@ mod test {
use super::*;
fn id(name: &str) -> ContainerID {
ContainerID::new_root(name, crate::ContainerType::List)
}
#[test]
fn test() {
let mut list = ListState::new(ContainerIdx::from_index_and_type(

View file

@ -234,7 +234,7 @@ impl MapState {
let result = self.map.insert(key.clone(), value);
match (result, value_yes) {
(Some(x), true) => {
if let None = x.value {
if x.value.is_none() {
self.size += 1;
}
}
@ -242,7 +242,7 @@ impl MapState {
self.size += 1;
}
(Some(x), false) => {
if let Some(_) = x.value {
if x.value.is_some() {
self.size -= 1;
}
}
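
Both hunks in this file swap pattern matching on `Option` for the dedicated predicates (`if let None = x` becomes `x.is_none()`, per clippy's `redundant_pattern_matching`), and the tree handler earlier made the sibling change from `map.get(k).is_none()` to `!map.contains_key(k)`. A small sketch of both idioms:

use std::collections::HashMap;

fn demo() {
    let mut values: HashMap<&str, Option<u32>> = HashMap::new();
    values.insert("a", None);

    // Presence test: ask directly instead of fetching and discarding.
    assert!(values.contains_key("a"));
    assert!(!values.contains_key("b"));

    // Option test: is_none()/is_some() instead of `if let` patterns.
    if let Some(slot) = values.get("a") {
        assert!(slot.is_none());
    }
}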

View file

@ -748,7 +748,7 @@ impl MovableListState {
}
#[inline]
pub(crate) fn list(&self) -> &BTree<MovableListTreeTrait> {
fn list(&self) -> &BTree<MovableListTreeTrait> {
self.inner.list()
}
@ -1011,12 +1011,6 @@ impl ContainerState for MovableListState {
self.inner.check_consistency();
}
let start_value = if cfg!(debug_assertions) {
Some(self.get_value())
} else {
None
};
let mut event: ListDiff = DeltaRope::new();
let mut maybe_moved: FxHashMap<CompactIdLp, (usize, LoroValue)> = FxHashMap::default();
let need_compare = matches!(mode, DiffMode::Import);

View file

@ -1009,7 +1009,7 @@ mod snapshot {
spans.push(EncodedTextSpan {
peer_idx: peers.register(&id.peer),
counter: id.counter,
lamport_sub_counter: id.lamport as i32 - id.counter as i32,
lamport_sub_counter: id.lamport as i32 - id.counter,
len: t.unicode_len(),
})
}
@ -1019,7 +1019,7 @@ mod snapshot {
spans.push(EncodedTextSpan {
peer_idx: peers.register(&id.peer),
counter: id.counter,
lamport_sub_counter: id.lamport as i32 - id.counter as i32,
lamport_sub_counter: id.lamport as i32 - id.counter,
len: 0,
});
marks.push(EncodedMark {
@ -1033,7 +1033,7 @@ mod snapshot {
spans.push(EncodedTextSpan {
peer_idx: peers.register(&id.peer),
counter: id.counter + 1,
lamport_sub_counter: id.lamport as i32 - id.counter as i32,
lamport_sub_counter: id.lamport as i32 - id.counter,
len: -1,
})
}
@ -1100,7 +1100,7 @@ mod snapshot {
let id_full = IdFull::new(
peers[peer_idx],
counter,
(lamport_sub_counter + counter as i32) as u32,
(lamport_sub_counter + counter) as u32,
);
let chunk = match len {
0 => {
@ -1111,7 +1111,7 @@ mod snapshot {
info,
} = mark_iter.next().unwrap();
let style_op = Arc::new(StyleOp {
lamport: (lamport_sub_counter + counter as i32) as u32,
lamport: (lamport_sub_counter + counter) as u32,
peer: id_full.peer,
cnt: id_full.counter,
key: keys[key_idx].clone(),

View file

@ -1338,6 +1338,7 @@ impl ContainerState for TreeState {
}
// convert map container to LoroValue
#[allow(clippy::ptr_arg)]
pub(crate) fn get_meta_value(nodes: &mut Vec<LoroValue>, state: &mut DocState) {
for node in nodes.iter_mut() {
let map = Arc::make_mut(node.as_map_mut().unwrap());

View file

@ -61,6 +61,7 @@ impl KvWrapper {
}
}
#[allow(unused)]
pub(crate) fn contains_key(&self, key: &[u8]) -> bool {
self.kv.try_lock().unwrap().contains_key(key)
}

View file

@ -254,8 +254,8 @@ struct Subscriber<Callback> {
impl<EmitterKey, Callback> SubscriberSet<EmitterKey, Callback>
where
EmitterKey: 'static + Ord + Clone + Debug,
Callback: 'static,
EmitterKey: 'static + Ord + Clone + Debug + Send + Sync,
Callback: 'static + Send + Sync,
{
pub fn new() -> Self {
Self(Arc::new(Mutex::new(SubscriberSetState {
@ -316,6 +316,7 @@ where
(subscription, move || active.store(true, Ordering::Relaxed))
}
#[allow(unused)]
pub fn remove(&self, emitter: &EmitterKey) -> impl IntoIterator<Item = Callback> {
let mut lock = self.0.try_lock().unwrap();
let subscribers = lock.subscribers.remove(emitter);
@ -395,13 +396,19 @@ fn post_inc(next_subscriber_id: &mut usize) -> usize {
/// is cancelled and the callback will no longer be invoked.
#[must_use]
pub struct Subscription {
unsubscribe: Option<Box<dyn FnOnce() + 'static>>,
unsubscribe: Option<Box<dyn FnOnce() + 'static + Send + Sync>>,
}
impl std::fmt::Debug for Subscription {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Subscription").finish()
}
}
impl Subscription {
/// Creates a new subscription with a callback that gets invoked when
/// this subscription is dropped.
pub fn new(unsubscribe: impl 'static + FnOnce()) -> Self {
pub fn new(unsubscribe: impl 'static + Send + Sync + FnOnce()) -> Self {
Self {
unsubscribe: Some(Box::new(unsubscribe)),
}
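
Tightening the boxed callback to `FnOnce() + Send + Sync` lets a `Subscription` be stored in shared state or dropped from another thread, at the cost of requiring captured state to be thread-safe. A simplified sketch (the `Drop` behavior is assumed, not taken from this diff):

use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

pub struct Subscription {
    unsubscribe: Option<Box<dyn FnOnce() + Send + Sync + 'static>>,
}

impl Subscription {
    pub fn new(unsubscribe: impl FnOnce() + Send + Sync + 'static) -> Self {
        Self {
            unsubscribe: Some(Box::new(unsubscribe)),
        }
    }
}

impl Drop for Subscription {
    fn drop(&mut self) {
        if let Some(f) = self.unsubscribe.take() {
            f(); // run the cancellation callback exactly once
        }
    }
}

fn demo() {
    let active = Arc::new(AtomicBool::new(true));
    let flag = active.clone();
    let sub = Subscription::new(move || flag.store(false, Ordering::Relaxed));
    // The Send + Sync bounds make it legal to ship the handle across threads.
    std::thread::spawn(move || drop(sub)).join().unwrap();
    assert!(!active.load(Ordering::Relaxed));
}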

View file

@ -1,7 +1,6 @@
use loro_common::ID;
use loro_internal::{version::Frontiers, HandlerTrait, LoroDoc, TextHandler, ToJson};
use serde_json::json;
use tracing::trace;
#[test]
fn auto_commit() {

View file

@ -1256,9 +1256,9 @@ fn test_text_update() {
fn test_map_contains_key() {
let doc = LoroDoc::new_auto_commit();
let map = doc.get_map("m");
assert_eq!(map.contains_key("bro"), false);
assert!(!map.contains_key("bro"));
map.insert("bro", 114514).unwrap();
assert_eq!(map.contains_key("bro"), true);
assert!(map.contains_key("bro"));
map.delete("bro").unwrap();
assert_eq!(map.contains_key("bro"), false);
assert!(!map.contains_key("bro"));
}

View file

@ -19,7 +19,7 @@ use loro_internal::{
id::{Counter, PeerID, TreeID, ID},
json::JsonSchema,
loro::{CommitOptions, ExportMode},
loro_common::{check_root_container_name, IdSpan},
loro_common::check_root_container_name,
subscription::SubID,
undo::{UndoItemMeta, UndoOrRedo},
version::Frontiers,
@ -29,7 +29,7 @@ use loro_internal::{
};
use rle::HasLength;
use serde::{Deserialize, Serialize};
use std::{cell::RefCell, cmp::Ordering, option, rc::Rc, sync::Arc};
use std::{cell::RefCell, cmp::Ordering, rc::Rc, sync::Arc};
use wasm_bindgen::{__rt::IntoJsResult, prelude::*, throw_val};
use wasm_bindgen_derive::TryFromJsValue;
@ -1453,7 +1453,13 @@ impl LoroDoc {
/// ```
#[wasm_bindgen(js_name = "vvToFrontiers")]
pub fn vv_to_frontiers(&self, vv: &VersionVector) -> JsResult<JsIDs> {
let f = self.0.oplog().try_lock().unwrap().dag().vv_to_frontiers(&vv.0);
let f = self
.0
.oplog()
.try_lock()
.unwrap()
.dag()
.vv_to_frontiers(&vv.0);
Ok(frontiers_to_ids(&f))
}
@ -3147,6 +3153,7 @@ pub struct LoroTree {
extern crate alloc;
/// The handler of a tree node.
#[allow(missing_docs)]
#[derive(TryFromJsValue, Clone)]
#[wasm_bindgen]
pub struct LoroTreeNode {
@ -4372,7 +4379,7 @@ export type TreeID = `${number}@${PeerID}`;
interface LoroDoc {
/**
* Export updates from the specific version to the current version
*
*
* @deprecated Use `export` instead
*
* @example
@ -4408,46 +4415,46 @@ interface LoroDoc {
*/
getContainerById(id: ContainerID): Container;
/**
/**
* Subscribe to updates from local edits.
*
*
* This method allows you to listen for local changes made to the document.
* It's useful for syncing changes with other instances or saving updates.
*
*
* @param f - A callback function that receives a Uint8Array containing the update data.
* @returns A function to unsubscribe from the updates.
*
*
* @example
* ```ts
* const loro = new Loro();
* const text = loro.getText("text");
*
*
* const unsubscribe = loro.subscribeLocalUpdates((update) => {
* console.log("Local update received:", update);
* // You can send this update to other Loro instances
* });
*
*
* text.insert(0, "Hello");
* loro.commit();
*
*
* // Later, when you want to stop listening:
* unsubscribe();
* ```
*
*
* @example
* ```ts
* const loro1 = new Loro();
* const loro2 = new Loro();
*
*
* // Set up two-way sync
* loro1.subscribeLocalUpdates((updates) => {
* loro2.import(updates);
* });
*
*
* loro2.subscribeLocalUpdates((updates) => {
* loro1.import(updates);
* });
*
*
* // Now changes in loro1 will be reflected in loro2 and vice versa
* ```
*/
@ -4507,7 +4514,7 @@ export interface Change {
length: number,
/**
* The timestamp in seconds.
*
*
* [Unix time](https://en.wikipedia.org/wiki/Unix_time)
* It is the number of seconds that have elapsed since 00:00:00 UTC on 1 January 1970.
*/
@ -4720,7 +4727,7 @@ export type JsonChange = {
id: JsonOpID
/**
* The timestamp in seconds.
*
*
* [Unix time](https://en.wikipedia.org/wiki/Unix_time)
* It is the number of seconds that have elapsed since 00:00:00 UTC on 1 January 1970.
*/

View file

@ -1575,11 +1575,18 @@ impl ContainerTrait for LoroTree {
}
}
/// A tree node in the [LoroTree].
#[derive(Debug, Clone)]
pub struct TreeNode {
/// ID of the tree node.
pub id: TreeID,
/// ID of the parent tree node.
/// If the node is deleted, this value is TreeParentId::Deleted.
/// If you checkout to a version before the node is created, this value is TreeParentId::Unexist.
pub parent: TreeParentId,
/// Fractional index of the node
pub fractional_index: FractionalIndex,
/// The current index of the node in its parent's children list.
pub index: usize,
}

View file

@ -11,7 +11,8 @@ fn test_commit_message() {
// The commit message can be synced to other peers as well
let doc2 = LoroDoc::new();
doc2.import(&doc.export_snapshot()).unwrap();
doc2.import(&doc.export(loro::ExportMode::Snapshot))
.unwrap();
let change = doc.get_change(ID::new(doc.peer_id(), 1)).unwrap();
assert_eq!(change.message(), "edits");
}
@ -49,7 +50,7 @@ fn test_syncing_commit_message() {
doc2.set_peer_id(2).unwrap();
// Export changes from doc1 and import to doc2
let changes = doc1.export_from(&Default::default());
let changes = doc1.export(loro::ExportMode::all_updates());
doc2.import(&changes).unwrap();
// Verify the commit message was synced
@ -74,7 +75,7 @@ fn test_commit_message_sync_via_snapshot() {
doc1.commit_with(CommitOptions::new().commit_msg("second edit"));
// Create a snapshot of doc1
let snapshot = doc1.export_snapshot();
let snapshot = doc1.export(loro::ExportMode::Snapshot);
// Create a new doc from the snapshot
let doc2 = LoroDoc::new();

View file

@ -1,12 +1,7 @@
use std::sync::{Arc, Mutex};
use super::gen_action;
use loro::{
json::JsonChange, undo::UndoItemMeta, Frontiers, JsonSchema, LoroDoc, LoroError, UndoManager,
ID,
};
use loro::{Frontiers, LoroDoc, LoroError, UndoManager, ID};
use loro_internal::vv;
use tracing::{trace, trace_span};
#[test]
fn disallow_editing_on_detached_mode_by_default() {

View file

@ -1,6 +1,4 @@
use loro::{
ExportMode, Frontiers, LoroDoc, LoroList, LoroMap, LoroValue, ToJson, ValueOrContainer, ID,
};
use loro::{LoroDoc, LoroList, LoroMap, LoroValue, ToJson, ValueOrContainer};
use serde_json::json;
fn to_json(v: Vec<ValueOrContainer>) -> serde_json::Value {
@ -239,7 +237,7 @@ fn test_books_not_expensive() -> anyhow::Result<()> {
fn test_everything() -> anyhow::Result<()> {
let doc = setup_test_doc();
let ans = doc.jsonpath("$..*")?;
assert!(ans.len() > 0);
assert!(!ans.is_empty());
Ok(())
}

View file

@ -1,8 +1,7 @@
use std::borrow::Cow;
use super::gen_action;
use loro::{ExportMode, LoroDoc, ToJson};
use serde_json::json;
use loro::{ExportMode, LoroDoc};
#[test]
fn test_snapshot_at_with_multiple_actions() -> anyhow::Result<()> {

View file

@ -10,5 +10,7 @@ fn issue_0() {
let bytes = include_bytes!("./issue_0.bin");
let doc = LoroDoc::new();
doc.import_batch(&[bytes.into()]).unwrap();
#[allow(deprecated)]
doc.export_snapshot();
doc.export(loro::ExportMode::Snapshot);
}

View file

@ -1,3 +1,4 @@
#![allow(deprecated)]
use std::{
cmp::Ordering,
ops::ControlFlow,
@ -1027,9 +1028,10 @@ fn test_gc_sync() {
let doc = LoroDoc::new();
doc.set_peer_id(1).unwrap();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(
ID::new(doc.peer_id(), 10).into(),
));
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
doc.peer_id(),
10,
)));
let new_doc = LoroDoc::new();
new_doc.set_peer_id(2).unwrap();
@ -1072,9 +1074,10 @@ fn test_gc_empty() {
fn test_gc_import_outdated_updates() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(
ID::new(doc.peer_id(), 5).into(),
));
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
doc.peer_id(),
5,
)));
let new_doc = LoroDoc::new();
new_doc.import(&bytes).unwrap();
@ -1090,9 +1093,10 @@ fn test_gc_import_outdated_updates() {
fn test_gc_import_pending_updates_that_is_outdated() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(
ID::new(doc.peer_id(), 5).into(),
));
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
doc.peer_id(),
5,
)));
let new_doc = LoroDoc::new();
new_doc.import(&bytes).unwrap();
@ -1112,9 +1116,10 @@ fn test_gc_import_pending_updates_that_is_outdated() {
fn test_calling_exporting_snapshot_on_gc_doc() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(
ID::new(doc.peer_id(), 5).into(),
));
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
doc.peer_id(),
5,
)));
let new_doc = LoroDoc::new();
new_doc.import(&bytes).unwrap();
let snapshot = new_doc.export(loro::ExportMode::Snapshot);
@ -1128,9 +1133,10 @@ fn test_calling_exporting_snapshot_on_gc_doc() {
fn sync_two_trimmed_docs() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(
ID::new(doc.peer_id(), 10).into(),
));
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
doc.peer_id(),
10,
)));
let doc_a = LoroDoc::new();
doc_a.import(&bytes).unwrap();
@ -1163,9 +1169,10 @@ fn test_map_checkout_on_trimmed_doc() {
doc.get_map("map").insert("3", 3).unwrap();
doc.get_map("map").insert("2", 4).unwrap();
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(
ID::new(doc.peer_id(), 1).into(),
));
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
doc.peer_id(),
1,
)));
let new_doc = LoroDoc::new();
new_doc.import(&new_doc_bytes).unwrap();
@ -1264,6 +1271,7 @@ fn test_loro_export_local_updates() {
}
}
#[test]
fn test_movable_list_checkout_on_trimmed_doc() -> LoroResult<()> {
let doc = LoroDoc::new();
let list = doc.get_movable_list("list");
@ -1274,9 +1282,10 @@ fn test_movable_list_checkout_on_trimmed_doc() -> LoroResult<()> {
list.mov(1, 0)?;
list.delete(0, 1)?;
list.set(0, 0)?;
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(
ID::new(doc.peer_id(), 2).into(),
));
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
doc.peer_id(),
2,
)));
let new_doc = LoroDoc::new();
new_doc.import(&new_doc_bytes).unwrap();
@ -1321,9 +1330,10 @@ fn test_tree_checkout_on_trimmed_doc() -> LoroResult<()> {
let child2 = tree.create(None).unwrap();
tree.mov(child2, root)?;
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(
ID::new(doc.peer_id(), 1).into(),
));
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
doc.peer_id(),
1,
)));
let new_doc = LoroDoc::new();
new_doc.import(&new_doc_bytes).unwrap();
@ -1716,7 +1726,7 @@ fn change_peer_id() {
let doc = LoroDoc::new();
let received_peer_id = Arc::new(AtomicU64::new(0));
let received_peer_id_clone = received_peer_id.clone();
let sub = doc.subscribe_peer_id_change(Box::new(move |peer_id, counter| {
let sub = doc.subscribe_peer_id_change(Box::new(move |peer_id, _counter| {
received_peer_id_clone.store(peer_id, Ordering::SeqCst);
}));

View file

@ -1,3 +1,4 @@
#![allow(deprecated)]
use loro::{LoroDoc, LoroError, ToJson};
use serde_json::json;