refactor: rename gc snapshot to trimmed snapshot

Zixuan Chen 2024-10-01 16:39:43 +08:00
parent c8a60977c1
commit 5ce0b8bd7e
18 changed files with 208 additions and 194 deletions
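
For orientation, a minimal before/after sketch of the renamed export API, pieced together from the call sites changed below (the LoroDoc, ExportMode, and oplog_frontiers usage is taken from this diff; the example itself is not part of the commit):

use loro::{ExportMode, LoroDoc};

fn main() -> anyhow::Result<()> {
    let doc = LoroDoc::new();
    doc.get_text("text").insert(0, "hello")?;
    doc.commit();

    // Before this commit:
    //   let bytes = doc.export(ExportMode::gc_snapshot(&doc.oplog_frontiers()));
    // After this commit, the same export is spelled:
    let bytes = doc.export(ExportMode::trimmed_snapshot(&doc.oplog_frontiers()));

    // A trimmed snapshot still round-trips the current state.
    let new_doc = LoroDoc::new();
    new_doc.import(&bytes)?;
    assert_eq!(doc.get_deep_value(), new_doc.get_deep_value());
    Ok(())
}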


@ -52,8 +52,11 @@ pub fn main() {
let snapshot = doc.export(loro::ExportMode::Snapshot);
println!("Snapshot Size {}", ByteSize(snapshot.len()));
println!("mem: {}", get_mem_usage());
let gc_snapshot = doc.export(loro::ExportMode::gc_snapshot(&doc.oplog_frontiers()));
println!("GC Shallow Snapshot Size {}", ByteSize(gc_snapshot.len()));
let trimmed_snapshot = doc.export(loro::ExportMode::trimmed_snapshot(&doc.oplog_frontiers()));
println!(
"GC Shallow Snapshot Size {}",
ByteSize(trimmed_snapshot.len())
);
println!("mem: {}", get_mem_usage());
examples::utils::bench_fast_snapshot(&doc);


@ -85,7 +85,7 @@ pub fn bench_fast_snapshot(doc: &LoroDoc) {
{
println!("======== New snapshot mode with GC =========");
let start = Instant::now();
let snapshot = doc.export(loro::ExportMode::gc_snapshot(&doc.oplog_frontiers()));
let snapshot = doc.export(loro::ExportMode::trimmed_snapshot(&doc.oplog_frontiers()));
let elapsed = start.elapsed();
println!("Fast Snapshot size: {}", ByteSize(snapshot.len()));
println!("Export fast snapshot time: {:?}", elapsed);


@ -364,7 +364,7 @@ pub fn test_multi_sites_with_gc(
ensure_cov::notify_cov("fuzz_gc");
let mut fuzzer = CRDTFuzzer::new(site_num, fuzz_targets);
let mut applied = Vec::new();
let target_gc_index = actions.len() / 2;
let target_trimmed_index = actions.len() / 2;
for (i, action) in actions.iter_mut().enumerate() {
fuzzer.pre_process(action);
info_span!("ApplyAction", ?action).in_scope(|| {
@ -374,15 +374,15 @@ pub fn test_multi_sites_with_gc(
fuzzer.apply_action(action);
});
if i == target_gc_index {
if i == target_trimmed_index {
info_span!("GC 1 => 0").in_scope(|| {
fuzzer.actors[1].loro.attach();
let f = fuzzer.actors[1].loro.oplog_frontiers();
if !f.is_empty() {
ensure_cov::notify_cov("export_gc_snapshot");
ensure_cov::notify_cov("export_trimmed_snapshot");
let bytes = fuzzer.actors[1]
.loro
.export(loro::ExportMode::gc_snapshot(&f));
.export(loro::ExportMode::trimmed_snapshot(&f));
fuzzer.actors[0].loro.import(&bytes).unwrap();
}
})
@ -510,9 +510,9 @@ pub fn test_multi_sites_with_gc(
if COUNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed) % 1_000 == 0 {
let must_meet = [
"fuzz_gc",
"export_gc_snapshot",
"gc_snapshot::need_calc",
"gc_snapshot::dont_need_calc",
"export_trimmed_snapshot",
"trimmed_snapshot::need_calc",
"trimmed_snapshot::dont_need_calc",
"loro_internal::history_cache::find_text_chunks_in",
"loro_internal::history_cache::find_list_chunks_in",
"loro_internal::import",


@ -9410,7 +9410,7 @@ fn fast_snapshot_5() {
}
#[test]
fn gc_fuzz() {
fn trimmed_fuzz() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9431,7 +9431,7 @@ fn gc_fuzz() {
}
#[test]
fn gc_fuzz_1() {
fn trimmed_fuzz_1() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9455,7 +9455,7 @@ fn gc_fuzz_1() {
}
#[test]
fn gc_fuzz_2() {
fn trimmed_fuzz_2() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9479,7 +9479,7 @@ fn gc_fuzz_2() {
}
#[test]
fn gc_fuzz_3() {
fn trimmed_fuzz_3() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9492,7 +9492,7 @@ fn gc_fuzz_3() {
}
#[test]
fn gc_fuzz_4() {
fn trimmed_fuzz_4() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9531,7 +9531,7 @@ fn gc_fuzz_4() {
}
#[test]
fn gc_fuzz_5() {
fn trimmed_fuzz_5() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9556,7 +9556,7 @@ fn gc_fuzz_5() {
}
#[test]
fn gc_fuzz_6() {
fn trimmed_fuzz_6() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9606,7 +9606,7 @@ fn gc_fuzz_6() {
}
#[test]
fn gc_fuzz_8() {
fn trimmed_fuzz_8() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9656,7 +9656,7 @@ fn gc_fuzz_8() {
}
#[test]
fn gc_fuzz_7() {
fn trimmed_fuzz_7() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9680,7 +9680,7 @@ fn gc_fuzz_7() {
}
#[test]
fn gc_fuzz_9() {
fn trimmed_fuzz_9() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9738,7 +9738,7 @@ fn gc_fuzz_9() {
}
#[test]
fn gc_fuzz_10() {
fn trimmed_fuzz_10() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9792,7 +9792,7 @@ fn gc_fuzz_10() {
}
#[test]
fn gc_arb_test() {
fn trimmed_arb_test() {
fn prop(u: &mut Unstructured<'_>, site_num: u8) -> arbitrary::Result<()> {
let xs = u.arbitrary::<Vec<Action>>()?;
if let Err(e) = std::panic::catch_unwind(|| {
@ -9810,7 +9810,7 @@ fn gc_arb_test() {
}
#[test]
fn gc_fuzz_11() {
fn trimmed_fuzz_11() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9878,7 +9878,7 @@ fn gc_fuzz_11() {
}
#[test]
fn gc_fuzz_12() {
fn trimmed_fuzz_12() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -9945,7 +9945,7 @@ fn gc_fuzz_12() {
}
#[test]
fn gc_fuzz_13() {
fn trimmed_fuzz_13() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10004,7 +10004,7 @@ fn gc_fuzz_13() {
}
#[test]
fn gc_fuzz_14() {
fn trimmed_fuzz_14() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10053,7 +10053,7 @@ fn gc_fuzz_14() {
}
#[test]
fn gc_fuzz_15() {
fn trimmed_fuzz_15() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10130,7 +10130,7 @@ fn gc_fuzz_15() {
}
#[test]
fn gc_fuzz_16() {
fn trimmed_fuzz_16() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10193,7 +10193,7 @@ fn gc_fuzz_16() {
}
#[test]
fn gc_fuzz_17() {
fn trimmed_fuzz_17() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10273,7 +10273,7 @@ fn gc_fuzz_17() {
}
#[test]
fn gc_fuzz_18() {
fn trimmed_fuzz_18() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10367,7 +10367,7 @@ fn gc_fuzz_18() {
}
#[test]
fn gc_fuzz_19() {
fn trimmed_fuzz_19() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10568,7 +10568,7 @@ fn gc_fuzz_19() {
}
#[test]
fn gc_fuzz_20() {
fn trimmed_fuzz_20() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10791,7 +10791,7 @@ fn gc_fuzz_20() {
}
#[test]
fn gc_fuzz_21() {
fn trimmed_fuzz_21() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -10955,7 +10955,7 @@ fn gc_fuzz_21() {
}
#[test]
fn gc_fuzz_22() {
fn trimmed_fuzz_22() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -11049,7 +11049,7 @@ fn gc_fuzz_22() {
}
#[test]
fn gc_fuzz_24() {
fn trimmed_fuzz_24() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -11255,7 +11255,7 @@ fn detached_editing_failed_case_0() {
}
#[test]
fn gc_fuzz_23() {
fn trimmed_fuzz_23() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -11373,7 +11373,7 @@ fn gc_fuzz_23() {
}
#[test]
fn gc_fuzz_25() {
fn trimmed_fuzz_25() {
test_multi_sites_with_gc(
5,
vec![FuzzTarget::All],
@ -11409,7 +11409,7 @@ fn gc_fuzz_25() {
}
#[test]
fn gc_fuzz_unknown() {
fn trimmed_fuzz_unknown() {
test_multi_sites(
5,
vec![FuzzTarget::All],


@ -490,9 +490,11 @@ impl LoroDoc {
})
}
pub fn export_gc_snapshot(&self, frontiers: &Frontiers) -> Vec<u8> {
pub fn export_trimmed_snapshot(&self, frontiers: &Frontiers) -> Vec<u8> {
self.doc
.export(loro::ExportMode::GcSnapshot(Cow::Owned(frontiers.into())))
.export(loro::ExportMode::TrimmedSnapshot(Cow::Owned(
frontiers.into(),
)))
}
pub fn export_state_only(&self, frontiers: Option<Arc<Frontiers>>) -> Vec<u8> {
@ -703,7 +705,7 @@ pub enum ExportMode {
Snapshot,
Updates { from: VersionVector },
UpdatesInRange { spans: Vec<IdSpan> },
GcSnapshot { frontiers: Frontiers },
TrimmedSnapshot { frontiers: Frontiers },
StateOnly { frontiers: Option<Frontiers> },
}
@ -717,8 +719,8 @@ impl From<ExportMode> for loro::ExportMode<'_> {
ExportMode::UpdatesInRange { spans } => loro::ExportMode::UpdatesInRange {
spans: Cow::Owned(spans),
},
ExportMode::GcSnapshot { frontiers } => {
loro::ExportMode::GcSnapshot(Cow::Owned(frontiers.into()))
ExportMode::TrimmedSnapshot { frontiers } => {
loro::ExportMode::TrimmedSnapshot(Cow::Owned(frontiers.into()))
}
ExportMode::StateOnly { frontiers } => {
loro::ExportMode::StateOnly(frontiers.map(|x| Cow::Owned(x.into())))

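As a hedged aside on the hunks above: the binding-layer enum owns its frontiers, so the From impl wraps them in Cow::Owned, while callers that already hold a reference can use the borrowed constructor. A sketch (the helper function names are hypothetical; the variant and constructor are the ones renamed in this commit, and Frontiers is assumed importable from the loro crate as the tests below do):

use std::borrow::Cow;
use loro::{ExportMode, Frontiers};

// Owning path, as in the From impl above.
fn owned_mode(frontiers: Frontiers) -> ExportMode<'static> {
    ExportMode::TrimmedSnapshot(Cow::Owned(frontiers))
}

// Borrowing path, equivalent to ExportMode::TrimmedSnapshot(Cow::Borrowed(..)).
fn borrowed_mode(frontiers: &Frontiers) -> ExportMode<'_> {
    ExportMode::trimmed_snapshot(frontiers)
}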

@ -7,9 +7,9 @@ use std::borrow::Cow;
pub(crate) use encode_reordered::{
decode_op, encode_op, get_op_prop, EncodedDeleteStartId, IterableEncodedDeleteStartId,
};
mod fast_snapshot;
mod gc;
pub(crate) mod json_schema;
mod outdated_fast_snapshot;
mod trimmed_snapshot;
use crate::op::OpWithId;
use crate::version::Frontiers;
@ -27,7 +27,7 @@ pub enum ExportMode<'a> {
Snapshot,
Updates { from: Cow<'a, VersionVector> },
UpdatesInRange { spans: Cow<'a, [IdSpan]> },
GcSnapshot(Cow<'a, Frontiers>),
TrimmedSnapshot(Cow<'a, Frontiers>),
StateOnly(Option<Cow<'a, Frontiers>>),
SnapshotAt { version: Cow<'a, Frontiers> },
}
@ -61,17 +61,17 @@ impl<'a> ExportMode<'a> {
}
}
pub fn gc_snapshot(frontiers: &'a Frontiers) -> Self {
ExportMode::GcSnapshot(Cow::Borrowed(frontiers))
pub fn trimmed_snapshot(frontiers: &'a Frontiers) -> Self {
ExportMode::TrimmedSnapshot(Cow::Borrowed(frontiers))
}
pub fn gc_snapshot_owned(frontiers: Frontiers) -> Self {
ExportMode::GcSnapshot(Cow::Owned(frontiers))
pub fn trimmed_snapshot_owned(frontiers: Frontiers) -> Self {
ExportMode::TrimmedSnapshot(Cow::Owned(frontiers))
}
pub fn gc_snapshot_from_id(id: ID) -> Self {
pub fn trimmed_snapshot_from_id(id: ID) -> Self {
let frontiers = Frontiers::from_id(id);
ExportMode::GcSnapshot(Cow::Owned(frontiers))
ExportMode::TrimmedSnapshot(Cow::Owned(frontiers))
}
pub fn state_only(frontiers: Option<&'a Frontiers>) -> Self {
@ -250,8 +250,10 @@ pub(crate) fn decode_oplog(
EncodeMode::OutdatedRle | EncodeMode::OutdatedSnapshot => {
encode_reordered::decode_updates(oplog, body)
}
EncodeMode::FastSnapshot => fast_snapshot::decode_oplog(oplog, body),
EncodeMode::FastUpdates => fast_snapshot::decode_updates(oplog, body.to_vec().into()),
EncodeMode::FastSnapshot => outdated_fast_snapshot::decode_oplog(oplog, body),
EncodeMode::FastUpdates => {
outdated_fast_snapshot::decode_updates(oplog, body.to_vec().into())
}
EncodeMode::Auto => unreachable!(),
}
}
@ -340,37 +342,37 @@ pub(crate) fn export_snapshot(doc: &LoroDoc) -> Vec<u8> {
pub(crate) fn export_fast_snapshot(doc: &LoroDoc) -> Vec<u8> {
encode_with(EncodeMode::FastSnapshot, &mut |ans| {
fast_snapshot::encode_snapshot(doc, ans);
outdated_fast_snapshot::encode_snapshot(doc, ans);
})
}
pub(crate) fn export_fast_snapshot_at(doc: &LoroDoc, frontiers: &Frontiers) -> Vec<u8> {
encode_with(EncodeMode::FastSnapshot, &mut |ans| {
fast_snapshot::encode_snapshot_at(doc, frontiers, ans).unwrap();
outdated_fast_snapshot::encode_snapshot_at(doc, frontiers, ans).unwrap();
})
}
pub(crate) fn export_fast_updates(doc: &LoroDoc, vv: &VersionVector) -> Vec<u8> {
encode_with(EncodeMode::FastUpdates, &mut |ans| {
fast_snapshot::encode_updates(doc, vv, ans);
outdated_fast_snapshot::encode_updates(doc, vv, ans);
})
}
pub(crate) fn export_fast_updates_in_range(oplog: &OpLog, spans: &[IdSpan]) -> Vec<u8> {
encode_with(EncodeMode::FastUpdates, &mut |ans| {
fast_snapshot::encode_updates_in_range(oplog, spans, ans);
outdated_fast_snapshot::encode_updates_in_range(oplog, spans, ans);
})
}
pub(crate) fn export_gc_snapshot(doc: &LoroDoc, f: &Frontiers) -> Vec<u8> {
pub(crate) fn export_trimmed_snapshot(doc: &LoroDoc, f: &Frontiers) -> Vec<u8> {
encode_with(EncodeMode::FastSnapshot, &mut |ans| {
gc::export_gc_snapshot(doc, f, ans).unwrap();
trimmed_snapshot::export_trimmed_snapshot(doc, f, ans).unwrap();
})
}
pub(crate) fn export_state_only_snapshot(doc: &LoroDoc, f: &Frontiers) -> Vec<u8> {
encode_with(EncodeMode::FastSnapshot, &mut |ans| {
gc::export_state_only_snapshot(doc, f, ans).unwrap();
trimmed_snapshot::export_state_only_snapshot(doc, f, ans).unwrap();
})
}
@ -399,7 +401,9 @@ pub(crate) fn decode_snapshot(
) -> Result<(), LoroError> {
match mode {
EncodeMode::OutdatedSnapshot => encode_reordered::decode_snapshot(doc, body),
EncodeMode::FastSnapshot => fast_snapshot::decode_snapshot(doc, body.to_vec().into()),
EncodeMode::FastSnapshot => {
outdated_fast_snapshot::decode_snapshot(doc, body.to_vec().into())
}
_ => unreachable!(),
}
}

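The three renamed constructors above cover the usual ways of naming the trim point. A hedged usage sketch (the surrounding function is hypothetical; the constructor names are the ones introduced above):

use loro::{ExportMode, ID, LoroDoc};

fn trimmed_exports(doc: &LoroDoc) {
    let f = doc.oplog_frontiers();

    // Borrow frontiers you already hold.
    let _a = doc.export(ExportMode::trimmed_snapshot(&f));

    // Hand over ownership of freshly built frontiers.
    let _b = doc.export(ExportMode::trimmed_snapshot_owned(f.clone()));

    // Shorthand when the trim point is a single ID
    // (assumes the doc actually contains that op).
    let _c = doc.export(ExportMode::trimmed_snapshot_from_id(ID::new(doc.peer_id(), 0)));
}

All three end up in the same TrimmedSnapshot(Cow<..>) variant; the split only exists to avoid cloning frontiers when a borrow is enough.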

@ -15,7 +15,10 @@
//!
use std::io::{Read, Write};
use crate::{encoding::gc, oplog::ChangeStore, version::Frontiers, LoroDoc, OpLog, VersionVector};
use crate::{
encoding::trimmed_snapshot, oplog::ChangeStore, version::Frontiers, LoroDoc, OpLog,
VersionVector,
};
use bytes::{Buf, Bytes};
use loro_common::{IdSpan, LoroEncodeError, LoroError, LoroResult};
use tracing::trace;
@ -26,7 +29,7 @@ pub(crate) const EMPTY_MARK: &[u8] = b"E";
pub(super) struct Snapshot {
pub oplog_bytes: Bytes,
pub state_bytes: Option<Bytes>,
pub gc_bytes: Bytes,
pub trimmed_bytes: Bytes,
}
pub(super) fn _encode_snapshot<W: Write>(s: Snapshot, w: &mut W) {
@ -39,9 +42,9 @@ pub(super) fn _encode_snapshot<W: Write>(s: Snapshot, w: &mut W) {
w.write_all(&(state_bytes.len() as u32).to_le_bytes())
.unwrap();
w.write_all(&state_bytes).unwrap();
w.write_all(&(s.gc_bytes.len() as u32).to_le_bytes())
w.write_all(&(s.trimmed_bytes.len() as u32).to_le_bytes())
.unwrap();
w.write_all(&s.gc_bytes).unwrap();
w.write_all(&s.trimmed_bytes).unwrap();
}
pub(super) fn _decode_snapshot_bytes(bytes: Bytes) -> LoroResult<Snapshot> {
@ -55,12 +58,12 @@ pub(super) fn _decode_snapshot_bytes(bytes: Bytes) -> LoroResult<Snapshot> {
} else {
Some(state_bytes)
};
let gc_bytes_len = read_u32_le(&mut r) as usize;
let gc_bytes = r.get_mut().copy_to_bytes(gc_bytes_len);
let trimmed_bytes_len = read_u32_le(&mut r) as usize;
let trimmed_bytes = r.get_mut().copy_to_bytes(trimmed_bytes_len);
Ok(Snapshot {
oplog_bytes,
state_bytes,
gc_bytes,
trimmed_bytes: trimmed_bytes,
})
}
@ -98,12 +101,12 @@ pub(crate) fn decode_snapshot(doc: &LoroDoc, bytes: Bytes) -> LoroResult<()> {
let Snapshot {
oplog_bytes,
state_bytes,
gc_bytes,
trimmed_bytes,
} = _decode_snapshot_bytes(bytes)?;
oplog.decode_change_store(oplog_bytes)?;
let need_calc = state_bytes.is_none();
let state_frontiers;
if gc_bytes.is_empty() {
if trimmed_bytes.is_empty() {
ensure_cov::notify_cov("loro_internal::import::snapshot::normal");
if let Some(bytes) = state_bytes {
state.store.decode(bytes)?;
@ -111,23 +114,24 @@ pub(crate) fn decode_snapshot(doc: &LoroDoc, bytes: Bytes) -> LoroResult<()> {
state_frontiers = oplog.frontiers().clone();
} else {
ensure_cov::notify_cov("loro_internal::import::snapshot::gc");
let gc_state_frontiers = state
.store
.decode_gc(gc_bytes.clone(), oplog.dag().trimmed_frontiers().clone())?;
let trimmed_state_frontiers = state.store.decode_gc(
trimmed_bytes.clone(),
oplog.dag().trimmed_frontiers().clone(),
)?;
state
.store
.decode_state_by_two_bytes(gc_bytes, state_bytes.unwrap_or_default())?;
.decode_state_by_two_bytes(trimmed_bytes, state_bytes.unwrap_or_default())?;
let gc_store = state.gc_store().cloned();
let trimmed_store = state.trimmed_store().cloned();
oplog.with_history_cache(|h| {
h.set_gc_store(gc_store);
h.set_trimmed_store(trimmed_store);
});
if need_calc {
ensure_cov::notify_cov("gc_snapshot::need_calc");
state_frontiers = gc_state_frontiers.unwrap();
ensure_cov::notify_cov("trimmed_snapshot::need_calc");
state_frontiers = trimmed_state_frontiers.unwrap();
} else {
ensure_cov::notify_cov("gc_snapshot::dont_need_calc");
ensure_cov::notify_cov("trimmed_snapshot::dont_need_calc");
state_frontiers = oplog.frontiers().clone();
}
}
@ -163,13 +167,13 @@ pub(crate) fn encode_snapshot<W: std::io::Write>(doc: &LoroDoc, w: &mut W) {
let was_detached = doc.is_detached();
let mut state = doc.app_state().try_lock().unwrap();
let oplog = doc.oplog().try_lock().unwrap();
let is_gc = state.store.gc_store().is_some();
let is_gc = state.store.trimmed_store().is_some();
if is_gc {
// TODO: PERF: this can be optimized by reusing the bytes of gc store
let f = oplog.trimmed_frontiers().clone();
drop(oplog);
drop(state);
gc::export_gc_snapshot(doc, &f, w).unwrap();
trimmed_snapshot::export_trimmed_snapshot(doc, &f, w).unwrap();
return;
}
assert!(!state.is_in_txn());
@ -195,7 +199,7 @@ pub(crate) fn encode_snapshot<W: std::io::Write>(doc: &LoroDoc, w: &mut W) {
Snapshot {
oplog_bytes,
state_bytes: Some(state_bytes),
gc_bytes: Bytes::new(),
trimmed_bytes: Bytes::new(),
},
w,
);
@ -217,7 +221,7 @@ pub(crate) fn encode_snapshot_at<W: std::io::Write>(
{
let mut state = doc.app_state().try_lock().unwrap();
let oplog = doc.oplog().try_lock().unwrap();
let is_gc = state.store.gc_store().is_some();
let is_gc = state.store.trimmed_store().is_some();
if is_gc {
unimplemented!()
}
@ -243,7 +247,7 @@ pub(crate) fn encode_snapshot_at<W: std::io::Write>(
Snapshot {
oplog_bytes,
state_bytes: Some(state_bytes),
gc_bytes: Bytes::new(),
trimmed_bytes: Bytes::new(),
},
w,
);

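The hunks above keep the snapshot body layout unchanged and only rename the third section from gc_bytes to trimmed_bytes: each visible section is written as a little-endian u32 length followed by that many bytes. A hedged sketch of reading one such section with the bytes crate, mirroring _decode_snapshot_bytes (the helper name is hypothetical):

use bytes::{Buf, Bytes};

// Read a u32 (LE) length prefix, then copy out that many bytes
// from the remaining snapshot body, advancing the cursor.
fn read_section(body: &mut Bytes) -> Bytes {
    let len = body.get_u32_le() as usize;
    body.copy_to_bytes(len)
}

An empty trimmed section (length 0) marks an ordinary snapshot; a non-empty one takes the trimmed import branch in decode_snapshot above.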

@ -7,7 +7,7 @@ use tracing::{debug, trace};
use crate::{
container::list::list_op::InnerListOp,
dag::{Dag, DagUtils},
encoding::fast_snapshot::{Snapshot, _encode_snapshot},
encoding::outdated_fast_snapshot::{Snapshot, _encode_snapshot},
state::container_store::FRONTIERS_KEY,
version::Frontiers,
LoroDoc,
@ -19,13 +19,13 @@ const MAX_OPS_NUM_TO_ENCODE_WITHOUT_LATEST_STATE: usize = 16;
const MAX_OPS_NUM_TO_ENCODE_WITHOUT_LATEST_STATE: usize = 256;
#[tracing::instrument(skip_all)]
pub(crate) fn export_gc_snapshot<W: std::io::Write>(
pub(crate) fn export_trimmed_snapshot<W: std::io::Write>(
doc: &LoroDoc,
start_from: &Frontiers,
w: &mut W,
) -> LoroResult<Frontiers> {
let oplog = doc.oplog().try_lock().unwrap();
let start_from = calc_gc_doc_start(&oplog, start_from);
let start_from = calc_trimmed_doc_start(&oplog, start_from);
let mut start_vv = oplog.dag().frontiers_to_vv(&start_from).unwrap();
for id in start_from.iter() {
// we need to include the ops in start_from, this can make things easier
@ -68,7 +68,7 @@ pub(crate) fn export_gc_snapshot<W: std::io::Write>(
let mut alive_c_bytes: BTreeSet<Vec<u8>> =
alive_containers.iter().map(|x| x.to_bytes()).collect();
state.store.flush();
let gc_state_kv = state.store.get_kv().clone();
let trimmed_state_kv = state.store.get_kv().clone();
drop(state);
doc.checkout_without_emitting(&latest_frontiers).unwrap();
let state_bytes = if ops_num > MAX_OPS_NUM_TO_ENCODE_WITHOUT_LATEST_STATE {
@ -89,21 +89,21 @@ pub(crate) fn export_gc_snapshot<W: std::io::Write>(
}
let new_kv = state.store.get_kv().clone();
new_kv.remove_same(&gc_state_kv);
new_kv.remove_same(&trimmed_state_kv);
new_kv.retain_keys(&alive_c_bytes);
Some(new_kv.export())
} else {
None
};
gc_state_kv.retain_keys(&alive_c_bytes);
gc_state_kv.insert(FRONTIERS_KEY, start_from.encode().into());
let gc_state_bytes = gc_state_kv.export();
trimmed_state_kv.retain_keys(&alive_c_bytes);
trimmed_state_kv.insert(FRONTIERS_KEY, start_from.encode().into());
let trimmed_state_bytes = trimmed_state_kv.export();
let snapshot = Snapshot {
oplog_bytes,
state_bytes,
gc_bytes: gc_state_bytes,
trimmed_bytes: trimmed_state_bytes,
};
_encode_snapshot(snapshot, w);
@ -125,7 +125,7 @@ pub(crate) fn export_state_only_snapshot<W: std::io::Write>(
w: &mut W,
) -> LoroResult<Frontiers> {
let oplog = doc.oplog().try_lock().unwrap();
let start_from = calc_gc_doc_start(&oplog, start_from);
let start_from = calc_trimmed_doc_start(&oplog, start_from);
let mut start_vv = oplog.dag().frontiers_to_vv(&start_from).unwrap();
for id in start_from.iter() {
// we need to include the ops in start_from, this can make things easier
@ -152,18 +152,18 @@ pub(crate) fn export_state_only_snapshot<W: std::io::Write>(
let alive_containers = state.ensure_all_alive_containers();
let alive_c_bytes: BTreeSet<Vec<u8>> = alive_containers.iter().map(|x| x.to_bytes()).collect();
state.store.flush();
let gc_state_kv = state.store.get_kv().clone();
let trimmed_state_kv = state.store.get_kv().clone();
drop(state);
let state_bytes = None;
gc_state_kv.retain_keys(&alive_c_bytes);
gc_state_kv.insert(FRONTIERS_KEY, start_from.encode().into());
let gc_state_bytes = gc_state_kv.export();
// println!("gc_state_bytes.len = {:?}", gc_state_bytes.len());
trimmed_state_kv.retain_keys(&alive_c_bytes);
trimmed_state_kv.insert(FRONTIERS_KEY, start_from.encode().into());
let trimmed_state_bytes = trimmed_state_kv.export();
// println!("trimmed_state_bytes.len = {:?}", trimmed_state_bytes.len());
// println!("oplog_bytes.len = {:?}", oplog_bytes.len());
let snapshot = Snapshot {
oplog_bytes,
state_bytes,
gc_bytes: gc_state_bytes,
trimmed_bytes: trimmed_state_bytes,
};
_encode_snapshot(snapshot, w);
@ -183,7 +183,7 @@ pub(crate) fn export_state_only_snapshot<W: std::io::Write>(
///
/// It should be the LCA of the user given version and the latest version.
/// Otherwise, users cannot replay the history from the initial version till the latest version.
fn calc_gc_doc_start(oplog: &crate::OpLog, frontiers: &Frontiers) -> Frontiers {
fn calc_trimmed_doc_start(oplog: &crate::OpLog, frontiers: &Frontiers) -> Frontiers {
// start is the real start frontiers
let (mut start, _) = oplog
.dag()


@ -232,7 +232,7 @@ impl ContainerHistoryCache {
if for_checkout {
let c = self.for_checkout.as_mut().unwrap();
for (k, v) in m.iter() {
c.map.record_gc_state_entry(*idx, k, v);
c.map.record_trimmed_state_entry(*idx, k, v);
}
}
}
@ -241,7 +241,7 @@ impl ContainerHistoryCache {
if for_checkout {
let c = self.for_checkout.as_mut().unwrap();
let item = l.get_list_item(elem.pos).unwrap();
c.movable_list.record_gc_state(
c.movable_list.record_trimmed_state(
item.id,
idlp.peer,
idlp.lamport.into(),
@ -256,7 +256,7 @@ impl ContainerHistoryCache {
let tree = c.entry(*idx).or_insert_with(|| {
HistoryCacheForImporting::Tree(Default::default())
});
tree.as_tree_mut().unwrap().record_gc_state(
tree.as_tree_mut().unwrap().record_trimmed_state(
t.tree_nodes()
.into_iter()
.map(|node| MoveLamportAndID {
@ -308,8 +308,8 @@ impl ContainerHistoryCache {
self.for_checkout = None;
}
pub(crate) fn set_gc_store(&mut self, gc_store: Option<Arc<GcStore>>) {
self.gc = gc_store;
pub(crate) fn set_trimmed_store(&mut self, trimmed_store: Option<Arc<GcStore>>) {
self.gc = trimmed_store;
}
pub(crate) fn find_text_chunks_in(
@ -517,7 +517,7 @@ impl HistoryCacheTrait for MapHistoryCache {
}
impl MapHistoryCache {
fn record_gc_state_entry(&mut self, idx: ContainerIdx, k: &InternalString, v: &MapValue) {
fn record_trimmed_state_entry(&mut self, idx: ContainerIdx, k: &InternalString, v: &MapValue) {
let key_idx = self.keys.register(k);
self.map.insert(MapHistoryCacheEntry {
container: idx,
@ -638,7 +638,7 @@ impl TreeOpGroup {
&self.tree_for_diff
}
pub(crate) fn record_gc_state(&mut self, nodes: Vec<MoveLamportAndID>) {
pub(crate) fn record_trimmed_state(&mut self, nodes: Vec<MoveLamportAndID>) {
let mut tree = self.tree_for_diff.try_lock().unwrap();
for node in nodes.iter() {
self.ops.insert(
@ -740,7 +740,7 @@ impl HistoryCacheTrait for MovableListHistoryCache {
}
impl MovableListHistoryCache {
pub(crate) fn record_gc_state(
pub(crate) fn record_trimmed_state(
&mut self,
id: IdFull,
elem_peer: PeerID,


@ -33,9 +33,9 @@ use crate::{
diff_calc::DiffCalculator,
encoding::{
decode_snapshot, export_fast_snapshot, export_fast_snapshot_at, export_fast_updates,
export_fast_updates_in_range, export_gc_snapshot, export_snapshot,
export_state_only_snapshot, json_schema::json::JsonSchema, parse_header_and_body,
EncodeMode, ParsedHeaderAndBody,
export_fast_updates_in_range, export_snapshot, export_state_only_snapshot,
export_trimmed_snapshot, json_schema::json::JsonSchema, parse_header_and_body, EncodeMode,
ParsedHeaderAndBody,
},
event::{str_to_path, EventTriggerKind, Index, InternalDocDiff},
handler::{Handler, MovableListHandler, TextHandler, TreeHandler, ValueOrHandler},
@ -105,7 +105,7 @@ impl LoroDoc {
Arc::downgrade(&txn),
config.clone(),
);
let gc = new_state.try_lock().unwrap().gc_store().cloned();
let gc = new_state.try_lock().unwrap().trimmed_store().cloned();
let doc = LoroDoc {
oplog: Arc::new(Mutex::new(self.oplog().try_lock().unwrap().fork(
arena.clone(),
@ -1474,7 +1474,7 @@ impl LoroDoc {
ExportMode::UpdatesInRange { spans } => {
export_fast_updates_in_range(&self.oplog.try_lock().unwrap(), spans.as_ref())
}
ExportMode::GcSnapshot(f) => export_gc_snapshot(self, &f),
ExportMode::TrimmedSnapshot(f) => export_trimmed_snapshot(self, &f),
ExportMode::StateOnly(f) => match f {
Some(f) => export_state_only_snapshot(self, &f),
None => export_state_only_snapshot(self, &self.oplog_frontiers()),


@ -1548,8 +1548,8 @@ impl DocState {
Some(value)
}
pub(crate) fn gc_store(&self) -> Option<&Arc<GcStore>> {
self.store.gc_store()
pub(crate) fn trimmed_store(&self) -> Option<&Arc<GcStore>> {
self.store.trimmed_store()
}
}


@ -38,7 +38,7 @@ mod inner_store;
pub(crate) struct ContainerStore {
arena: SharedArena,
store: InnerStore,
gc_store: Option<Arc<GcStore>>,
trimmed_store: Option<Arc<GcStore>>,
conf: Configure,
peer: Arc<AtomicU64>,
}
@ -73,13 +73,13 @@ impl ContainerStore {
store: InnerStore::new(arena.clone()),
arena,
conf,
gc_store: None,
trimmed_store: None,
peer,
}
}
pub fn can_import_snapshot(&self) -> bool {
if self.gc_store.is_some() {
if self.trimmed_store.is_some() {
return false;
}
@ -99,8 +99,8 @@ impl ContainerStore {
.map(|x| x.get_state(idx, ctx!(self)))
}
pub fn gc_store(&self) -> Option<&Arc<GcStore>> {
self.gc_store.as_ref()
pub fn trimmed_store(&self) -> Option<&Arc<GcStore>> {
self.trimmed_store.as_ref()
}
pub fn get_value(&mut self, idx: ContainerIdx) -> Option<LoroValue> {
@ -118,7 +118,7 @@ impl ContainerStore {
}
pub fn trimmed_frontiers(&self) -> Option<&Frontiers> {
self.gc_store.as_ref().map(|x| &x.trimmed_frontiers)
self.trimmed_store.as_ref().map(|x| &x.trimmed_frontiers)
}
pub(crate) fn decode(&mut self, bytes: Bytes) -> LoroResult<Option<Frontiers>> {
@ -127,13 +127,13 @@ impl ContainerStore {
pub(crate) fn decode_gc(
&mut self,
gc_bytes: Bytes,
trimmed_bytes: Bytes,
start_frontiers: Frontiers,
) -> LoroResult<Option<Frontiers>> {
assert!(self.gc_store.is_none());
assert!(self.trimmed_store.is_none());
let mut inner = InnerStore::new(self.arena.clone());
let f = inner.decode(gc_bytes)?;
self.gc_store = Some(Arc::new(GcStore {
let f = inner.decode(trimmed_bytes)?;
self.trimmed_store = Some(Arc::new(GcStore {
trimmed_frontiers: start_frontiers,
store: Mutex::new(inner),
}));
@ -142,10 +142,11 @@ impl ContainerStore {
pub(crate) fn decode_state_by_two_bytes(
&mut self,
gc_bytes: Bytes,
trimmed_bytes: Bytes,
state_bytes: Bytes,
) -> LoroResult<()> {
self.store.decode_twice(gc_bytes.clone(), state_bytes)?;
self.store
.decode_twice(trimmed_bytes.clone(), state_bytes)?;
Ok(())
}
@ -236,7 +237,7 @@ impl ContainerStore {
arena,
conf: config,
peer,
gc_store: None,
trimmed_store: None,
}
}


@ -1018,7 +1018,7 @@ impl LoroDoc {
/// - `{ mode: "snapshot" }`: Export a full snapshot of the document.
/// - `{ mode: "update", start_vv: VersionVector }`: Export updates from the given version vector.
/// - `{ mode: "updates-in-range", spans: { id: ID, len: number }[] }`: Export updates within the specified ID spans.
/// - `{ mode: "gc-snapshot", frontiers: Frontiers }`: Export a garbage-collected snapshot up to the given frontiers.
/// - `{ mode: "trimmed-snapshot", frontiers: Frontiers }`: Export a garbage-collected snapshot up to the given frontiers.
///
/// @returns A byte array containing the exported data.
///
@ -1038,7 +1038,7 @@ impl LoroDoc {
/// const updateBytes = doc.export({ mode: "update", start_vv: vv });
///
/// // Export a garbage-collected snapshot
/// const gcBytes = doc.export({ mode: "gc-snapshot", frontiers: doc.oplogFrontiers() });
/// const gcBytes = doc.export({ mode: "trimmed-snapshot", frontiers: doc.oplogFrontiers() });
///
/// // Export updates within specific ID spans
/// const spanBytes = doc.export({
@ -4307,7 +4307,7 @@ fn js_to_export_mode(js_mode: JsExportMode) -> JsResult<ExportMode<'static>> {
Ok(ExportMode::updates_owned(start_vv.0.clone()))
}
"snapshot" => Ok(ExportMode::Snapshot),
"gc-snapshot" => {
"trimmed-snapshot" => {
let frontiers: JsValue =
js_sys::Reflect::get(&js_value, &JsValue::from_str("frontiers"))?;
let frontiers: Vec<JsID> = js_sys::try_iter(&frontiers)?
@ -4315,7 +4315,7 @@ fn js_to_export_mode(js_mode: JsExportMode) -> JsResult<ExportMode<'static>> {
.map(|res| res.map(JsID::from))
.collect::<Result<_, _>>()?;
let frontiers = ids_to_frontiers(frontiers)?;
Ok(ExportMode::gc_snapshot_owned(frontiers))
Ok(ExportMode::trimmed_snapshot_owned(frontiers))
}
"updates-in-range" => {
let spans = js_sys::Reflect::get(&js_value, &JsValue::from_str("spans"))?;
@ -4744,7 +4744,7 @@ export type ExportMode = {
} | {
mode: "snapshot",
} | {
mode: "gc-snapshot",
mode: "trimmed-snapshot",
frontiers: Frontiers,
} | {
mode: "updates-in-range",


@ -15,16 +15,16 @@ fn test_gc() -> anyhow::Result<()> {
let frontiers = doc.oplog_frontiers();
gen_action(&doc, 123, 10);
doc.commit();
let gc_bytes = doc.export(loro::ExportMode::gc_snapshot(&frontiers));
let trimmed_bytes = doc.export(loro::ExportMode::trimmed_snapshot(&frontiers));
let new_doc = LoroDoc::new();
new_doc.import(&gc_bytes)?;
new_doc.import(&trimmed_bytes)?;
assert_eq!(doc.get_deep_value(), new_doc.get_deep_value());
Ok(())
}
#[test]
fn test_gc_1() -> anyhow::Result<()> {
fn test_trimmed_1() -> anyhow::Result<()> {
let doc = LoroDoc::new();
doc.set_peer_id(1)?;
doc.get_text("text").insert(0, "1")?;
@ -34,10 +34,10 @@ fn test_gc_1() -> anyhow::Result<()> {
let frontiers = doc.oplog_frontiers();
doc.get_text("text").insert(3, "4")?;
doc.commit();
let gc_bytes = doc.export(loro::ExportMode::gc_snapshot(&frontiers));
let trimmed_bytes = doc.export(loro::ExportMode::trimmed_snapshot(&frontiers));
let new_doc = LoroDoc::new();
new_doc.import(&gc_bytes)?;
new_doc.import(&trimmed_bytes)?;
assert_eq!(doc.get_deep_value(), new_doc.get_deep_value());
Ok(())
}
@ -53,7 +53,7 @@ fn test_checkout_to_text_that_were_created_before_gc() -> anyhow::Result<()> {
doc.commit();
let frontiers = doc.oplog_frontiers();
doc.get_text("text").delete(0, 3)?;
let bytes = doc.export(loro::ExportMode::gc_snapshot(&frontiers));
let bytes = doc.export(loro::ExportMode::trimmed_snapshot(&frontiers));
let new_doc = LoroDoc::new();
new_doc.import(&bytes)?;
new_doc.checkout(&frontiers)?;
@ -72,7 +72,7 @@ fn test_checkout_to_list_that_were_created_before_gc() -> anyhow::Result<()> {
doc.commit();
let frontiers = doc.oplog_frontiers();
doc.get_list("list").delete(0, 3)?;
let bytes = doc.export(loro::ExportMode::gc_snapshot(&frontiers));
let bytes = doc.export(loro::ExportMode::trimmed_snapshot(&frontiers));
let new_doc = LoroDoc::new();
new_doc.import(&bytes)?;
new_doc.checkout(&frontiers)?;
@ -94,7 +94,7 @@ fn test_checkout_to_movable_list_that_were_created_before_gc() -> anyhow::Result
doc.commit();
let frontiers = doc.oplog_frontiers();
doc.get_movable_list("list").delete(0, 3)?;
let bytes = doc.export(loro::ExportMode::gc_snapshot(&frontiers));
let bytes = doc.export(loro::ExportMode::trimmed_snapshot(&frontiers));
let new_doc = LoroDoc::new();
new_doc.import(&bytes)?;
new_doc.checkout(&frontiers)?;
@ -106,12 +106,12 @@ fn test_checkout_to_movable_list_that_were_created_before_gc() -> anyhow::Result
}
#[test]
fn gc_on_the_given_version_when_feasible() -> anyhow::Result<()> {
fn trimmed_on_the_given_version_when_feasible() -> anyhow::Result<()> {
let doc = LoroDoc::new();
doc.set_peer_id(1)?;
gen_action(&doc, 123, 64);
doc.commit();
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(1, 31)));
let bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(1, 31)));
let new_doc = LoroDoc::new();
new_doc.import(&bytes)?;
assert_eq!(new_doc.trimmed_vv().get(&1).copied().unwrap(), 31);
@ -131,8 +131,8 @@ fn export_snapshot_on_a_trimmed_doc() -> anyhow::Result<()> {
gen_action(&doc, 123, 32);
doc.commit();
// Export using GcSnapshot mode
let bytes = doc.export(loro::ExportMode::gc_snapshot(&frontiers));
// Export using TrimmedSnapshot mode
let bytes = doc.export(loro::ExportMode::trimmed_snapshot(&frontiers));
// Import into a new document
let trimmed_doc = LoroDoc::new();
@ -162,7 +162,7 @@ fn test_richtext_gc() -> anyhow::Result<()> {
text.mark(0..2, "bold", "value")?; // 3, 4
doc.commit();
text.insert(3, "456")?; // 5, 6, 7
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(1, 3)));
let bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(1, 3)));
let new_doc = LoroDoc::new();
new_doc.import(&bytes)?;
new_doc.checkout(&Frontiers::from(ID::new(1, 4)))?;
@ -183,7 +183,7 @@ fn import_updates_depend_on_trimmed_history_should_raise_error() -> anyhow::Resu
doc2.commit();
gen_action(&doc, 123, 2);
doc.commit();
let gc_snapshot = doc.export(loro::ExportMode::gc_snapshot(&doc.oplog_frontiers()));
let trimmed_snapshot = doc.export(loro::ExportMode::trimmed_snapshot(&doc.oplog_frontiers()));
doc.get_text("hello").insert(0, "world").unwrap();
doc2.import(&doc.export(loro::ExportMode::Updates {
from: Cow::Borrowed(&doc2.oplog_vv()),
@ -191,7 +191,7 @@ fn import_updates_depend_on_trimmed_history_should_raise_error() -> anyhow::Resu
.unwrap();
let new_doc = LoroDoc::new();
new_doc.import(&gc_snapshot).unwrap();
new_doc.import(&trimmed_snapshot).unwrap();
let ran = Arc::new(AtomicBool::new(false));
let ran_clone = ran.clone();
@ -214,11 +214,11 @@ fn import_updates_depend_on_trimmed_history_should_raise_error() -> anyhow::Resu
}
#[test]
fn the_vv_on_gc_doc() -> anyhow::Result<()> {
fn the_vv_on_trimmed_doc() -> anyhow::Result<()> {
let doc = LoroDoc::new();
gen_action(&doc, 0, 10);
doc.commit();
let snapshot = doc.export(loro::ExportMode::gc_snapshot(&doc.oplog_frontiers()));
let snapshot = doc.export(loro::ExportMode::trimmed_snapshot(&doc.oplog_frontiers()));
let new_doc = LoroDoc::new();
new_doc.import(&snapshot).unwrap();
assert!(!new_doc.trimmed_vv().is_empty());
@ -242,7 +242,7 @@ fn the_vv_on_gc_doc() -> anyhow::Result<()> {
}
#[test]
fn no_event_when_exporting_gc_snapshot() -> anyhow::Result<()> {
fn no_event_when_exporting_trimmed_snapshot() -> anyhow::Result<()> {
let doc = LoroDoc::new();
doc.set_peer_id(1)?;
gen_action(&doc, 0, 10);
@ -250,12 +250,12 @@ fn no_event_when_exporting_gc_snapshot() -> anyhow::Result<()> {
let _id = doc.subscribe_root(Arc::new(|_diff| {
panic!("should not emit event");
}));
let _snapshot = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(1, 3)));
let _snapshot = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(1, 3)));
Ok(())
}
#[test]
fn test_cursor_that_cannot_be_found_when_exporting_gc_snapshot() -> anyhow::Result<()> {
fn test_cursor_that_cannot_be_found_when_exporting_trimmed_snapshot() -> anyhow::Result<()> {
let doc = LoroDoc::new();
doc.set_peer_id(1)?;
doc.get_text("text").insert(0, "Hello world")?;
@ -265,7 +265,7 @@ fn test_cursor_that_cannot_be_found_when_exporting_gc_snapshot() -> anyhow::Resu
.unwrap();
doc.get_text("text").delete(0, 5)?;
doc.commit();
let snapshot = doc.export(loro::ExportMode::gc_snapshot(&doc.oplog_frontiers()));
let snapshot = doc.export(loro::ExportMode::trimmed_snapshot(&doc.oplog_frontiers()));
let new_doc = LoroDoc::new();
new_doc.import(&snapshot)?;
let result = new_doc.get_cursor_pos(&c);
@ -284,7 +284,7 @@ fn test_cursor_that_cannot_be_found_when_exporting_gc_snapshot() -> anyhow::Resu
}
#[test]
fn test_cursor_that_can_be_found_when_exporting_gc_snapshot() -> anyhow::Result<()> {
fn test_cursor_that_can_be_found_when_exporting_trimmed_snapshot() -> anyhow::Result<()> {
let doc = LoroDoc::new();
doc.set_peer_id(1)?;
doc.get_text("text").insert(0, "Hello world")?;
@ -295,7 +295,7 @@ fn test_cursor_that_can_be_found_when_exporting_gc_snapshot() -> anyhow::Result<
.unwrap();
doc.get_text("text").delete(0, 5)?;
doc.commit();
let snapshot = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(1, 10)));
let snapshot = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(1, 10)));
let new_doc = LoroDoc::new();
new_doc.import(&snapshot)?;
let result = new_doc.get_cursor_pos(&c);

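As the tests above show, a doc built from a trimmed snapshot records its trim point: trimmed_vv() on the importing doc is non-empty and reports the counter the history was trimmed at. A hedged restatement as a standalone sketch:

use loro::{ExportMode, ID, LoroDoc};

fn check_trim_point(doc: &LoroDoc) -> anyhow::Result<()> {
    // Trim the history before op 31 of peer 1 (the doc is assumed to contain it).
    let bytes = doc.export(ExportMode::trimmed_snapshot_from_id(ID::new(1, 31)));
    let new_doc = LoroDoc::new();
    new_doc.import(&bytes)?;
    assert_eq!(new_doc.trimmed_vv().get(&1).copied(), Some(31));
    Ok(())
}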

@ -1,12 +1,12 @@
use loro::LoroDoc;
mod detached_editing_test;
mod gc_test;
#[cfg(feature = "jsonpath")]
mod jsonpath_test;
mod snapshot_at_test;
mod undo_test;
mod text_update_test;
mod trimmed_test;
mod undo_test;
fn gen_action(doc: &LoroDoc, seed: u64, mut ops_len: usize) {
let mut rng = StdRng::seed_from_u64(seed);


@ -1024,11 +1024,11 @@ fn apply_random_ops(doc: &LoroDoc, seed: u64, mut op_len: usize) {
}
#[test]
fn test_gc_sync() {
fn test_trimmed_sync() {
let doc = LoroDoc::new();
doc.set_peer_id(1).unwrap();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
let bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(
doc.peer_id(),
10,
)));
@ -1052,10 +1052,10 @@ fn test_gc_sync() {
}
#[test]
fn test_gc_empty() {
fn test_trimmed_empty() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot(&Frontiers::default()));
let bytes = doc.export(loro::ExportMode::trimmed_snapshot(&Frontiers::default()));
let new_doc = LoroDoc::new();
new_doc.import(&bytes).unwrap();
assert_eq!(doc.get_deep_value(), new_doc.get_deep_value());
@ -1071,10 +1071,10 @@ fn test_gc_empty() {
}
#[test]
fn test_gc_import_outdated_updates() {
fn test_trimmed_import_outdated_updates() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
let bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(
doc.peer_id(),
5,
)));
@ -1090,10 +1090,10 @@ fn test_gc_import_outdated_updates() {
}
#[test]
fn test_gc_import_pending_updates_that_is_outdated() {
fn test_trimmed_import_pending_updates_that_is_outdated() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
let bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(
doc.peer_id(),
5,
)));
@ -1113,10 +1113,10 @@ fn test_gc_import_pending_updates_that_is_outdated() {
}
#[test]
fn test_calling_exporting_snapshot_on_gc_doc() {
fn test_calling_exporting_snapshot_on_trimmed_doc() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
let bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(
doc.peer_id(),
5,
)));
@ -1133,7 +1133,7 @@ fn test_calling_exporting_snapshot_on_gc_doc() {
fn sync_two_trimmed_docs() {
let doc = LoroDoc::new();
apply_random_ops(&doc, 123, 11);
let bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
let bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(
doc.peer_id(),
10,
)));
@ -1169,7 +1169,7 @@ fn test_map_checkout_on_trimmed_doc() {
doc.get_map("map").insert("3", 3).unwrap();
doc.get_map("map").insert("2", 4).unwrap();
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
let new_doc_bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(
doc.peer_id(),
1,
)));
@ -1282,7 +1282,7 @@ fn test_movable_list_checkout_on_trimmed_doc() -> LoroResult<()> {
list.mov(1, 0)?;
list.delete(0, 1)?;
list.set(0, 0)?;
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
let new_doc_bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(
doc.peer_id(),
2,
)));
@ -1330,7 +1330,7 @@ fn test_tree_checkout_on_trimmed_doc() -> LoroResult<()> {
let child2 = tree.create(None).unwrap();
tree.mov(child2, root)?;
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(
let new_doc_bytes = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(
doc.peer_id(),
1,
)));
@ -1471,17 +1471,17 @@ fn test_tree_with_other_ops_checkout_on_trimmed_doc() -> LoroResult<()> {
map.insert("0", 0)?;
map.insert("1", 1)?;
doc.commit();
let gc_frontiers = doc.oplog_frontiers();
let trimmed_frontiers = doc.oplog_frontiers();
map.insert("2", 2)?;
tree.mov(child2, child1)?;
tree.delete(child1)?;
let new_doc_bytes = doc.export(loro::ExportMode::gc_snapshot(&gc_frontiers));
let new_doc_bytes = doc.export(loro::ExportMode::trimmed_snapshot(&trimmed_frontiers));
let new_doc = LoroDoc::new();
new_doc.import(&new_doc_bytes).unwrap();
new_doc.checkout(&gc_frontiers)?;
new_doc.checkout(&trimmed_frontiers)?;
let value = new_doc.get_deep_value();
assert_eq!(
value,
@ -1529,7 +1529,7 @@ fn test_tree_with_other_ops_checkout_on_trimmed_doc() -> LoroResult<()> {
}
#[test]
fn test_gc_can_remove_unreachable_states() -> LoroResult<()> {
fn test_trimmed_can_remove_unreachable_states() -> LoroResult<()> {
let doc = LoroDoc::new();
doc.set_peer_id(1)?;
let map = doc.get_map("map");
@ -1568,7 +1568,7 @@ fn test_gc_can_remove_unreachable_states() -> LoroResult<()> {
doc.checkout_to_latest();
{
let snapshot = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(1, 3)));
let snapshot = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(1, 3)));
let new_doc = LoroDoc::new();
new_doc.import(&snapshot)?;
let a = new_doc.analyze();
@ -1581,7 +1581,7 @@ fn test_gc_can_remove_unreachable_states() -> LoroResult<()> {
}
{
let snapshot = doc.export(loro::ExportMode::gc_snapshot_from_id(ID::new(1, 4)));
let snapshot = doc.export(loro::ExportMode::trimmed_snapshot_from_id(ID::new(1, 4)));
let new_doc = LoroDoc::new();
new_doc.import(&snapshot)?;
assert_eq!(new_doc.analyze().dropped_len(), 0);


@ -502,11 +502,11 @@ describe("export", () => {
expect(doc2.toJSON()).toStrictEqual({ text: "123" });
})
it("test export gc-snapshot", () => {
it("test export trimmed-snapshot", () => {
const doc = new LoroDoc();
doc.getText("text").insert(0, "123");
doc.commit();
const snapshot = doc.export({ mode: "gc-snapshot", frontiers: doc.oplogFrontiers() });
const snapshot = doc.export({ mode: "trimmed-snapshot", frontiers: doc.oplogFrontiers() });
const doc2 = new LoroDoc();
doc2.import(snapshot);
expect(doc2.toJSON()).toStrictEqual({ text: "123" });


@ -1,13 +1,13 @@
import { describe, expect, expectTypeOf, it } from "vitest";
import {
Container,
getType,
isContainer,
LoroDoc,
LoroList,
LoroMap,
LoroText,
LoroTree,
Container,
getType,
isContainer,
LoroDoc,
LoroList,
LoroMap,
LoroText,
LoroTree,
} from "../src";
describe("gc", () => {
@ -17,11 +17,11 @@ describe("gc", () => {
doc.getList("list").insert(0, "A");
doc.getList("list").insert(1, "B");
doc.getList("list").insert(2, "C");
const bytes = doc.export({ mode: "gc-snapshot", frontiers: doc.oplogFrontiers() });
const bytes = doc.export({ mode: "trimmed-snapshot", frontiers: doc.oplogFrontiers() });
const newDoc = new LoroDoc();
newDoc.import(bytes);
expect(newDoc.toJSON()).toEqual(doc.toJSON());
doc.getList("list").delete(1, 1); // Delete "B"
doc.getMap("map").set("key", "value"); // Add a new key-value pair to a map
@ -38,12 +38,12 @@ describe("gc", () => {
const docB = doc.fork();
const v = docB.version();
docB.getList("list").insert(1, "C");
const updates = docB.export({mode: "update", start_vv: v});
const updates = docB.export({ mode: "update", start_vv: v });
doc.getList("list").insert(1, "B");
doc.getList("list").insert(2, "C");
doc.commit();
const bytes = doc.export({ mode: "gc-snapshot", frontiers: doc.oplogFrontiers() });
const bytes = doc.export({ mode: "trimmed-snapshot", frontiers: doc.oplogFrontiers() });
const gcDoc = new LoroDoc();
gcDoc.import(bytes);