feat: decode import blob meta (#307)

* feat: decode import blob meta

* chore: rm logs
Zixuan Chen 2024-04-01 17:25:43 +08:00 committed by GitHub
parent 3c124f454b
commit 3da28459b0
11 changed files with 448 additions and 51 deletions


@@ -389,7 +389,6 @@ dependencies = [
"append-only-bytes",
"arbitrary",
"arref",
"debug-log",
"enum-as-inner 0.5.1",
"enum_dispatch",
"fxhash",
@@ -414,6 +413,7 @@ dependencies = [
"smallvec",
"tabled",
"thiserror",
"tracing",
]
[[package]]
@@ -432,7 +432,6 @@ version = "0.2.0"
dependencies = [
"append-only-bytes",
"arref",
"debug-log",
"enum-as-inner 0.6.0",
"fxhash",
"num",
@@ -598,6 +597,12 @@ dependencies = [
"siphasher",
]
[[package]]
name = "pin-project-lite"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02"
[[package]]
name = "postcard"
version = "1.0.8"
@@ -921,6 +926,37 @@ dependencies = [
"syn 2.0.50",
]
[[package]]
name = "tracing"
version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.50",
]
[[package]]
name = "tracing-core"
version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
dependencies = [
"once_cell",
]
[[package]]
name = "typenum"
version = "1.17.0"


@@ -133,15 +133,6 @@ dependencies = [
"syn 2.0.25",
]
[[package]]
name = "debug-log"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "caf861b629ec23fc562cccc8b47cc98a54d192ecf4e7b575ce30659d8c814c3a"
dependencies = [
"once_cell",
]
[[package]]
name = "derive_arbitrary"
version = "1.3.1"
@@ -348,6 +339,7 @@ dependencies = [
"enum-as-inner 0.6.0",
"fxhash",
"loro-rle",
"nonmax",
"serde",
"serde_columnar",
"string_cache",
@@ -361,7 +353,6 @@ dependencies = [
"append-only-bytes",
"arbitrary",
"arref",
"debug-log",
"enum-as-inner 0.5.1",
"enum_dispatch",
"fxhash",
@@ -386,6 +377,7 @@ dependencies = [
"smallvec",
"tabled",
"thiserror",
"tracing",
]
[[package]]
@@ -412,7 +404,6 @@ version = "0.2.0"
dependencies = [
"append-only-bytes",
"arref",
"debug-log",
"enum-as-inner 0.6.0",
"fxhash",
"num",
@@ -437,6 +428,12 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
[[package]]
name = "nonmax"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "610a5acd306ec67f907abe5567859a3c693fb9886eb1f012ab8f2a47bef3db51"
[[package]]
name = "num"
version = "0.4.0"
@@ -573,6 +570,12 @@ dependencies = [
"siphasher",
]
[[package]]
name = "pin-project-lite"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02"
[[package]]
name = "postcard"
version = "1.0.2"
@@ -895,6 +898,37 @@ dependencies = [
"syn 2.0.25",
]
[[package]]
name = "tracing"
version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
dependencies = [
"pin-project-lite",
"tracing-attributes",
"tracing-core",
]
[[package]]
name = "tracing-attributes"
version = "0.1.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.25",
]
[[package]]
name = "tracing-core"
version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
dependencies = [
"once_cell",
]
[[package]]
name = "typenum"
version = "1.16.0"


@@ -1,11 +1,13 @@
mod encode_reordered;
use crate::op::OpWithId;
use crate::version::Frontiers;
use crate::LoroDoc;
use crate::{oplog::OpLog, LoroError, VersionVector};
use loro_common::{IdLpSpan, LoroResult};
use num_traits::{FromPrimitive, ToPrimitive};
use rle::{HasLength, Sliceable};
use serde::{Deserialize, Serialize};
const MAGIC_BYTES: [u8; 4] = *b"loro";
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -230,3 +232,31 @@ pub(crate) fn decode_snapshot(
_ => unreachable!(),
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImportBlobMetadata {
/// The partial start version vector.
///
/// The import blob includes all the ops from `partial_start_vv` to `partial_end_vv`.
/// However, this field does not constitute a complete version vector, as it only
/// contains counters for the peers included within the import blob.
pub partial_start_vv: VersionVector,
/// The partial end version vector.
///
/// The import blob includes all the ops from `partial_start_vv` to `partial_end_vv`.
/// However, this field does not constitute a complete version vector, as it only
/// contains counters for the peers included within the import blob.
pub partial_end_vv: VersionVector,
pub start_timestamp: i64,
pub start_frontiers: Frontiers,
pub end_timestamp: i64,
pub change_num: u32,
pub is_snapshot: bool,
}
impl LoroDoc {
/// Decodes the metadata for an imported blob from the provided bytes.
pub fn decode_import_blob_meta(blob: &[u8]) -> LoroResult<ImportBlobMetadata> {
encode_reordered::decode_import_blob_meta(blob)
}
}
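Taken together, the hunk above gives `LoroDoc` a public way to peek at a blob before deciding whether to import it. A minimal usage sketch, assuming `LoroDoc` is reachable at the crate root (as the `use crate::LoroDoc` import suggests); the printed summary is illustrative only:

use loro_internal::LoroDoc;

/// Peek at an exported blob without mutating any document.
fn describe_blob(blob: &[u8]) {
    let meta = LoroDoc::decode_import_blob_meta(blob).expect("not a valid loro blob");
    // `partial_start_vv` / `partial_end_vv` only cover peers present in the
    // blob, so they are not complete version vectors.
    println!(
        "snapshot: {}, changes: {}, partial end vv: {:?}",
        meta.is_snapshot, meta.change_num, meta.partial_end_vv
    );
}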


@@ -13,7 +13,7 @@ use serde_columnar::columnar;
use crate::{
arena::SharedArena,
change::{Change, Lamport},
change::{Change, Lamport, Timestamp},
container::{idx::ContainerIdx, list::list_op::DeleteSpanWithId, richtext::TextStyleInfoFlag},
encoding::{
encode_reordered::value::{ValueKind, ValueWriter},
@@ -31,6 +31,8 @@ use self::{
value::ValueReader,
};
use super::{parse_header_and_body, ImportBlobMetadata};
/// If any section of the document is longer than this, we will not decode it.
/// It will return a data corruption error instead.
const MAX_DECODED_SIZE: usize = 1 << 30;
@@ -39,6 +41,24 @@ const MAX_DECODED_SIZE: usize = 1 << 30;
const MAX_COLLECTION_SIZE: usize = 1 << 28;
pub(crate) fn encode_updates(oplog: &OpLog, vv: &VersionVector) -> Vec<u8> {
// skip the ops that the current oplog does not have
let actual_start_vv: VersionVector = vv
.iter()
.filter_map(|(&peer, &end_counter)| {
if end_counter == 0 {
return None;
}
let this_end = oplog.vv().get(&peer).cloned().unwrap_or(0);
if this_end <= end_counter {
return Some((peer, this_end));
}
Some((peer, end_counter))
})
.collect();
let vv = &actual_start_vv;
let mut peer_register: ValueRegister<PeerID> = ValueRegister::new();
let mut key_register: ValueRegister<InternalString> = ValueRegister::new();
let (start_counters, diff_changes) = init_encode(oplog, vv, &mut peer_register);
@@ -91,7 +111,8 @@ pub(crate) fn encode_updates(oplog: &OpLog, vv: &VersionVector) -> Vec<u8> {
);
let frontiers = oplog
.frontiers()
.dag
.vv_to_frontiers(&actual_start_vv)
.iter()
.map(|x| (peer_register.register(&x.peer), x.counter))
.collect();
@@ -109,7 +130,7 @@ pub(crate) fn encode_updates(oplog: &OpLog, vv: &VersionVector) -> Vec<u8> {
dep_arena,
&[],
)),
frontiers,
start_frontiers: frontiers,
};
serde_columnar::to_vec(&doc).unwrap()
@@ -147,6 +168,62 @@ pub(crate) fn decode_updates(oplog: &mut OpLog, bytes: &[u8]) -> LoroResult<()>
Ok(())
}
pub fn decode_import_blob_meta(bytes: &[u8]) -> LoroResult<ImportBlobMetadata> {
let parsed = parse_header_and_body(bytes)?;
let is_snapshot = parsed.mode.is_snapshot();
let iterators = serde_columnar::iter_from_bytes::<EncodedDoc>(parsed.body)?;
let DecodedArenas { peer_ids, .. } = decode_arena(&iterators.arenas)?;
let start_vv: VersionVector = iterators
.start_counters
.iter()
.enumerate()
.filter_map(|(peer_idx, counter)| {
if *counter == 0 {
None
} else {
Some(ID::new(peer_ids.peer_ids[peer_idx], *counter - 1))
}
})
.collect();
let frontiers = iterators
.start_frontiers
.iter()
.map(|x| ID::new(peer_ids.peer_ids[x.0], x.1))
.collect();
let mut end_vv_counters = iterators.start_counters;
let mut change_num = 0;
let mut start_timestamp = Timestamp::MAX;
let mut end_timestamp = Timestamp::MIN;
for EncodedChange {
peer_idx,
len,
timestamp,
..
} in iterators.changes
{
end_vv_counters[peer_idx] += len as Counter;
start_timestamp = start_timestamp.min(timestamp);
end_timestamp = end_timestamp.max(timestamp);
change_num += 1;
}
Ok(ImportBlobMetadata {
is_snapshot,
start_frontiers: frontiers,
partial_start_vv: start_vv,
partial_end_vv: VersionVector::from_iter(
end_vv_counters
.iter()
.enumerate()
.map(|(peer_idx, counter)| ID::new(peer_ids.peer_ids[peer_idx], *counter - 1)),
),
start_timestamp,
end_timestamp,
change_num,
})
}
fn import_changes_to_oplog(
changes: Vec<Change>,
oplog: &mut OpLog,
@@ -459,11 +536,6 @@ pub(crate) fn encode_snapshot(oplog: &OpLog, state: &DocState, vv: &VersionVecto
&mut key_register,
);
let frontiers = oplog
.frontiers()
.iter()
.map(|x| (peer_register.register(&x.peer), x.counter))
.collect();
let doc = EncodedDoc {
ops: encoded_ops,
delete_starts: del_starts,
@@ -478,7 +550,7 @@ pub(crate) fn encode_snapshot(oplog: &OpLog, state: &DocState, vv: &VersionVecto
dep_arena,
&state_bytes,
)),
frontiers,
start_frontiers: Vec::new(),
};
serde_columnar::to_vec(&doc).unwrap()
@@ -641,19 +713,6 @@ pub(crate) fn decode_snapshot(doc: &LoroDoc, bytes: &[u8]) -> LoroResult<()> {
deps,
state_blob_arena,
} = decode_arena(&iter.arenas)?;
let frontiers: Frontiers = iter
.frontiers
.iter()
.map(|x| {
let peer = peer_ids
.peer_ids
.get(x.0)
.ok_or(LoroError::DecodeDataCorruptionError)?;
let ans: Result<ID, LoroError> = Ok(ID::new(*peer, x.1));
ans
})
.try_collect()?;
let ExtractedOps {
ops_map,
mut ops,
@@ -679,7 +738,7 @@ pub(crate) fn decode_snapshot(doc: &LoroDoc, bytes: &[u8]) -> LoroResult<()> {
decode_snapshot_states(
&mut state,
frontiers,
oplog.frontiers().clone(),
iter.states,
containers,
state_blob_arena,
@@ -1023,7 +1082,7 @@ mod encode {
peer_register: &mut ValueRegister<PeerID>,
) -> (Vec<i32>, Vec<Cow<'a, Change>>) {
let self_vv = oplog.vv();
let start_vv = vv.trim(&oplog.vv());
let start_vv = vv.trim(oplog.vv());
let mut start_counters = Vec::new();
let mut diff_changes: Vec<Cow<'a, Change>> = Vec::new();
@@ -1362,7 +1421,10 @@ struct EncodedDoc<'a> {
states: Vec<EncodedStateInfo>,
/// The first counter value for each change of each peer in `changes`
start_counters: Vec<Counter>,
frontiers: Vec<(PeerIdx, Counter)>,
/// The frontiers at the start of this encoded delta.
///
/// It's empty when the encoding mode is snapshot.
start_frontiers: Vec<(PeerIdx, Counter)>,
#[columnar(borrow)]
raw_values: Cow<'a, [u8]>,
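Note that `decode_import_blob_meta` above never materializes the ops: it replays only the change table, adding each change's `len` onto the per-peer start counters. A self-contained sketch of that bookkeeping (`end_counters` is a hypothetical helper, not part of this commit):

/// Exclusive end counter per peer: the start counter plus the summed lengths
/// of that peer's changes. The last op *included* for a peer is therefore
/// `end_counter - 1`, which is why the decoder builds IDs with `*counter - 1`.
fn end_counters(start_counters: &[i32], changes: &[(usize, i32)]) -> Vec<i32> {
    let mut end = start_counters.to_vec();
    for &(peer_idx, len) in changes {
        end[peer_idx] += len;
    }
    end
}

fn main() {
    // One peer starting at counter 3 with changes of len 2 and 1: its ops
    // span counters 3..=5, so the exclusive end counter is 6.
    assert_eq!(end_counters(&[3], &[(0, 2), (0, 1)]), vec![6]);
}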


@@ -24,7 +24,7 @@ pub mod change;
pub mod configure;
pub mod container;
pub mod dag;
mod encoding;
pub mod encoding;
pub mod id;
pub mod op;
pub mod version;


@@ -750,7 +750,7 @@ impl VersionVector {
shrink_frontiers(last_ids, dag)
}
pub(crate) fn trim(&self, vv: &&VersionVector) -> VersionVector {
pub(crate) fn trim(&self, vv: &VersionVector) -> VersionVector {
let mut ans = VersionVector::new();
for (client_id, &counter) in self.iter() {
if let Some(&other_counter) = vv.get(client_id) {
@@ -800,6 +800,10 @@ fn shrink_frontiers(mut last_ids: Vec<ID>, dag: &AppDag) -> Frontiers {
let mut frontiers = Frontiers::default();
let mut frontiers_vv = Vec::new();
if last_ids.is_empty() {
return frontiers;
}
if last_ids.len() == 1 {
frontiers.push(last_ids[0]);
return frontiers;
@@ -857,7 +861,11 @@ impl From<Vec<ID>> for VersionVector {
impl FromIterator<ID> for VersionVector {
fn from_iter<T: IntoIterator<Item = ID>>(iter: T) -> Self {
let mut vv = VersionVector::new();
let iter = iter.into_iter();
let mut vv = VersionVector(FxHashMap::with_capacity_and_hasher(
iter.size_hint().0,
Default::default(),
));
for id in iter {
vv.set_last(id);
}
@@ -866,6 +874,12 @@ impl FromIterator<ID> for VersionVector {
}
}
impl FromIterator<(PeerID, Counter)> for VersionVector {
fn from_iter<T: IntoIterator<Item = (PeerID, Counter)>>(iter: T) -> Self {
VersionVector(FxHashMap::from_iter(iter))
}
}
// Note: It will be encoded into binary format, so the order of its fields should not be changed.
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Serialize, Deserialize)]
pub(crate) struct TotalOrderStamp {
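The two `FromIterator` impls intentionally differ: collecting `(PeerID, Counter)` pairs copies the counters into the map verbatim, while collecting `ID`s goes through `set_last`, which stores the exclusive end counter (`id.counter + 1`, matching the `*counter - 1` conversion in the decoder). A minimal sketch, assuming the `PeerID = u64` / `Counter = i32` aliases from `loro_common` and that `VersionVector` derefs to its inner map:

use loro_internal::VersionVector;

fn main() {
    // Collect (peer, counter) pairs straight into a version vector.
    let vv: VersionVector = [(1u64, 3i32), (2, 6)].into_iter().collect();
    assert_eq!(vv.get(&1), Some(&3));
    assert_eq!(vv.get(&2), Some(&6));
}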


@@ -2,12 +2,16 @@ use std::sync::Arc;
use js_sys::{Array, Object, Reflect, Uint8Array};
use loro_internal::delta::{DeltaItem, ResolvedMapDelta};
use loro_internal::encoding::ImportBlobMetadata;
use loro_internal::event::Diff;
use loro_internal::handler::{Handler, ValueOrHandler};
use loro_internal::{LoroDoc, LoroValue};
use wasm_bindgen::JsValue;
use crate::{Container, JsContainer, LoroList, LoroMap, LoroText, LoroTree};
use crate::{
frontiers_to_ids, Container, JsContainer, JsImportBlobMetadata, LoroList, LoroMap, LoroText,
LoroTree,
};
use wasm_bindgen::__rt::IntoJsResult;
use wasm_bindgen::convert::RefFromWasmAbi;
@@ -198,6 +202,35 @@ pub fn convert(value: LoroValue) -> JsValue {
}
}
impl From<ImportBlobMetadata> for JsImportBlobMetadata {
fn from(meta: ImportBlobMetadata) -> Self {
let start_vv = super::VersionVector(meta.partial_start_vv);
let end_vv = super::VersionVector(meta.partial_end_vv);
let start_vv: JsValue = start_vv.into();
let end_vv: JsValue = end_vv.into();
let start_timestamp: JsValue = JsValue::from_f64(meta.start_timestamp as f64);
let end_timestamp: JsValue = JsValue::from_f64(meta.end_timestamp as f64);
let is_snapshot: JsValue = JsValue::from_bool(meta.is_snapshot);
let change_num: JsValue = JsValue::from_f64(meta.change_num as f64);
let ans = Object::new();
js_sys::Reflect::set(
&ans,
&JsValue::from_str("partialStartVersionVector"),
&start_vv,
)
.unwrap();
js_sys::Reflect::set(&ans, &JsValue::from_str("partialEndVersionVector"), &end_vv).unwrap();
let js_frontiers: JsValue = frontiers_to_ids(&meta.start_frontiers).into();
js_sys::Reflect::set(&ans, &JsValue::from_str("startFrontiers"), &js_frontiers).unwrap();
js_sys::Reflect::set(&ans, &JsValue::from_str("startTimestamp"), &start_timestamp).unwrap();
js_sys::Reflect::set(&ans, &JsValue::from_str("endTimestamp"), &end_timestamp).unwrap();
js_sys::Reflect::set(&ans, &JsValue::from_str("isSnapshot"), &is_snapshot).unwrap();
js_sys::Reflect::set(&ans, &JsValue::from_str("changeNum"), &change_num).unwrap();
let ans: JsValue = ans.into();
ans.into()
}
}
fn map_delta_to_js(value: &ResolvedMapDelta, doc: &Arc<LoroDoc>) -> JsValue {
let obj = Object::new();
for (key, value) in value.updated.iter() {
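The conversion assembles a plain object field by field with `js_sys::Reflect::set`, then casts it to the typed `JsImportBlobMetadata` wrapper through `JsValue`. A stripped-down sketch of the pattern (the `put` helper is hypothetical):

use js_sys::{Object, Reflect};
use wasm_bindgen::JsValue;

// Hypothetical helper condensing the repeated Reflect::set calls above.
fn put(obj: &Object, key: &str, value: &JsValue) {
    Reflect::set(obj, &JsValue::from_str(key), value).unwrap();
}

fn build_meta_object(change_num: u32, is_snapshot: bool) -> JsValue {
    let obj = Object::new();
    // Numbers cross the wasm boundary as f64, hence the casts.
    put(&obj, "changeNum", &JsValue::from_f64(change_num as f64));
    put(&obj, "isSnapshot", &JsValue::from_bool(is_snapshot));
    obj.into()
}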


@@ -5,6 +5,7 @@ use loro_internal::{
change::Lamport,
configure::{StyleConfig, StyleConfigMap},
container::{richtext::ExpandType, ContainerID},
encoding::ImportBlobMetadata,
event::Index,
handler::{
Handler, ListHandler, MapHandler, TextDelta, TextHandler, TreeHandler, ValueOrHandler,
@@ -70,6 +71,8 @@ extern "C" {
pub type JsOrigin;
#[wasm_bindgen(typescript_type = "{ peer: PeerID, counter: number }")]
pub type JsID;
#[wasm_bindgen(typescript_type = "{ peer: PeerID, counter: number }[]")]
pub type JsIDs;
#[wasm_bindgen(typescript_type = "{ start: number, end: number }")]
pub type JsRange;
#[wasm_bindgen(typescript_type = "number|bool|string|null")]
@@ -118,6 +121,8 @@ extern "C" {
pub type JsPartialOrd;
#[wasm_bindgen(typescript_type = "'Tree'|'Map'|'List'|'Text'")]
pub type JsContainerKind;
#[wasm_bindgen(typescript_type = "ImportBlobMetadata")]
pub type JsImportBlobMetadata;
}
mod observer {
@@ -173,17 +178,18 @@ fn js_id_to_id(id: JsID) -> Result<ID, JsValue> {
Ok(id)
}
fn frontiers_to_ids(frontiers: &Frontiers) -> Vec<JsID> {
let mut ans = Vec::with_capacity(frontiers.len());
fn frontiers_to_ids(frontiers: &Frontiers) -> JsIDs {
let js_arr = Array::new();
for id in frontiers.iter() {
let obj = Object::new();
Reflect::set(&obj, &"peer".into(), &id.peer.to_string().into()).unwrap();
Reflect::set(&obj, &"counter".into(), &id.counter.into()).unwrap();
let value: JsValue = obj.into_js_result().unwrap();
ans.push(value.into());
js_arr.push(&value);
}
ans
let value: JsValue = js_arr.into();
value.into()
}
fn js_value_to_container_id(
@@ -655,7 +661,7 @@ impl Loro {
///
/// If you check out a specific version, this value will change.
#[inline]
pub fn frontiers(&self) -> Vec<JsID> {
pub fn frontiers(&self) -> JsIDs {
frontiers_to_ids(&self.0.state_frontiers())
}
@@ -663,7 +669,8 @@ impl Loro {
///
/// If you check out a specific version, this value will not change.
#[inline(always)]
pub fn oplog_frontiers(&self) -> Vec<JsID> {
#[wasm_bindgen(js_name = "oplogFrontiers")]
pub fn oplog_frontiers(&self) -> JsIDs {
frontiers_to_ids(&self.0.oplog_frontiers())
}
@@ -1061,7 +1068,7 @@ impl Loro {
/// const frontiers = doc.vvToFrontiers(version);
/// ```
#[wasm_bindgen(js_name = "vvToFrontiers")]
pub fn vv_to_frontiers(&self, vv: &VersionVector) -> JsResult<Vec<JsID>> {
pub fn vv_to_frontiers(&self, vv: &VersionVector) -> JsResult<JsIDs> {
let f = self.0.oplog().lock().unwrap().dag().vv_to_frontiers(&vv.0);
Ok(frontiers_to_ids(&f))
}
@@ -2614,6 +2621,22 @@ impl Container {
}
}
/// Decode the metadata of the import blob.
///
/// This method is useful for inspecting the following metadata of the import blob:
///
/// - partialStartVersionVector
/// - partialEndVersionVector
/// - startFrontiers
/// - startTimestamp
/// - endTimestamp
/// - isSnapshot
/// - changeNum
#[wasm_bindgen(js_name = "decodeImportBlobMeta")]
pub fn decode_import_blob_meta(blob: &[u8]) -> JsResult<JsImportBlobMetadata> {
let meta: ImportBlobMetadata = LoroDoc::decode_import_blob_meta(blob)?;
Ok(meta.into())
}
#[wasm_bindgen(typescript_custom_section)]
const TYPES: &'static str = r#"
/**
@@ -2730,4 +2753,29 @@ export type Value =
| Value[];
export type Container = LoroList | LoroMap | LoroText | LoroTree;
export interface ImportBlobMetadata {
/**
* The version vector of the start of the import.
*
* The import blob includes all the ops from `partialStartVersionVector` to
* `partialEndVersionVector`. However, this field does not constitute a complete
* version vector, as it only contains counters for the peers included within
* the import blob.
*/
partialStartVersionVector: VersionVector;
/**
* The version vector of the end of the import.
*
* The import blob includes all the ops from `partialStartVersionVector` to
* `partialEndVersionVector`. However, this field does not constitute a complete
* version vector, as it only contains counters for the peers included within
* the import blob.
*/
partialEndVersionVector: VersionVector;
startFrontiers: OpId[];
startTimestamp: number;
endTimestamp: number;
isSnapshot: boolean;
changeNum: number;
}
"#;


@@ -3,6 +3,7 @@ use either::Either;
use event::{DiffEvent, Subscriber};
use loro_internal::change::Timestamp;
use loro_internal::container::IntoContainerId;
use loro_internal::encoding::ImportBlobMetadata;
use loro_internal::handler::HandlerTrait;
use loro_internal::handler::ValueOrHandler;
use loro_internal::LoroDoc as InnerLoroDoc;
@@ -58,6 +59,11 @@ impl LoroDoc {
self.doc.config()
}
/// Decodes the metadata for an imported blob from the provided bytes.
pub fn decode_import_blob_meta(bytes: &[u8]) -> LoroResult<ImportBlobMetadata> {
InnerLoroDoc::decode_import_blob_meta(bytes)
}
/// Set whether to record the timestamp of each change. Default is `false`.
///
/// If enabled, the Unix timestamp will be recorded for each change automatically.


@@ -1,7 +1,7 @@
use std::{cmp::Ordering, sync::Arc};
use loro::{FrontiersNotIncluded, LoroDoc, LoroError, LoroList, LoroMap, LoroText, ToJson};
use loro_internal::{handler::TextDelta, id::ID, LoroResult};
use loro_internal::{handler::TextDelta, id::ID, vv, LoroResult};
use serde_json::json;
#[test]
@@ -439,6 +439,72 @@ fn prelim_support() -> LoroResult<()> {
Ok(())
}
#[test]
fn decode_import_blob_meta() -> LoroResult<()> {
let doc_1 = LoroDoc::new();
doc_1.set_peer_id(1)?;
doc_1.get_text("text").insert(0, "123")?;
{
let bytes = doc_1.export_from(&Default::default());
let meta = LoroDoc::decode_import_blob_meta(&bytes).unwrap();
assert!(meta.partial_start_vv.is_empty());
assert_eq!(meta.partial_end_vv, vv!(1 => 3));
assert_eq!(meta.start_timestamp, 0);
assert_eq!(meta.end_timestamp, 0);
assert!(!meta.is_snapshot);
assert!(meta.start_frontiers.is_empty());
assert_eq!(meta.change_num, 1);
let bytes = doc_1.export_snapshot();
let meta = LoroDoc::decode_import_blob_meta(&bytes).unwrap();
assert!(meta.partial_start_vv.is_empty());
assert_eq!(meta.partial_end_vv, vv!(1 => 3));
assert_eq!(meta.start_timestamp, 0);
assert_eq!(meta.end_timestamp, 0);
assert!(meta.is_snapshot);
assert!(meta.start_frontiers.is_empty());
assert_eq!(meta.change_num, 1);
}
let doc_2 = LoroDoc::new();
doc_2.set_peer_id(2)?;
doc_2.import(&doc_1.export_snapshot()).unwrap();
doc_2.get_text("text").insert(0, "123")?;
doc_2.get_text("text").insert(0, "123")?;
{
let bytes = doc_2.export_from(&doc_1.oplog_vv());
let meta = LoroDoc::decode_import_blob_meta(&bytes).unwrap();
assert_eq!(meta.partial_start_vv, vv!());
assert_eq!(meta.partial_end_vv, vv!(2 => 6));
assert_eq!(meta.start_timestamp, 0);
assert_eq!(meta.end_timestamp, 0);
assert!(!meta.is_snapshot);
assert_eq!(meta.start_frontiers, vec![ID::new(1, 2)].into());
assert_eq!(meta.change_num, 1);
let bytes = doc_2.export_from(&vv!(1 => 1));
let meta = LoroDoc::decode_import_blob_meta(&bytes).unwrap();
assert_eq!(meta.partial_start_vv, vv!(1 => 1));
assert_eq!(meta.partial_end_vv, vv!(1 => 3, 2 => 6));
assert_eq!(meta.start_timestamp, 0);
assert_eq!(meta.end_timestamp, 0);
assert!(!meta.is_snapshot);
assert_eq!(meta.start_frontiers, vec![ID::new(1, 0)].into());
assert_eq!(meta.change_num, 2);
let bytes = doc_2.export_snapshot();
let meta = LoroDoc::decode_import_blob_meta(&bytes).unwrap();
assert_eq!(meta.partial_start_vv, vv!());
assert_eq!(meta.partial_end_vv, vv!(1 => 3, 2 => 6));
assert_eq!(meta.start_timestamp, 0);
assert_eq!(meta.end_timestamp, 0);
assert!(meta.is_snapshot);
assert!(meta.start_frontiers.is_empty());
assert_eq!(meta.change_num, 2);
}
Ok(())
}
#[test]
fn init_example() {
// create meta/users/0/new_user/{name: string, bio: Text}


@@ -1,5 +1,11 @@
import { describe, expect, it } from "vitest";
import { Loro, LoroMap, OpId, VersionVector } from "../src";
import {
decodeImportBlobMeta,
Loro,
LoroMap,
OpId,
VersionVector,
} from "../src";
describe("Frontiers", () => {
it("two clients", () => {
@@ -134,3 +140,65 @@ describe("Version", () => {
expect(change.length).toBe(1);
});
});
it("get import blob metadata", () => {
const doc0 = new Loro();
doc0.setPeerId(0n);
const text = doc0.getText("text");
text.insert(0, "0");
doc0.commit();
{
const bytes = doc0.exportFrom();
const meta = decodeImportBlobMeta(bytes);
expect(meta.changeNum).toBe(1);
expect(meta.partialStartVersionVector.get("0")).toBeFalsy();
expect(meta.partialEndVersionVector.get("0")).toBe(1);
expect(meta.startTimestamp).toBe(0);
expect(meta.endTimestamp).toBe(0);
expect(meta.isSnapshot).toBeFalsy();
console.log(meta.startFrontiers);
expect(meta.startFrontiers.length).toBe(0);
}
const doc1 = new Loro();
doc1.setPeerId(1);
doc1.getText("text").insert(0, "123");
doc1.import(doc0.exportFrom());
{
const bytes = doc1.exportFrom();
const meta = decodeImportBlobMeta(bytes);
expect(meta.changeNum).toBe(2);
expect(meta.partialStartVersionVector.get("0")).toBeFalsy();
expect(meta.partialEndVersionVector.get("0")).toBe(1);
expect(meta.partialEndVersionVector.get("1")).toBe(3);
expect(meta.startTimestamp).toBe(0);
expect(meta.endTimestamp).toBe(0);
expect(meta.isSnapshot).toBeFalsy();
expect(meta.startFrontiers.length).toBe(0);
}
{
const bytes = doc1.exportSnapshot();
const meta = decodeImportBlobMeta(bytes);
expect(meta.changeNum).toBe(2);
expect(meta.partialStartVersionVector.get("0")).toBeFalsy();
expect(meta.partialEndVersionVector.get("0")).toBe(1);
expect(meta.partialEndVersionVector.get("1")).toBe(3);
expect(meta.startTimestamp).toBe(0);
expect(meta.endTimestamp).toBe(0);
expect(meta.isSnapshot).toBeTruthy();
expect(meta.startFrontiers.length).toBe(0);
}
{
const bytes = doc1.exportFrom(doc0.oplogVersion());
const meta = decodeImportBlobMeta(bytes);
expect(meta.changeNum).toBe(1);
expect(meta.partialStartVersionVector.get("0")).toBeUndefined();
expect(meta.partialStartVersionVector.get("1")).toBeFalsy();
expect(meta.partialEndVersionVector.get("0")).toBeUndefined();
expect(meta.partialEndVersionVector.get("1")).toBe(3);
expect(meta.startTimestamp).toBe(0);
expect(meta.endTimestamp).toBe(0);
expect(meta.isSnapshot).toBeFalsy();
expect(meta.startFrontiers).toStrictEqual([{ peer: "0", counter: 0 }]);
}
});