feat: add import status (#494)

This commit is contained in:
Leon Zhao 2024-10-05 08:18:12 +08:00 committed by GitHub
parent e1bf4a858d
commit 405cbe047c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
16 changed files with 261 additions and 109 deletions

View file

@ -8,7 +8,7 @@ use std::{
use arbitrary::Arbitrary;
use fxhash::FxHashSet;
use loro::{ContainerType, Frontiers, LoroError, LoroResult};
use loro::{ContainerType, Frontiers, ImportStatus, LoroError, LoroResult};
use tabled::TableIteratorExt;
use tracing::{info, info_span, trace};
@ -271,7 +271,7 @@ impl CRDTFuzzer {
}
}
fn handle_import_result(e: LoroResult<()>) {
fn handle_import_result(e: LoroResult<ImportStatus>) {
match e {
Ok(_) => {}
Err(LoroError::ImportUpdatesThatDependsOnOutdatedVersion) => {

View file

@ -590,7 +590,8 @@ impl TryFrom<&str> for TreeID {
#[cfg(feature = "wasm")]
pub mod wasm {
use crate::{LoroError, TreeID};
use crate::{IdSpanVector, LoroError, TreeID};
use js_sys::Map;
use wasm_bindgen::JsValue;
impl From<TreeID> for JsValue {
fn from(value: TreeID) -> Self {

View file

@ -149,6 +149,11 @@ impl CounterSpan {
}
}
/// Extend this span so it also covers the range `[new_start, new_end)`.
///
/// NOTE(review): delegates to `set_start`/`set_end` (not visible here);
/// assumes those keep the widest bounds (min start / max end) rather than
/// plainly overwriting — confirm against their implementations, since an
/// overwrite would shrink the span instead of extending it.
pub fn extend_include(&mut self, new_start: Counter, new_end: Counter) {
self.set_start(new_start);
self.set_end(new_end);
}
/// if we can merge element on the left, this method return the last atom of it
fn prev_pos(&self) -> i32 {
if self.start < self.end {
@ -509,6 +514,29 @@ impl From<ID> for IdSpan {
}
}
#[cfg(feature = "wasm")]
mod wasm {
use js_sys::Object;
use wasm_bindgen::JsValue;
use super::CounterSpan;
/// Convert a `CounterSpan` into a plain JS object `{ start, end }`.
impl From<CounterSpan> for JsValue {
fn from(value: CounterSpan) -> Self {
let obj = Object::new();
// Reflect::set only errors when the target is not an object;
// `obj` is a freshly created Object, so these unwraps cannot panic.
js_sys::Reflect::set(
&obj,
&JsValue::from_str("start"),
&JsValue::from(value.start),
)
.unwrap();
js_sys::Reflect::set(&obj, &JsValue::from_str("end"), &JsValue::from(value.end))
.unwrap();
obj.into()
}
}
}
#[cfg(test)]
mod test_id_span {
use super::*;

View file

@ -6,9 +6,9 @@ use std::{
};
use loro::{
cursor::CannotFindRelativePosition, DocAnalysis, FrontiersNotIncluded, IdSpan, JsonPathError,
JsonSchema, Lamport, LoroDoc as InnerLoroDoc, LoroEncodeError, LoroError, LoroResult, PeerID,
Timestamp, ID,
cursor::CannotFindRelativePosition, DocAnalysis, FrontiersNotIncluded, IdSpan, ImportStatus,
JsonPathError, JsonSchema, Lamport, LoroDoc as InnerLoroDoc, LoroEncodeError, LoroError,
LoroResult, PeerID, Timestamp, ID,
};
use crate::{
@ -253,7 +253,7 @@ impl LoroDoc {
/// Import updates/snapshot exported by [`LoroDoc::export_snapshot`] or [`LoroDoc::export_from`].
#[inline]
pub fn import(&self, bytes: &[u8]) -> Result<(), LoroError> {
pub fn import(&self, bytes: &[u8]) -> Result<ImportStatus, LoroError> {
self.doc.import_with(bytes, "")
}
@ -262,11 +262,11 @@ impl LoroDoc {
/// It marks the import with a custom `origin` string. It can be used to track the import source
/// in the generated events.
#[inline]
pub fn import_with(&self, bytes: &[u8], origin: &str) -> Result<(), LoroError> {
pub fn import_with(&self, bytes: &[u8], origin: &str) -> Result<ImportStatus, LoroError> {
self.doc.import_with(bytes, origin)
}
pub fn import_json_updates(&self, json: &str) -> Result<(), LoroError> {
pub fn import_json_updates(&self, json: &str) -> Result<ImportStatus, LoroError> {
self.doc.import_json_updates(json)
}

View file

@ -8,13 +8,17 @@ pub(crate) mod value_register;
pub(crate) use outdated_encode_reordered::{
decode_op, encode_op, get_op_prop, EncodedDeleteStartId, IterableEncodedDeleteStartId,
};
use outdated_encode_reordered::{import_changes_to_oplog, ImportChangesResult};
pub(crate) use value::OwnedValue;
use crate::op::OpWithId;
use crate::version::Frontiers;
use crate::version::{Frontiers, VersionRange, VersionVectorDiff};
use crate::LoroDoc;
use crate::{oplog::OpLog, LoroError, VersionVector};
use loro_common::{IdLpSpan, IdSpan, LoroEncodeError, LoroResult, PeerID, ID};
use loro_common::{
CounterSpan, HasCounter, HasCounterSpan, IdLpSpan, IdSpan, IdSpanVector, LoroEncodeError,
LoroResult, PeerID, ID,
};
use num_traits::{FromPrimitive, ToPrimitive};
use rle::{HasLength, Sliceable};
use serde::{Deserialize, Serialize};
@ -166,6 +170,12 @@ impl TryFrom<[u8; 2]> for EncodeMode {
}
}
/// Outcome of importing updates/snapshots into a document's oplog.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct ImportStatus {
/// Id spans (per peer) that were applied by this import.
pub success: IdSpanVector,
/// Id spans buffered because their dependencies have not arrived yet;
/// `None` when nothing was left pending.
pub pending: Option<IdSpanVector>,
}
/// The encoder used to encode the container states.
///
/// Each container state can be represented by a sequence of operations.
@ -243,16 +253,44 @@ pub(crate) fn encode_oplog(oplog: &OpLog, vv: &VersionVector, mode: EncodeMode)
pub(crate) fn decode_oplog(
oplog: &mut OpLog,
parsed: ParsedHeaderAndBody,
) -> Result<(), LoroError> {
) -> Result<ImportStatus, LoroError> {
let before_vv = oplog.vv().clone();
let ParsedHeaderAndBody { mode, body, .. } = parsed;
match mode {
let changes = match mode {
EncodeMode::OutdatedRle | EncodeMode::OutdatedSnapshot => {
outdated_encode_reordered::decode_updates(oplog, body)
}
EncodeMode::FastSnapshot => fast_snapshot::decode_oplog(oplog, body),
EncodeMode::FastUpdates => fast_snapshot::decode_updates(oplog, body.to_vec().into()),
EncodeMode::Auto => unreachable!(),
}?;
let ImportChangesResult {
latest_ids,
pending_changes,
changes_that_deps_on_trimmed_history,
} = import_changes_to_oplog(changes, oplog);
let mut pending = IdSpanVector::default();
pending_changes.iter().for_each(|c| {
let peer = c.id.peer;
let start = c.ctr_start();
let end = c.ctr_end();
pending
.entry(peer)
.or_insert_with(|| CounterSpan::new(start, end))
.extend_include(start, end);
});
// TODO: PERF: should we use hashmap to filter latest_ids with the same peer first?
oplog.try_apply_pending(latest_ids);
oplog.import_unknown_lamport_pending_changes(pending_changes)?;
let after_vv = oplog.vv();
if !changes_that_deps_on_trimmed_history.is_empty() {
return Err(LoroError::ImportUpdatesThatDependsOnOutdatedVersion);
}
Ok(ImportStatus {
success: before_vv.diff(after_vv).right,
pending: (!pending.is_empty()).then_some(pending),
})
}
pub(crate) struct ParsedHeaderAndBody<'a> {
@ -427,12 +465,16 @@ pub(crate) fn decode_snapshot(
doc: &LoroDoc,
mode: EncodeMode,
body: &[u8],
) -> Result<(), LoroError> {
) -> Result<ImportStatus, LoroError> {
match mode {
EncodeMode::OutdatedSnapshot => outdated_encode_reordered::decode_snapshot(doc, body),
EncodeMode::FastSnapshot => fast_snapshot::decode_snapshot(doc, body.to_vec().into()),
_ => unreachable!(),
}
};
Ok(ImportStatus {
success: doc.oplog_vv().diff(&Default::default()).left,
pending: None,
})
}
#[derive(Debug, Clone, Serialize, Deserialize)]

View file

@ -15,7 +15,9 @@
//!
use std::io::{Read, Write};
use crate::{encoding::trimmed_snapshot, oplog::ChangeStore, LoroDoc, OpLog, VersionVector};
use crate::{
change::Change, encoding::trimmed_snapshot, oplog::ChangeStore, LoroDoc, OpLog, VersionVector,
};
use bytes::{Buf, Bytes};
use loro_common::{IdSpan, LoroError, LoroResult};
use tracing::trace;
@ -208,7 +210,7 @@ pub(crate) fn encode_snapshot<W: std::io::Write>(doc: &LoroDoc, w: &mut W) {
}
}
pub(crate) fn decode_oplog(oplog: &mut OpLog, bytes: &[u8]) -> Result<(), LoroError> {
pub(crate) fn decode_oplog(oplog: &mut OpLog, bytes: &[u8]) -> Result<Vec<Change>, LoroError> {
let oplog_len = u32::from_le_bytes(bytes[0..4].try_into().unwrap());
let oplog_bytes = &bytes[4..4 + oplog_len as usize];
let mut changes = ChangeStore::decode_snapshot_for_updates(
@ -217,18 +219,7 @@ pub(crate) fn decode_oplog(oplog: &mut OpLog, bytes: &[u8]) -> Result<(), LoroEr
oplog.vv(),
)?;
changes.sort_unstable_by_key(|x| x.lamport);
let ImportChangesResult {
latest_ids,
pending_changes,
changes_that_deps_on_trimmed_history,
} = import_changes_to_oplog(changes, oplog);
// TODO: PERF: should we use hashmap to filter latest_ids with the same peer first?
oplog.try_apply_pending(latest_ids);
oplog.import_unknown_lamport_pending_changes(pending_changes)?;
if !changes_that_deps_on_trimmed_history.is_empty() {
return Err(LoroError::ImportUpdatesThatDependsOnOutdatedVersion);
}
Ok(())
Ok(changes)
}
pub(crate) fn encode_updates<W: std::io::Write>(doc: &LoroDoc, vv: &VersionVector, w: &mut W) {
@ -244,7 +235,7 @@ pub(crate) fn encode_updates_in_range<W: std::io::Write>(
oplog.export_blocks_in_range(spans, w);
}
pub(crate) fn decode_updates(oplog: &mut OpLog, body: Bytes) -> Result<(), LoroError> {
pub(crate) fn decode_updates(oplog: &mut OpLog, body: Bytes) -> Result<Vec<Change>, LoroError> {
let mut reader: &[u8] = body.as_ref();
let mut index = 0;
let self_vv = oplog.vv();
@ -263,16 +254,5 @@ pub(crate) fn decode_updates(oplog: &mut OpLog, body: Bytes) -> Result<(), LoroE
}
changes.sort_unstable_by_key(|x| x.lamport);
let ImportChangesResult {
latest_ids,
pending_changes,
changes_that_deps_on_trimmed_history,
} = import_changes_to_oplog(changes, oplog);
// TODO: PERF: should we use hashmap to filter latest_ids with the same peer first?
oplog.try_apply_pending(latest_ids);
oplog.import_unknown_lamport_pending_changes(pending_changes)?;
if !changes_that_deps_on_trimmed_history.is_empty() {
return Err(LoroError::ImportUpdatesThatDependsOnOutdatedVersion);
}
Ok(())
Ok(changes)
}

View file

@ -2,8 +2,8 @@ use std::sync::Arc;
use either::Either;
use loro_common::{
ContainerID, ContainerType, HasCounterSpan, IdLp, LoroError, LoroResult, LoroValue, PeerID,
TreeID, ID,
ContainerID, ContainerType, CounterSpan, HasCounter, HasCounterSpan, IdLp, IdSpanVector,
LoroError, LoroResult, LoroValue, PeerID, TreeID, ID,
};
use rle::{HasLength, RleVec, Sliceable};
@ -22,8 +22,9 @@ use crate::{
OpLog, VersionVector,
};
use super::outdated_encode_reordered::{
import_changes_to_oplog, ImportChangesResult, ValueRegister,
use super::{
outdated_encode_reordered::{import_changes_to_oplog, ImportChangesResult, ValueRegister},
ImportStatus,
};
use json::{JsonOpContent, JsonSchema};
@ -66,20 +67,34 @@ pub(crate) fn export_json<'a, 'c: 'a>(
}
}
pub(crate) fn import_json(oplog: &mut OpLog, json: JsonSchema) -> LoroResult<()> {
pub(crate) fn import_json(oplog: &mut OpLog, json: JsonSchema) -> LoroResult<ImportStatus> {
let before_vv = oplog.vv().clone();
let changes = decode_changes(json, &oplog.arena)?;
let ImportChangesResult {
latest_ids,
pending_changes,
changes_that_deps_on_trimmed_history,
} = import_changes_to_oplog(changes, oplog);
let mut pending = IdSpanVector::default();
pending_changes.iter().for_each(|c| {
let peer = c.id.peer;
let start = c.ctr_start();
let end = c.ctr_end();
pending
.entry(peer)
.or_insert_with(|| CounterSpan::new(start, end))
.extend_include(start, end);
});
oplog.try_apply_pending(latest_ids);
oplog.import_unknown_lamport_pending_changes(pending_changes)?;
if changes_that_deps_on_trimmed_history.is_empty() {
Ok(())
} else {
Err(LoroError::ImportUpdatesThatDependsOnOutdatedVersion)
}
if !changes_that_deps_on_trimmed_history.is_empty() {
return Err(LoroError::ImportUpdatesThatDependsOnOutdatedVersion);
};
let after_vv = oplog.vv();
Ok(ImportStatus {
success: before_vv.diff(after_vv).right,
pending: (!pending.is_empty()).then_some(pending),
})
}
fn init_encode<'s, 'a: 's>(

View file

@ -30,6 +30,7 @@ use crate::{
use self::encode::{encode_changes, encode_ops, init_encode, TempOp};
use super::ImportStatus;
use super::{
arena::*,
parse_header_and_body,
@ -136,7 +137,7 @@ pub(crate) fn encode_updates(oplog: &OpLog, vv: &VersionVector) -> Vec<u8> {
}
#[instrument(skip_all)]
pub(crate) fn decode_updates(oplog: &mut OpLog, bytes: &[u8]) -> LoroResult<()> {
pub(crate) fn decode_updates(oplog: &mut OpLog, bytes: &[u8]) -> LoroResult<Vec<Change>> {
let iter = serde_columnar::iter_from_bytes::<EncodedDoc>(bytes)?;
let mut arenas = decode_arena(&iter.arenas)?;
let ops_map = extract_ops(
@ -163,19 +164,8 @@ pub(crate) fn decode_updates(oplog: &mut OpLog, bytes: &[u8]) -> LoroResult<()>
deps,
ops_map,
)?;
let ImportChangesResult {
latest_ids,
pending_changes,
changes_that_deps_on_trimmed_history,
} = import_changes_to_oplog(changes, oplog);
// TODO: PERF: should we use hashmap to filter latest_ids with the same peer first?
oplog.try_apply_pending(latest_ids);
oplog.import_unknown_lamport_pending_changes(pending_changes)?;
if !changes_that_deps_on_trimmed_history.is_empty() {
return Err(LoroError::ImportUpdatesThatDependsOnOutdatedVersion);
}
Ok(())
Ok(changes)
}
pub fn decode_import_blob_meta(bytes: &[u8]) -> LoroResult<ImportBlobMetadata> {
@ -734,7 +724,11 @@ pub(crate) fn decode_snapshot(doc: &LoroDoc, bytes: &[u8]) -> LoroResult<()> {
doc.update_oplog_and_apply_delta_to_state_if_needed(
|oplog| {
oplog.try_apply_pending(latest_ids);
Ok(())
// ImportStatus is unnecessary
Ok(ImportStatus {
success: Default::default(),
pending: None,
})
},
"".into(),
)?;

View file

@ -35,7 +35,8 @@ use crate::{
encoding::{
decode_snapshot, export_fast_snapshot, export_fast_updates, export_fast_updates_in_range,
export_snapshot, export_snapshot_at, export_state_only_snapshot, export_trimmed_snapshot,
json_schema::json::JsonSchema, parse_header_and_body, EncodeMode, ParsedHeaderAndBody,
json_schema::json::JsonSchema, parse_header_and_body, EncodeMode, ImportStatus,
ParsedHeaderAndBody,
},
event::{str_to_path, EventTriggerKind, Index, InternalDocDiff},
handler::{Handler, MovableListHandler, TextHandler, TreeHandler, ValueOrHandler},
@ -419,14 +420,18 @@ impl LoroDoc {
}
#[inline(always)]
pub fn import(&self, bytes: &[u8]) -> Result<(), LoroError> {
pub fn import(&self, bytes: &[u8]) -> Result<ImportStatus, LoroError> {
let s = debug_span!("import", peer = self.peer_id());
let _e = s.enter();
self.import_with(bytes, Default::default())
}
#[inline]
pub fn import_with(&self, bytes: &[u8], origin: InternalString) -> Result<(), LoroError> {
pub fn import_with(
&self,
bytes: &[u8],
origin: InternalString,
) -> Result<ImportStatus, LoroError> {
self.commit_then_stop();
let ans = self._import_with(bytes, origin);
self.renew_txn_if_auto_commit();
@ -434,7 +439,11 @@ impl LoroDoc {
}
#[tracing::instrument(skip_all)]
fn _import_with(&self, bytes: &[u8], origin: InternalString) -> Result<(), LoroError> {
fn _import_with(
&self,
bytes: &[u8],
origin: InternalString,
) -> Result<ImportStatus, LoroError> {
ensure_cov::notify_cov("loro_internal::import");
let parsed = parse_header_and_body(bytes)?;
info!("Importing with mode={:?}", &parsed.mode);
@ -499,9 +508,9 @@ impl LoroDoc {
#[tracing::instrument(skip_all)]
pub(crate) fn update_oplog_and_apply_delta_to_state_if_needed(
&self,
f: impl FnOnce(&mut OpLog) -> Result<(), LoroError>,
f: impl FnOnce(&mut OpLog) -> Result<ImportStatus, LoroError>,
origin: InternalString,
) -> Result<(), LoroError> {
) -> Result<ImportStatus, LoroError> {
let mut oplog = self.oplog.try_lock().unwrap();
let old_vv = oplog.vv().clone();
let old_frontiers = oplog.frontiers().clone();
@ -566,7 +575,7 @@ impl LoroDoc {
///
/// only supports backward compatibility but not forward compatibility.
#[tracing::instrument(skip_all)]
pub fn import_json_updates<T: TryInto<JsonSchema>>(&self, json: T) -> LoroResult<()> {
pub fn import_json_updates<T: TryInto<JsonSchema>>(&self, json: T) -> LoroResult<ImportStatus> {
let json = json.try_into().map_err(|_| LoroError::InvalidJsonSchema)?;
self.commit_then_stop();
let result = self.update_oplog_and_apply_delta_to_state_if_needed(
@ -988,7 +997,9 @@ impl LoroDoc {
let mut err = None;
for data in bytes.iter() {
match self.import(data) {
Ok(_) => {}
Ok(_s) => {
// TODO: merge
}
Err(e) => {
err = Some(e);
}
@ -1145,7 +1156,7 @@ impl LoroDoc {
/// Import ops from other doc.
///
/// After `a.merge(b)` and `b.merge(a)`, `a` and `b` will have the same content if they are in attached mode.
pub fn merge(&self, other: &Self) -> LoroResult<()> {
pub fn merge(&self, other: &Self) -> LoroResult<ImportStatus> {
self.import(&other.export_from(&self.oplog_vv()))
}

View file

@ -18,8 +18,8 @@ use crate::configure::Configure;
use crate::container::list::list_op;
use crate::dag::{Dag, DagUtils};
use crate::diff_calc::DiffMode;
use crate::encoding::ParsedHeaderAndBody;
use crate::encoding::{decode_oplog, encode_oplog, EncodeMode};
use crate::encoding::{ImportStatus, ParsedHeaderAndBody};
use crate::history_cache::ContainerHistoryCache;
use crate::id::{Counter, PeerID, ID};
use crate::op::{FutureInnerContent, ListSlice, RawOpContent, RemoteOp, RichOp};
@ -395,7 +395,7 @@ impl OpLog {
}
#[inline(always)]
pub(crate) fn decode(&mut self, data: ParsedHeaderAndBody) -> Result<(), LoroError> {
pub(crate) fn decode(&mut self, data: ParsedHeaderAndBody) -> Result<ImportStatus, LoroError> {
decode_oplog(self, data)
}

View file

@ -1,11 +1,16 @@
use std::sync::{atomic::AtomicBool, Arc, Mutex};
use fxhash::FxHashMap;
use loro_common::{ContainerID, ContainerType, LoroError, LoroResult, LoroValue, PeerID, ID};
use loro_common::{
ContainerID, ContainerType, CounterSpan, LoroError, LoroResult, LoroValue, PeerID, ID,
};
use loro_internal::{
delta::ResolvedMapValue,
encoding::ImportStatus,
event::{Diff, EventTriggerKind},
fx_map,
handler::{Handler, TextDelta, ValueOrHandler},
loro::ExportMode,
version::Frontiers,
ApplyDiff, HandlerTrait, ListHandler, LoroDoc, MapHandler, TextHandler, ToJson, TreeHandler,
TreeParentId,
@ -474,19 +479,6 @@ fn test_checkout() {
);
}
#[test]
fn import() {
let doc = LoroDoc::new();
doc.import(&[
108, 111, 114, 111, 0, 0, 10, 10, 255, 255, 68, 255, 255, 4, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 255, 255, 108, 111, 114, 111, 255, 255, 0, 255, 207, 207, 255, 255, 255, 255,
255,
])
.unwrap_or_default();
}
#[test]
fn test_timestamp() {
let doc = LoroDoc::new();
@ -1271,3 +1263,40 @@ fn set_max_peer_id() {
Result::Err(LoroError::InvalidPeerID)
);
}
// Verifies that `LoroDoc::import` reports which op spans were applied
// (`success`) and which were buffered (`pending`) when updates arrive
// out of dependency order.
#[test]
fn import_status() -> LoroResult<()> {
let doc = LoroDoc::new_auto_commit();
doc.set_peer_id(0)?;
let t = doc.get_text("text");
t.insert(0, "a")?;
// doc2 (peer 1) produces two batches: `update1` is a snapshot with "b",
// `update2` contains only the later "c" op, which depends on update1.
let doc2 = LoroDoc::new_auto_commit();
doc2.set_peer_id(1)?;
let t2 = doc2.get_text("text");
t2.insert(0, "b")?;
doc2.commit_then_renew();
let update1 = doc2.export_snapshot().unwrap();
let vv1 = doc2.oplog_vv();
t2.insert(1, "c")?;
let update2 = doc2.export(ExportMode::updates(&vv1)).unwrap();
// Importing update2 first: its dependency is missing, so nothing is
// applied and peer 1's counter span [1, 2) is reported as pending.
let status1 = doc.import(&update2)?;
// Importing update1 then applies both its own ops and the previously
// pending ones: peer 1's span [0, 2) succeeds, nothing stays pending.
let status2 = doc.import(&update1)?;
assert_eq!(
status1,
ImportStatus {
success: Default::default(),
pending: Some(fx_map!(1=>CounterSpan::new(1, 2)))
}
);
assert_eq!(
status2,
ImportStatus {
success: fx_map!(1=>CounterSpan::new(0, 2)),
pending: None
}
);
Ok(())
}

View file

@ -1,10 +1,11 @@
use std::sync::Arc;
use js_sys::{Array, Object, Reflect, Uint8Array};
use js_sys::{Array, Map, Object, Reflect, Uint8Array};
use loro_internal::delta::ResolvedMapDelta;
use loro_internal::encoding::ImportBlobMetadata;
use loro_internal::encoding::{ImportBlobMetadata, ImportStatus};
use loro_internal::event::Diff;
use loro_internal::handler::{Handler, ValueOrHandler};
use loro_internal::loro_common::IdSpanVector;
use loro_internal::{ListDiffItem, LoroDoc, LoroValue};
use wasm_bindgen::JsValue;
@ -375,3 +376,31 @@ pub(crate) fn handler_to_js_value(handler: Handler, doc: Option<Arc<LoroDoc>>) -
Handler::Unknown(_) => unreachable!(),
}
}
/// Convert an [`ImportStatus`] into a JS object of the shape
/// `{ success: Map<PeerID, CounterSpan>, pending: Map<PeerID, CounterSpan> | null }`.
pub(crate) fn import_status_to_js_value(status: ImportStatus) -> JsValue {
    let obj = Object::new();
    // `Reflect::set` only errors on non-objects; `obj` is a fresh Object,
    // so these unwraps cannot panic.
    let success = id_span_vector_to_js_value(status.success);
    js_sys::Reflect::set(&obj, &JsValue::from_str("success"), &success).unwrap();
    // A missing pending set is surfaced to JS as `null`.
    let pending = match status.pending {
        Some(spans) => id_span_vector_to_js_value(spans),
        None => JsValue::null(),
    };
    js_sys::Reflect::set(&obj, &JsValue::from_str("pending"), &pending).unwrap();
    obj.into()
}
/// Convert an `IdSpanVector` into a JS `Map` keyed by the peer id rendered
/// as a decimal string, with `CounterSpan` values converted via their
/// `From<CounterSpan> for JsValue` impl.
fn id_span_vector_to_js_value(v: IdSpanVector) -> JsValue {
    let map = Map::new();
    for (peer, span) in v {
        map.set(&JsValue::from_str(&peer.to_string()), &JsValue::from(span));
    }
    map.into()
}

View file

@ -4,14 +4,14 @@
#![allow(clippy::doc_lazy_continuation)]
// #![warn(missing_docs)]
use convert::{js_to_version_vector, resolved_diff_to_js};
use convert::{import_status_to_js_value, js_to_version_vector, resolved_diff_to_js};
use js_sys::{Array, Object, Promise, Reflect, Uint8Array};
use loro_internal::{
change::Lamport,
configure::{StyleConfig, StyleConfigMap},
container::{richtext::ExpandType, ContainerID},
cursor::{self, Side},
encoding::ImportBlobMetadata,
encoding::{ImportBlobMetadata, ImportStatus},
event::Index,
handler::{
Handler, ListHandler, MapHandler, TextDelta, TextHandler, TreeHandler, ValueOrHandler,
@ -175,6 +175,8 @@ extern "C" {
pub type JsExportMode;
#[wasm_bindgen(typescript_type = "{ origin?: string, timestamp?: number, message?: string }")]
pub type JsCommitOption;
#[wasm_bindgen(typescript_type = "ImportStatus")]
pub type JsImportStatus;
}
mod observer {
@ -1077,18 +1079,19 @@ impl LoroDoc {
///
/// only supports backward compatibility but not forward compatibility.
#[wasm_bindgen(js_name = "importJsonUpdates")]
pub fn import_json_updates(&self, json: JsJsonSchemaOrString) -> JsResult<()> {
pub fn import_json_updates(&self, json: JsJsonSchemaOrString) -> JsResult<JsImportStatus> {
let json: JsValue = json.into();
if JsValue::is_string(&json) {
let json_str = json.as_string().unwrap();
return self
let status = self
.0
.import_json_updates(json_str.as_str())
.map_err(|e| e.into());
.map_err(JsValue::from)?;
return Ok(import_status_to_js_value(status).into());
}
let json_schema: JsonSchema = serde_wasm_bindgen::from_value(json)?;
self.0.import_json_updates(json_schema)?;
Ok(())
let status = self.0.import_json_updates(json_schema)?;
Ok(import_status_to_js_value(status).into())
}
/// Import a snapshot or a update to current doc.
@ -1113,9 +1116,9 @@ impl LoroDoc {
/// // or import updates
/// doc2.import(updates);
/// ```
pub fn import(&self, update_or_snapshot: &[u8]) -> JsResult<()> {
self.0.import(update_or_snapshot)?;
Ok(())
pub fn import(&self, update_or_snapshot: &[u8]) -> JsResult<JsImportStatus> {
let status = self.0.import(update_or_snapshot)?;
Ok(import_status_to_js_value(status).into())
}
/// Import a batch of updates.
@ -4732,4 +4735,11 @@ export type UnknownOp = {
data: Uint8Array
}
};
export type CounterSpan = { start: number, end: number };
export type ImportStatus = {
success: Map<PeerID, CounterSpan>,
pending: Map<PeerID, CounterSpan> | null
}
"#;

View file

@ -6,6 +6,7 @@ pub use loro_internal::cursor::CannotFindRelativePosition;
use loro_internal::cursor::Cursor;
use loro_internal::cursor::PosQueryResult;
use loro_internal::cursor::Side;
pub use loro_internal::encoding::ImportStatus;
use loro_internal::handler::HandlerTrait;
use loro_internal::handler::ValueOrHandler;
use loro_internal::loro::ChangeTravelError;
@ -387,7 +388,7 @@ impl LoroDoc {
/// Import updates/snapshot exported by [`LoroDoc::export_snapshot`] or [`LoroDoc::export_from`].
#[inline]
pub fn import(&self, bytes: &[u8]) -> Result<(), LoroError> {
pub fn import(&self, bytes: &[u8]) -> Result<ImportStatus, LoroError> {
self.doc.import_with(bytes, "".into())
}
@ -396,7 +397,7 @@ impl LoroDoc {
/// It marks the import with a custom `origin` string. It can be used to track the import source
/// in the generated events.
#[inline]
pub fn import_with(&self, bytes: &[u8], origin: &str) -> Result<(), LoroError> {
pub fn import_with(&self, bytes: &[u8], origin: &str) -> Result<ImportStatus, LoroError> {
self.doc.import_with(bytes, origin.into())
}
@ -404,7 +405,10 @@ impl LoroDoc {
///
/// only supports backward compatibility but not forward compatibility.
#[inline]
pub fn import_json_updates<T: TryInto<JsonSchema>>(&self, json: T) -> Result<(), LoroError> {
pub fn import_json_updates<T: TryInto<JsonSchema>>(
&self,
json: T,
) -> Result<ImportStatus, LoroError> {
self.doc.import_json_updates(json)
}

View file

@ -13,6 +13,7 @@ import {
TreeID,
Value,
ContainerType,
PeerID,
} from "loro-wasm";
/**

View file

@ -144,23 +144,31 @@ describe("map", () => {
});
describe("import", () => {
it("pending", () => {
it("pending and import status", () => {
const a = new LoroDoc();
a.setPeerId(0);
a.getText("text").insert(0, "a");
const b = new LoroDoc();
b.setPeerId(1);
b.import(a.exportFrom());
b.getText("text").insert(1, "b");
const c = new LoroDoc();
c.setPeerId(2);
c.import(b.exportFrom());
c.getText("text").insert(2, "c");
// c export from b's version, which cannot be imported directly to a.
// This operation is pending.
a.import(c.exportFrom(b.version()));
const status = a.import(c.exportFrom(b.version()));
const pending = new Map();
pending.set("2", { start: 0, end: 1 });
expect(status).toStrictEqual({ success: new Map(), pending });
expect(a.getText("text").toString()).toBe("a");
// a import the missing ops from b. It makes the pending operation from c valid.
a.import(b.exportFrom(a.version()));
const status2 = a.import(b.exportFrom(a.version()));
pending.set("1", { start: 0, end: 1 });
expect(status2).toStrictEqual({ success: pending, pending: null });
expect(a.getText("text").toString()).toBe("abc");
});