perf: use simplified internal string (#551)

* perf: use simplified internal string

* fix: make internal string trait impl more stringish

* perf: reduce InternalString memory cost from 16 bytes to 8 bytes on 64-bit platforms

* chore: fix warning

* perf: optimize drop

* test: add init large map example
Zixuan Chen 2024-11-11 15:08:27 +08:00 committed by GitHub
parent b1b977cf9e
commit 5fc3458eb7
6 changed files with 378 additions and 66 deletions
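
The headline change replaces the string_cache::DefaultAtom wrapper with a hand-rolled one-word representation. A minimal sketch of the idea behind the 16-byte-to-8-byte claim, using illustrative names rather than the crate's actual types: a single 8-byte union either packs a short string inline (tag + length + up to 7 bytes) or holds a pointer to an interned heap string, so the whole handle is one machine word on a 64-bit target.

use std::num::NonZeroU64;

// Illustrative stand-in for the InternalString layout introduced below.
#[allow(dead_code)]
union Repr {
    inline: NonZeroU64,       // low bits carry the tag and the length
    dynamic: *const Box<str>, // Arc::into_raw pointer to an interned string
}

#[allow(dead_code)]
struct OneWordString {
    data: Repr,
}

fn main() {
    // Both union variants are pointer-sized, so the wrapper is 8 bytes
    // on a 64-bit platform.
    assert_eq!(std::mem::size_of::<OneWordString>(), 8);
}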

Cargo.lock (generated)

@@ -1145,7 +1145,6 @@ dependencies = [
"serde",
"serde_columnar",
"serde_json",
"string_cache",
"thiserror",
"wasm-bindgen",
]


@@ -0,0 +1,32 @@
use std::time::Instant;
use dev_utils::get_mem_usage;
use loro::LoroDoc;
fn main() {
let mut init = Instant::now();
{
let start = Instant::now();
let doc = LoroDoc::new();
let map = doc.get_map("map");
for i in 0..1000 {
for j in 0..1000 {
let key = format!("0000{}:0000{}", i, j);
map.insert(&key, i + j).unwrap();
}
}
println!("LargeMap Init Time {:?}", start.elapsed());
let start = Instant::now();
let bytes = doc.export(loro::ExportMode::Snapshot).unwrap();
println!("LargeMap Export Time {:?}", start.elapsed());
let start = Instant::now();
let new_doc = LoroDoc::new();
new_doc.import(&bytes).unwrap();
println!("LargeMap Import Time {:?}", start.elapsed());
println!("Mem {:?}", get_mem_usage());
init = Instant::now();
}
println!("Drop Time {:?}", init.elapsed());
println!("Mem {:?}", get_mem_usage())
}
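
In this new example, `init` is reassigned just before the inner scope closes, so the final `init.elapsed()` outside the braces times only the drop of the document and its containers. The same pattern in isolation, with illustrative data standing in for the LoroDoc:

use std::time::Instant;

fn main() {
    let drop_timer;
    {
        // Stand-in for the LoroDoc/map built in the example above.
        let data = vec![vec![0u8; 1024]; 10_000];
        println!("built {} rows", data.len());
        // Start the clock right before the scope ends...
        drop_timer = Instant::now();
    }
    // ...so elapsed() here covers only the drops that happen at the closing brace.
    println!("Drop Time {:?}", drop_timer.elapsed());
}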


@@ -1,5 +1,6 @@
use dev_utils::get_mem_usage;
use dev_utils::{get_mem_usage, ByteSize};
use examples::sheet::init_large_sheet;
use loro::ID;
pub fn main() {
dev_utils::setup_test_log();
@@ -10,53 +11,53 @@ pub fn main() {
println!("Allocated bytes for 10M cells spreadsheet: {}", allocated);
println!("Has history cache: {}", doc.has_history_cache());
examples::utils::bench_fast_snapshot(&doc);
// doc.checkout(&ID::new(doc.peer_id(), 100).into()).unwrap();
// println!(
// "Has history cache after checkout: {}",
// doc.has_history_cache()
// );
doc.checkout(&ID::new(doc.peer_id(), 100).into()).unwrap();
println!(
"Has history cache after checkout: {}",
doc.has_history_cache()
);
// doc.checkout_to_latest();
// let after_checkout = get_mem_usage();
// println!("Allocated bytes after checkout: {}", after_checkout);
doc.checkout_to_latest();
let after_checkout = get_mem_usage();
println!("Allocated bytes after checkout: {}", after_checkout);
// doc.free_diff_calculator();
// let after_free_diff_calculator = get_mem_usage();
// println!(
// "Allocated bytes after freeing diff calculator: {}",
// after_free_diff_calculator
// );
doc.free_diff_calculator();
let after_free_diff_calculator = get_mem_usage();
println!(
"Allocated bytes after freeing diff calculator: {}",
after_free_diff_calculator
);
// println!(
// "Diff calculator size: {}",
// after_checkout - after_free_diff_calculator
// );
println!(
"Diff calculator size: {}",
after_checkout - after_free_diff_calculator
);
// doc.free_history_cache();
// let after_free_history_cache = get_mem_usage();
// println!(
// "Allocated bytes after free history cache: {}",
// after_free_history_cache
// );
doc.free_history_cache();
let after_free_history_cache = get_mem_usage();
println!(
"Allocated bytes after free history cache: {}",
after_free_history_cache
);
// println!(
// "History cache size: {}",
// after_free_diff_calculator - after_free_history_cache
// );
println!(
"History cache size: {}",
after_free_diff_calculator - after_free_history_cache
);
// doc.compact_change_store();
// let after_compact_change_store = get_mem_usage();
// println!(
// "Allocated bytes after compact change store: {}",
// after_compact_change_store
// );
// println!(
// "Shrink change store size: {}",
// after_free_history_cache - after_compact_change_store
// );
doc.compact_change_store();
let after_compact_change_store = get_mem_usage();
println!(
"Allocated bytes after compact change store: {}",
after_compact_change_store
);
println!(
"Shrink change store size: {}",
after_free_history_cache - after_compact_change_store
);
// println!(
// "ChangeStore size: {}",
// ByteSize(doc.with_oplog(|log| log.change_store_kv_size()))
// );
println!(
"ChangeStore size: {}",
ByteSize(doc.with_oplog(|log| log.change_store_kv_size()))
);
}


@@ -21,7 +21,6 @@ thiserror = "1.0.43"
wasm-bindgen = { version = "=0.2.92", optional = true }
fxhash = "0.2.1"
enum-as-inner = "0.6.0"
string_cache = "0.8"
arbitrary = { version = "1.3.0", features = ["derive"] }
js-sys = { version = "0.3.60", optional = true }
serde_columnar = { workspace = true }


@@ -1,40 +1,218 @@
use std::{fmt::Display, ops::Deref};
use fxhash::FxHashSet;
use serde::{Deserialize, Serialize};
use std::borrow::Borrow;
use std::slice;
use std::sync::LazyLock;
use std::{
fmt::Display,
num::NonZeroU64,
ops::Deref,
sync::{atomic::AtomicUsize, Arc, Mutex},
};
const DYNAMIC_TAG: u8 = 0b_00;
const INLINE_TAG: u8 = 0b_01;
const TAG_MASK: u64 = 0b_11;
const LEN_OFFSET: u64 = 4;
const LEN_MASK: u64 = 0xF0;
#[repr(transparent)]
#[derive(Clone, Debug, Default, Serialize, Deserialize, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct InternalString(string_cache::DefaultAtom);
impl InternalString {
pub fn as_str(&self) -> &str {
&self.0
#[derive(Clone)]
pub struct InternalString {
unsafe_data: UnsafeData,
}
union UnsafeData {
inline: NonZeroU64,
dynamic: *const Box<str>,
}
unsafe impl Sync for UnsafeData {}
unsafe impl Send for UnsafeData {}
impl UnsafeData {
#[inline(always)]
fn is_inline(&self) -> bool {
unsafe { (self.inline.get() & TAG_MASK) as u8 == INLINE_TAG }
}
}
impl<T: Into<string_cache::DefaultAtom>> From<T> for InternalString {
impl Clone for UnsafeData {
fn clone(&self) -> Self {
if self.is_inline() {
Self {
inline: unsafe { self.inline },
}
} else {
let arc = unsafe { Arc::from_raw(self.dynamic) };
let clone = arc.clone();
let new = Arc::into_raw(clone);
std::mem::forget(arc);
Self { dynamic: new }
}
}
}
impl std::fmt::Debug for InternalString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("InternalString(")?;
std::fmt::Debug::fmt(self.as_str(), f)?;
f.write_str(")")
}
}
impl std::hash::Hash for InternalString {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.as_str().hash(state);
}
}
impl PartialEq for InternalString {
fn eq(&self, other: &Self) -> bool {
self.as_str() == other.as_str()
}
}
impl Eq for InternalString {}
impl PartialOrd for InternalString {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for InternalString {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.as_str().cmp(other.as_str())
}
}
impl Serialize for InternalString {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(self.as_str())
}
}
impl<'de> Deserialize<'de> for InternalString {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(InternalString::from(s.as_str()))
}
}
impl Default for InternalString {
fn default() -> Self {
let v: u64 = INLINE_TAG as u64;
Self {
// SAFETY: INLINE_TAG is non-zero
unsafe_data: UnsafeData {
inline: unsafe { NonZeroU64::new_unchecked(v) },
},
}
}
}
impl InternalString {
pub fn as_str(&self) -> &str {
unsafe {
match (self.unsafe_data.inline.get() & TAG_MASK) as u8 {
INLINE_TAG => {
let len = (self.unsafe_data.inline.get() & LEN_MASK) >> LEN_OFFSET;
let src = inline_atom_slice(&self.unsafe_data.inline);
// SAFETY: the chosen range is guaranteed to be valid str
std::str::from_utf8_unchecked(&src[..(len as usize)])
}
DYNAMIC_TAG => {
let ptr = self.unsafe_data.dynamic;
// SAFETY: ptr is valid
(*ptr).deref()
}
_ => unreachable!(),
}
}
}
}
impl AsRef<str> for InternalString {
fn as_ref(&self) -> &str {
self.as_str()
}
}
impl From<&str> for InternalString {
#[inline(always)]
fn from(value: T) -> Self {
Self(value.into())
fn from(s: &str) -> Self {
if s.len() <= 7 {
let mut v: u64 = (INLINE_TAG as u64) | ((s.len() as u64) << LEN_OFFSET);
let arr = inline_atom_slice_mut(&mut v);
arr[..s.len()].copy_from_slice(s.as_bytes());
Self {
unsafe_data: UnsafeData {
// SAFETY: The tag is 1
inline: unsafe { NonZeroU64::new_unchecked(v) },
},
}
} else {
let ans: Arc<Box<str>> = get_or_init_internalized_string(s);
let raw = Arc::into_raw(ans);
// SAFETY: Pointer is non-zero
Self {
unsafe_data: UnsafeData { dynamic: raw },
}
}
}
}
#[inline(always)]
fn inline_atom_slice(x: &NonZeroU64) -> &[u8] {
unsafe {
let x: *const NonZeroU64 = x;
let mut data = x as *const u8;
// All except the lowest byte, which is first in little-endian, last in big-endian.
if cfg!(target_endian = "little") {
data = data.offset(1);
}
let len = 7;
slice::from_raw_parts(data, len)
}
}
#[inline(always)]
fn inline_atom_slice_mut(x: &mut u64) -> &mut [u8] {
unsafe {
let x: *mut u64 = x;
let mut data = x as *mut u8;
// All except the lowest byte, which is first in little-endian, last in big-endian.
if cfg!(target_endian = "little") {
data = data.offset(1);
}
let len = 7;
slice::from_raw_parts_mut(data, len)
}
}
impl From<String> for InternalString {
fn from(s: String) -> Self {
Self::from(s.as_str())
}
}
impl From<&InternalString> for String {
#[inline(always)]
fn from(value: &InternalString) -> Self {
value.0.to_string()
}
}
impl From<&InternalString> for InternalString {
#[inline(always)]
fn from(value: &InternalString) -> Self {
value.clone()
value.as_str().to_string()
}
}
impl Display for InternalString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
self.as_str().fmt(f)
}
}
@@ -42,6 +220,109 @@ impl Deref for InternalString {
type Target = str;
fn deref(&self) -> &Self::Target {
self.0.as_ref()
self.as_str()
}
}
#[derive(Hash, PartialEq, Eq)]
struct ArcWrapper(Arc<Box<str>>);
impl Borrow<str> for ArcWrapper {
fn borrow(&self) -> &str {
&self.0
}
}
static STRING_SET: LazyLock<Mutex<FxHashSet<ArcWrapper>>> =
LazyLock::new(|| Mutex::new(FxHashSet::default()));
fn get_or_init_internalized_string(s: &str) -> Arc<Box<str>> {
static MAX_MET_CACHE_SIZE: AtomicUsize = AtomicUsize::new(1 << 16);
let mut set = STRING_SET.lock().unwrap();
if let Some(v) = set.get(s) {
v.0.clone()
} else {
let ans: Arc<Box<str>> = Arc::new(Box::from(s));
set.insert(ArcWrapper(ans.clone()));
let max = MAX_MET_CACHE_SIZE.load(std::sync::atomic::Ordering::Relaxed);
if set.capacity() >= max {
let old = set.len();
set.retain(|s| Arc::strong_count(&s.0) > 1);
let new = set.len();
if old - new > new / 2 {
set.shrink_to_fit();
}
MAX_MET_CACHE_SIZE.store(max * 2, std::sync::atomic::Ordering::Relaxed);
}
ans
}
}
fn drop_cache(s: Arc<Box<str>>) {
let mut set = STRING_SET.lock().unwrap();
set.remove(&ArcWrapper(s));
if set.len() < set.capacity() / 2 && set.capacity() > 128 {
set.shrink_to_fit();
}
}
impl Drop for InternalString {
fn drop(&mut self) {
unsafe {
if (self.unsafe_data.inline.get() & TAG_MASK) as u8 == DYNAMIC_TAG {
let ptr = self.unsafe_data.dynamic;
// SAFETY: ptr is a valid Arc
let arc: Arc<Box<str>> = Arc::from_raw(ptr);
if Arc::strong_count(&arc) == 2 {
drop_cache(arc);
} else {
drop(arc)
}
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_string_cache() {
let s1 = InternalString::from("hello");
let s3 = InternalString::from("world");
// Content should match
assert_eq!("hello", s1.as_str());
assert_eq!(s3.as_str(), "world");
}
#[test]
fn test_long_string_cache() {
let long_str1 = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.";
let long_str2 = "A very long string that contains lots of repeated characters: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
let s1 = InternalString::from(long_str1);
let s2 = InternalString::from(long_str1);
let s3 = InternalString::from(long_str2);
// Same long strings should be equal
assert_eq!(s1, s2);
// Different long strings should be different
assert_ne!(s1, s3);
// Content should match exactly
assert_eq!(s1.as_str(), long_str1);
assert_eq!(s1.as_str(), long_str1);
assert_eq!(s2.as_str(), long_str1);
assert_eq!(s3.as_str(), long_str2);
// Internal pointers should be same for equal strings
assert!(std::ptr::eq(s1.as_str().as_ptr(), s2.as_str().as_ptr()));
assert!(!std::ptr::eq(s1.as_str().as_ptr(), s3.as_str().as_ptr()));
}
}
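
Taken together: keys of at most 7 bytes are encoded inline in the word and never touch the heap, while longer keys go through STRING_SET so that equal strings share one Arc<Box<str>> allocation, which is what the pointer-equality assertion in the test above observes. A short usage sketch, assuming InternalString is consumed through the re-export added to the loro crate in the last hunk of this commit:

use loro::InternalString;

fn main() {
    // <= 7 bytes: stored inline, no allocation and no cache entry.
    let short = InternalString::from("key");
    assert_eq!(short.as_str(), "key");

    // > 7 bytes: interned; building the same string twice yields the
    // same underlying allocation.
    let a = InternalString::from("a-rather-long-map-key");
    let b = InternalString::from("a-rather-long-map-key");
    assert_eq!(a, b);
    assert!(std::ptr::eq(a.as_str().as_ptr(), b.as_str().as_ptr()));

    // The handle itself stays one machine word on 64-bit targets.
    assert_eq!(std::mem::size_of::<InternalString>(), 8);
}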


@@ -3,7 +3,7 @@
#![warn(missing_debug_implementations)]
use event::{DiffEvent, Subscriber};
use fxhash::FxHashSet;
use loro_common::InternalString;
pub use loro_common::InternalString;
pub use loro_internal::cursor::CannotFindRelativePosition;
use loro_internal::cursor::Cursor;
use loro_internal::cursor::PosQueryResult;