tree: when merging files, read all sides concurrently

This should help a bit when merging files using a high-latency
backend.
Martin von Zweigbergk 2024-09-04 09:25:54 -07:00 committed by Martin von Zweigbergk
parent bc06b2a442
commit 5e620bb7ef


@@ -22,6 +22,7 @@ use std::hash::Hasher;
 use std::io::Read;
 use std::sync::Arc;
 
+use futures::future::try_join_all;
 use itertools::Itertools;
 use pollster::FutureExt;
 use tracing::instrument;
@@ -457,20 +458,19 @@ pub async fn try_resolve_file_conflict(
     // cannot
     let file_id_conflict = file_id_conflict.simplify();
-    // TODO: Read the files concurrently
-    let contents: Merge<Vec<u8>> =
-        file_id_conflict.try_map(|&file_id| -> BackendResult<Vec<u8>> {
-            let mut content = vec![];
-            store
-                .read_file(filename, file_id)?
-                .read_to_end(&mut content)
-                .map_err(|err| BackendError::ReadObject {
-                    object_type: file_id.object_type(),
-                    hash: file_id.hex(),
-                    source: err.into(),
-                })?;
-            Ok(content)
-        })?;
+    let content_futures = file_id_conflict.into_iter().map(|file_id| async {
+        let mut content = vec![];
+        let mut reader = store.read_file_async(filename, file_id).await?;
+        reader
+            .read_to_end(&mut content)
+            .map_err(|err| BackendError::ReadObject {
+                object_type: file_id.object_type(),
+                hash: file_id.hex(),
+                source: err.into(),
+            })?;
+        BackendResult::Ok(content)
+    });
+    let contents = Merge::from_vec(try_join_all(content_futures).await?);
     let merge_result = files::merge(&contents);
     match merge_result {
         MergeResult::Resolved(merged_content) => {
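
For context, the core trick in this change is to build one future per conflict side without awaiting them individually, then await them all together with futures::future::try_join_all, so a high-latency backend is queried in parallel rather than once per side. The sketch below illustrates that pattern in isolation; read_side, its simulated delay, and the use of Tokio as the executor are illustrative assumptions, not part of this commit.

    use std::time::Duration;

    use futures::future::try_join_all;

    // Hypothetical stand-in for a backend read; real code would do network I/O.
    async fn read_side(id: u32) -> Result<Vec<u8>, String> {
        // Simulate a high-latency backend with a fixed delay per read.
        tokio::time::sleep(Duration::from_millis(100)).await;
        Ok(format!("content of side {id}").into_bytes())
    }

    #[tokio::main]
    async fn main() -> Result<(), String> {
        let sides = [0, 1, 2];
        // Build the futures without awaiting them one by one...
        let content_futures = sides.iter().map(|&id| read_side(id));
        // ...then await them all at once. try_join_all resolves to Ok(Vec<_>)
        // if every future succeeds, and returns the first error otherwise.
        let contents: Vec<Vec<u8>> = try_join_all(content_futures).await?;
        assert_eq!(contents.len(), sides.len());
        Ok(())
    }

Awaiting each read sequentially would cost roughly one round trip per side; with try_join_all the reads overlap, which is the speedup the commit message refers to.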