Better handling of workspace buffer restoration

* Fixes #20775 - deletion status now properly handled on buffer restore.

* Simplifies buffer restore code to have one main path.

* Refactors buffer open+create interfaces to take initial contents to set.

    - Also useful for cases where buffers are automatically populated.

    - Unlocks a small optimization: skipping the file load (while still
    loading git state, filesystem mtime, etc.).
This commit is contained in:
mgsloan@gmail.com 2024-11-18 09:23:13 -08:00
parent b102a40e04
commit b5883aa715
11 changed files with 273 additions and 198 deletions

View file

@ -7,7 +7,7 @@ use gpui::{
InteractiveElement as _, Model, ParentElement as _, Render, SharedString,
StatefulInteractiveElement, Styled, Transformation, View, ViewContext, VisualContext as _,
};
use language::{LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId};
use language::{BufferContents, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId};
use lsp::LanguageServerName;
use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId};
use smallvec::SmallVec;
@ -86,21 +86,17 @@ impl ActivityIndicator {
cx.subscribe(&this, move |_, _, event, cx| match event {
Event::ShowError { lsp_name, error } => {
let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx));
let contents = BufferContents {
text: format!("Language server error: {}\n\n{}", lsp_name, error),
language: None,
mtime: None,
};
let create_buffer =
project.update(cx, |project, cx| project.create_buffer(Some(contents), cx));
let project = project.clone();
let error = error.clone();
let lsp_name = lsp_name.clone();
cx.spawn(|workspace, mut cx| async move {
let buffer = create_buffer.await?;
buffer.update(&mut cx, |buffer, cx| {
buffer.edit(
[(
0..0,
format!("Language server error: {}\n\n{}", lsp_name, error),
)],
None,
cx,
);
buffer.set_capability(language::Capability::ReadOnly, cx);
})?;
workspace.update(&mut cx, |workspace, cx| {

View file

@ -2447,7 +2447,7 @@ async fn test_propagate_saves_and_fs_changes(
});
let new_buffer_a = project_a
.update(cx_a, |p, cx| p.create_buffer(cx))
.update(cx_a, |p, cx| p.create_buffer(None, cx))
.await
.unwrap();

View file

@ -2317,7 +2317,7 @@ impl Editor {
cx: &mut ViewContext<Workspace>,
) -> Task<Result<View<Editor>>> {
let project = workspace.project().clone();
let create = project.update(cx, |project, cx| project.create_buffer(cx));
let create = project.update(cx, |project, cx| project.create_buffer(None, cx));
cx.spawn(|workspace, mut cx| async move {
let buffer = create.await?;
@ -2352,7 +2352,7 @@ impl Editor {
cx: &mut ViewContext<Workspace>,
) {
let project = workspace.project().clone();
let create = project.update(cx, |project, cx| project.create_buffer(cx));
let create = project.update(cx, |project, cx| project.create_buffer(None, cx));
cx.spawn(|workspace, mut cx| async move {
let buffer = create.await?;

View file

@ -16,8 +16,8 @@ use gpui::{
VisualContext, WeakView, WindowContext,
};
use language::{
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, CharKind, DiskState, Point,
SelectionGoal,
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, BufferContents, CharKind,
DiskState, Point, SelectionGoal,
};
use lsp::DiagnosticSeverity;
use multi_buffer::AnchorRangeExt;
@ -976,133 +976,98 @@ impl SerializableItem for Editor {
}
}
Ok(None) => {
return Task::ready(Err(anyhow!("No path or contents found for buffer")));
return Task::ready(Err(anyhow!("Missing serialized editor info in database")));
}
Err(error) => {
return Task::ready(Err(error));
}
};
match serialized_editor {
SerializedEditor {
abs_path: None,
contents: Some(contents),
language,
..
} => cx.spawn(|pane, mut cx| {
let project = project.clone();
async move {
let language = if let Some(language_name) = language {
let language_registry =
project.update(&mut cx, |project, _| project.languages().clone())?;
if let SerializedEditor {
abs_path: None,
contents: None,
..
} = serialized_editor
{
return Task::ready(Err(anyhow!("No path or contents found for buffer")));
}
// We don't fail here, because we'd rather not set the language if the name changed
// than fail to restore the buffer.
language_registry
.language_for_name(&language_name)
.await
.ok()
} else {
None
};
let SerializedEditor {
abs_path,
contents,
language,
mtime,
} = serialized_editor;
// First create the empty buffer
let buffer = project
.update(&mut cx, |project, cx| project.create_buffer(cx))?
.await?;
// Then set the text so that the dirty bit is set correctly
buffer.update(&mut cx, |buffer, cx| {
if let Some(language) = language {
buffer.set_language(Some(language), cx);
}
buffer.set_text(contents, cx);
})?;
pane.update(&mut cx, |_, cx| {
cx.new_view(|cx| {
let mut editor = Editor::for_buffer(buffer, Some(project), cx);
editor.read_scroll_position_from_db(item_id, workspace_id, cx);
editor
cx.spawn(|pane, mut cx| async move {
let resolved_language_future = if contents.is_none() {
None
} else {
project
.read_with(&cx, |project, _| {
language.map(|language| {
project.languages().language_for_name(language.as_str())
})
})
}
}),
SerializedEditor {
abs_path: Some(abs_path),
contents,
.ok()
.flatten()
};
let resolved_language = match resolved_language_future {
None => None,
//Should open even if the language is not found, so errors are ignored.
Some(resolved_language_future) => resolved_language_future.await.ok(),
};
let initial_contents = contents.map(|text| BufferContents {
text,
language: resolved_language,
mtime,
..
} => {
let project_item = project.update(cx, |project, cx| {
let (worktree, path) = project.find_worktree(&abs_path, cx)?;
let project_path = ProjectPath {
worktree_id: worktree.read(cx).id(),
path: path.into(),
};
Some(project.open_path(project_path, cx))
});
});
match project_item {
Some(project_item) => {
cx.spawn(|pane, mut cx| async move {
let (_, project_item) = project_item.await?;
let buffer = project_item.downcast::<Buffer>().map_err(|_| {
anyhow!("Project item at stored path was not a buffer")
})?;
// This is a bit wasteful: we're loading the whole buffer from
// disk and then overwrite the content.
// But for now, it keeps the implementation of the content serialization
// simple, because we don't have to persist all of the metadata that we get
// by loading the file (git diff base, ...).
if let Some(buffer_text) = contents {
buffer.update(&mut cx, |buffer, cx| {
// If we did restore an mtime, we want to store it on the buffer
// so that the next edit will mark the buffer as dirty/conflicted.
if mtime.is_some() {
buffer.did_reload(
buffer.version(),
buffer.line_ending(),
mtime,
cx,
);
}
buffer.set_text(buffer_text, cx);
})?;
}
pane.update(&mut cx, |_, cx| {
cx.new_view(|cx| {
let mut editor = Editor::for_buffer(buffer, Some(project), cx);
editor.read_scroll_position_from_db(item_id, workspace_id, cx);
editor
})
})
let buffer = match abs_path {
None => {
project.update(&mut cx, |project, cx| project.create_buffer(initial_contents, cx))?.await?
}
Some(abs_path) => {
let project_path = project.read_with(&cx, |project, cx| {
project.find_worktree(&abs_path, cx).map(|(worktree, path)| ProjectPath {
worktree_id: worktree.read(cx).id(),
path: path.into(),
})
}
None => {
let open_by_abs_path = workspace.update(cx, |workspace, cx| {
workspace.open_abs_path(abs_path.clone(), false, cx)
});
cx.spawn(|_, mut cx| async move {
})?;
match project_path {
Some(project_path) => {
project.update(&mut cx, |project, cx| {
project.open_buffer_with_initial_contents(
project_path,
initial_contents,
cx,
)
})?.await?
},
None => {
let open_by_abs_path = workspace.update(&mut cx, |workspace, cx| {
workspace.open_abs_path(abs_path.clone(), false, cx)
});
let editor = open_by_abs_path?.await?.downcast::<Editor>().with_context(|| format!("Failed to downcast to Editor after opening abs path {abs_path:?}"))?;
editor.update(&mut cx, |editor, cx| {
editor.read_scroll_position_from_db(item_id, workspace_id, cx);
})?;
Ok(editor)
})
return Ok(editor)
}
}
}
}
SerializedEditor {
abs_path: None,
contents: None,
..
} => Task::ready(Err(anyhow!("No path or contents found for buffer"))),
}
};
pane.update(&mut cx, |_, cx| {
cx.new_view(|cx| {
let mut editor =
Editor::for_buffer(buffer, Some(project), cx);
editor.read_scroll_position_from_db(item_id, workspace_id, cx);
editor
})
})
})
}
fn serialize(

View file

@ -2,7 +2,7 @@ use std::{fs, path::Path};
use anyhow::Context as _;
use gpui::{Context, View, ViewContext, VisualContext, WindowContext};
use language::Language;
use language::{BufferContents, Language};
use multi_buffer::MultiBuffer;
use project::lsp_ext_command::ExpandMacro;
use text::ToPointUtf16;
@ -74,14 +74,17 @@ pub fn expand_macro_recursively(
return Ok(());
}
let contents = BufferContents {
text: macro_expansion.expansion,
language: Some(rust_language),
mtime: None,
};
let buffer = project
.update(&mut cx, |project, cx| project.create_buffer(cx))?
.update(&mut cx, |project, cx| {
project.create_buffer(Some(contents), cx)
})?
.await?;
workspace.update(&mut cx, |workspace, cx| {
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..0, macro_expansion.expansion)], None, cx);
buffer.set_language(Some(rust_language), cx)
});
let multibuffer = cx.new_model(|cx| {
MultiBuffer::singleton(buffer, cx).with_title(macro_expansion.name)
});

View file

@ -620,6 +620,14 @@ impl IndentGuide {
}
}
// TODO: move — this type may belong nearer the buffer-construction code.
/// Initial contents to populate a buffer with when it is created or
/// restored (e.g. restoring serialized workspace state), instead of
/// loading the text from disk.
pub struct BufferContents {
/// The text the buffer starts with.
pub text: String,
// FIXME: use
/// Language to assign to the buffer, if one is known.
pub language: Option<Arc<Language>>,
/// Filesystem mtime captured with the contents; `None` when the buffer
/// was not backed by an on-disk file at capture time.
pub mtime: Option<SystemTime>,
}
impl Buffer {
/// Create a new buffer with the given base text.
pub fn local<T: Into<String>>(base_text: T, cx: &ModelContext<Self>) -> Self {

View file

@ -20,7 +20,7 @@ use language::{
deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
split_operations,
},
Buffer, BufferEvent, Capability, DiskState, File as _, Language, Operation,
Buffer, BufferContents, BufferEvent, Capability, DiskState, File as _, Language, Operation,
};
use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope};
use smol::channel::Receiver;
@ -33,6 +33,7 @@ trait BufferStoreImpl {
fn open_buffer(
&self,
path: Arc<Path>,
initial_contents: Option<BufferContents>,
worktree: Model<Worktree>,
cx: &mut ModelContext<BufferStore>,
) -> Task<Result<Model<Buffer>>>;
@ -50,7 +51,11 @@ trait BufferStoreImpl {
cx: &mut ModelContext<BufferStore>,
) -> Task<Result<()>>;
fn create_buffer(&self, cx: &mut ModelContext<BufferStore>) -> Task<Result<Model<Buffer>>>;
fn create_buffer(
&self,
initial_contents: Option<BufferContents>,
cx: &mut ModelContext<BufferStore>,
) -> Task<Result<Model<Buffer>>>;
fn reload_buffers(
&self,
@ -344,6 +349,8 @@ impl BufferStoreImpl for Model<RemoteBufferStore> {
fn open_buffer(
&self,
path: Arc<Path>,
// TODO: How to handle interaction between workspace persistence and remote dev?
_: Option<BufferContents>,
worktree: Model<Worktree>,
cx: &mut ModelContext<BufferStore>,
) -> Task<Result<Model<Buffer>>> {
@ -373,7 +380,11 @@ impl BufferStoreImpl for Model<RemoteBufferStore> {
})
}
fn create_buffer(&self, cx: &mut ModelContext<BufferStore>) -> Task<Result<Model<Buffer>>> {
fn create_buffer(
&self,
initial_contents: Option<BufferContents>,
cx: &mut ModelContext<BufferStore>,
) -> Task<Result<Model<Buffer>>> {
self.update(cx, |this, cx| {
let create = this.upstream_client.request(proto::OpenNewBuffer {
project_id: this.project_id,
@ -825,13 +836,19 @@ impl BufferStoreImpl for Model<LocalBufferStore> {
fn open_buffer(
&self,
path: Arc<Path>,
initial_contents: Option<BufferContents>,
worktree: Model<Worktree>,
cx: &mut ModelContext<BufferStore>,
) -> Task<Result<Model<Buffer>>> {
let buffer_store = cx.weak_model();
self.update(cx, |_, cx| {
let load_buffer = worktree.update(cx, |worktree, cx| {
let load_file = worktree.load_file(path.as_ref(), cx);
let load_file = match initial_contents {
None => worktree.load_file(path.as_ref(), cx),
Some(initial_contents) => {
worktree.restore_file(path.as_ref(), initial_contents, cx)
}
};
let reservation = cx.reserve_model();
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
cx.spawn(move |_, mut cx| async move {
@ -901,10 +918,24 @@ impl BufferStoreImpl for Model<LocalBufferStore> {
})
}
fn create_buffer(&self, cx: &mut ModelContext<BufferStore>) -> Task<Result<Model<Buffer>>> {
fn create_buffer(
&self,
initial_contents: Option<BufferContents>,
cx: &mut ModelContext<BufferStore>,
) -> Task<Result<Model<Buffer>>> {
let (text, language) = match initial_contents {
Some(BufferContents {
text,
language,
mtime: _,
}) => (text, language),
None => ("".into(), None),
};
let handle = self.clone();
cx.spawn(|buffer_store, mut cx| async move {
let buffer = cx.new_model(|cx| {
Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx)
Buffer::local(text, cx)
.with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
})?;
buffer_store.update(&mut cx, |buffer_store, cx| {
buffer_store.add_buffer(buffer.clone(), cx).log_err();
@ -1010,6 +1041,7 @@ impl BufferStore {
pub fn open_buffer(
&mut self,
project_path: ProjectPath,
initial_contents: Option<BufferContents>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
let existing_buffer = self.get_by_path(&project_path, cx);
@ -1036,9 +1068,12 @@ impl BufferStore {
entry.insert(rx.clone());
let project_path = project_path.clone();
let load_buffer = self
.state
.open_buffer(project_path.path.clone(), worktree, cx);
let load_buffer = self.state.open_buffer(
project_path.path.clone(),
initial_contents,
worktree,
cx,
);
cx.spawn(move |this, mut cx| async move {
let load_result = load_buffer.await;
@ -1062,8 +1097,12 @@ impl BufferStore {
})
}
pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Model<Buffer>>> {
self.state.create_buffer(cx)
pub fn create_buffer(
&mut self,
initial_contents: Option<BufferContents>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
self.state.create_buffer(initial_contents, cx)
}
pub fn save_buffer(
@ -1422,7 +1461,7 @@ impl BufferStore {
let buffers = this.update(&mut cx, |this, cx| {
project_paths
.into_iter()
.map(|project_path| this.open_buffer(project_path, cx))
.map(|project_path| this.open_buffer(project_path, None, cx))
.collect::<Vec<_>>()
})?;
for buffer_task in buffers {

View file

@ -3653,7 +3653,7 @@ impl LspStore {
lsp_store
.update(&mut cx, |lsp_store, cx| {
lsp_store.buffer_store().update(cx, |buffer_store, cx| {
buffer_store.open_buffer(project_path, cx)
buffer_store.open_buffer(project_path, None, cx)
})
})?
.await

View file

@ -46,7 +46,7 @@ use gpui::{
};
use itertools::Itertools;
use language::{
language_settings::InlayHintKind, proto::split_operations, Buffer, BufferEvent,
language_settings::InlayHintKind, proto::split_operations, Buffer, BufferContents, BufferEvent,
CachedLspAdapter, Capability, CodeLabel, DiagnosticEntry, Documentation, File as _, Language,
LanguageName, LanguageRegistry, PointUtf16, ToOffset, ToPointUtf16, Toolchain, ToolchainList,
Transaction, Unclipped,
@ -111,6 +111,13 @@ const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500;
const MAX_SEARCH_RESULT_FILES: usize = 5_000;
const MAX_SEARCH_RESULT_RANGES: usize = 10_000;
// FIXME: use?
/// Policy for how buffer-opening APIs should treat a path that does not
/// exist on disk.
/// NOTE(review): no call sites are visible in this file — variant meanings
/// below are inferred from the names only; confirm before relying on them.
pub enum NotFoundHandling {
// presumably: create the file when missing, open it otherwise — TODO confirm
Create,
// presumably: fail unless the file is newly created — TODO confirm
RequireCreate,
// presumably: fail unless the file already exists — TODO confirm
RequireExistence,
}
pub trait Item {
fn try_open(
project: &Model<Project>,
@ -1757,9 +1764,14 @@ impl Project {
}
}
pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Model<Buffer>>> {
self.buffer_store
.update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))
pub fn create_buffer(
&mut self,
initial_contents: Option<BufferContents>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.create_buffer(initial_contents, cx)
})
}
pub fn create_local_buffer(
@ -1809,13 +1821,22 @@ impl Project {
&mut self,
path: impl Into<ProjectPath>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
self.open_buffer_with_initial_contents(path, None, cx)
}
pub fn open_buffer_with_initial_contents(
&mut self,
path: impl Into<ProjectPath>,
initial_contents: Option<BufferContents>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Model<Buffer>>> {
if self.is_disconnected(cx) {
return Task::ready(Err(anyhow!(ErrorCode::Disconnected)));
}
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_buffer(path.into(), cx)
buffer_store.open_buffer(path.into(), initial_contents, cx)
})
}
@ -3923,7 +3944,7 @@ impl Project {
mut cx: AsyncAppContext,
) -> Result<proto::OpenBufferResponse> {
let buffer = this
.update(&mut cx, |this, cx| this.create_buffer(cx))?
.update(&mut cx, |this, cx| this.create_buffer(None, cx))?
.await?;
let peer_id = envelope.original_sender_id()?;

View file

@ -402,6 +402,8 @@ impl HeadlessProject {
worktree_id,
path: PathBuf::from(message.payload.path).into(),
},
// TODO: How to handle interaction between workspace persistence and remote dev?
None,
cx,
)
});
@ -430,7 +432,7 @@ impl HeadlessProject {
let buffer_store = this.buffer_store.clone();
let buffer = this
.buffer_store
.update(cx, |buffer_store, cx| buffer_store.create_buffer(cx));
.update(cx, |buffer_store, cx| buffer_store.create_buffer(None, cx));
anyhow::Ok((buffer_store, buffer))
})??;
@ -468,6 +470,8 @@ impl HeadlessProject {
worktree_id: worktree.read(cx).id(),
path: path.into(),
},
// TODO: How to handle interaction between workspace persistence and remote dev?
None,
cx,
)
});

View file

@ -29,7 +29,7 @@ use gpui::{
Task,
};
use ignore::IgnoreStack;
use language::DiskState;
use language::{BufferContents, DiskState};
use parking_lot::Mutex;
use paths::local_settings_folder_relative_path;
use postage::{
@ -720,6 +720,20 @@ impl Worktree {
}
}
/// Restore the file at `path` using previously captured `initial_contents`
/// rather than reading its text from disk.
///
/// Only local worktrees support restoration; remote worktrees resolve
/// immediately to an error.
pub fn restore_file(
    &self,
    path: &Path,
    initial_contents: BufferContents,
    cx: &ModelContext<Worktree>,
) -> Task<Result<LoadedFile>> {
    // Guard-style dispatch: handle the supported (local) case first.
    if let Worktree::Local(local) = self {
        local.restore_file(path, initial_contents, cx)
    } else {
        Task::ready(Err(anyhow!("remote worktrees can't yet restore files")))
    }
}
pub fn write_file(
&self,
path: &Path,
@ -1353,68 +1367,97 @@ impl LocalWorktree {
})
}
/// Restore `path` with pre-supplied contents instead of loading its text
/// from disk (filesystem metadata and git state are still gathered by the
/// shared load path).
fn restore_file(
    &self,
    path: &Path,
    initial_contents: BufferContents,
    cx: &ModelContext<Worktree>,
) -> Task<Result<LoadedFile>> {
    let contents = Some(initial_contents);
    self.load_or_restore_file(path, contents, cx)
}
/// Load `path` from disk.
fn load_file(&self, path: &Path, cx: &ModelContext<Worktree>) -> Task<Result<LoadedFile>> {
    // No initial contents: the shared path reads the text from the filesystem.
    let contents = None;
    self.load_or_restore_file(path, contents, cx)
}
fn load_or_restore_file(
&self,
path: &Path,
initial_contents: Option<BufferContents>,
cx: &ModelContext<Worktree>,
) -> Task<Result<LoadedFile>> {
let path = Arc::from(path);
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let entry = self.refresh_entry(path.clone(), None, cx);
let is_private = self.is_path_private(path.as_ref());
cx.spawn(|this, mut cx| async move {
cx.spawn(move |this, mut cx| async move {
let (initial_text, was_present) =
initial_contents.map_or((None, false), |initial_contents| {
(
Some(initial_contents.text),
initial_contents.mtime.is_some(),
)
});
let abs_path = abs_path?;
let text = fs.load(&abs_path).await?;
let mut index_task = None;
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) {
if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() {
if let Some(git_repo) = snapshot.git_repositories.get(&*repo.work_directory) {
let git_repo = git_repo.repo_ptr.clone();
index_task = Some(
cx.background_executor()
.spawn(async move { git_repo.load_index_text(&repo_path) }),
);
}
}
}
let text_future = initial_text.map_or_else(
|| fs.load(&abs_path),
|text| Box::pin(std::future::ready(Ok(text))),
);
let diff_base = if let Some(index_task) = index_task {
index_task.await
} else {
None
};
let diff_base = this
.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())
.map(|snapshot| {
let mut index_task = Task::ready(None);
if let Some(repo) = snapshot.repository_for_path(&path) {
if let Some(repo_path) = repo.relativize(&snapshot, &path).log_err() {
if let Some(git_repo) =
snapshot.git_repositories.get(&*repo.work_directory)
{
let git_repo = git_repo.repo_ptr.clone();
index_task = cx
.background_executor()
.spawn(async move { git_repo.load_index_text(&repo_path) });
}
}
}
index_task
});
let worktree = this
.upgrade()
.ok_or_else(|| anyhow!("worktree was dropped"))?;
let file = match entry.await? {
Some(entry) => File::for_entry(entry, worktree),
None => {
let metadata = fs
.metadata(&abs_path)
.await
.with_context(|| {
format!("Loading metadata for excluded file {abs_path:?}")
})?
.with_context(|| {
format!("Excluded file {abs_path:?} got removed during loading")
})?;
Arc::new(File {
let file = match entry.await {
Err(e) => Err(e),
Ok(Some(entry)) => Ok(File::for_entry(entry, worktree)),
Ok(None) => {
let metadata = fs.metadata(&abs_path).await.with_context(|| {
format!("Loading metadata for excluded file {abs_path:?}")
})?;
let disk_state = match metadata {
Some(metadata) => DiskState::Present {
mtime: metadata.mtime,
},
None if was_present => DiskState::Deleted,
None => DiskState::New,
};
Ok(Arc::new(File {
entry_id: None,
worktree,
path,
disk_state: DiskState::Present {
mtime: metadata.mtime,
},
disk_state,
is_local: true,
is_private,
})
}))
}
};
// Error reporting is done at the end so that file loading errors take precedence.
Ok(LoadedFile {
file,
text,
diff_base,
text: text_future.await?,
file: file?,
diff_base: diff_base?.await,
})
})
}
@ -1830,12 +1873,8 @@ impl LocalWorktree {
cx.spawn(move |this, mut cx| async move {
refresh.recv().await;
log::trace!("refreshed entry {path:?} in {:?}", t0.elapsed());
let new_entry = this.update(&mut cx, |this, _| {
this.entry_for_path(path)
.cloned()
.ok_or_else(|| anyhow!("failed to read path after update"))
})??;
Ok(Some(new_entry))
let new_entry = this.update(&mut cx, |this, _| this.entry_for_path(path).cloned())?;
Ok(new_entry)
})
}