Mirror of https://github.com/zed-industries/zed.git (synced 2025-01-11 21:13:02 +00:00)

Merge branch 'main' into multibuffer-following

Commit f797dfb88f
175 changed files with 16722 additions and 8699 deletions
.github/workflows/release_actions.yml (vendored, 2 changes)

@@ -14,7 +14,7 @@ jobs:
         content: |
           📣 Zed ${{ github.event.release.tag_name }} was just released!

-          Restart your Zed or head to https://zed.dev/releases to grab it.
+          Restart your Zed or head to https://zed.dev/releases/latest to grab it.

           ```md
           # Changelog
.gitignore (vendored, 1 change)

@@ -18,3 +18,4 @@ DerivedData/
 .swiftpm/config/registries.json
 .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
 .netrc
+**/*.db
Cargo.lock (generated, 1191 changes)

File diff suppressed because it is too large
@@ -45,6 +45,8 @@ members = [
     "crates/search",
     "crates/settings",
     "crates/snippet",
+    "crates/sqlez",
+    "crates/sqlez_macros",
     "crates/sum_tree",
     "crates/terminal",
     "crates/text",

@@ -81,3 +83,4 @@ split-debuginfo = "unpacked"
 [profile.release]
 debug = true
@@ -8,6 +8,22 @@
         "Namespace": "G"
       }
     ],
+    "i": [
+      "vim::PushOperator",
+      {
+        "Object": {
+          "around": false
+        }
+      }
+    ],
+    "a": [
+      "vim::PushOperator",
+      {
+        "Object": {
+          "around": true
+        }
+      }
+    ],
     "h": "vim::Left",
     "backspace": "vim::Backspace",
     "j": "vim::Down",

@@ -38,22 +54,6 @@
     ],
     "%": "vim::Matching",
     "escape": "editor::Cancel",
-    "i": [
-      "vim::PushOperator",
-      {
-        "Object": {
-          "around": false
-        }
-      }
-    ],
-    "a": [
-      "vim::PushOperator",
-      {
-        "Object": {
-          "around": true
-        }
-      }
-    ],
     "0": "vim::StartOfLine", // When no number operator present, use start of line motion
     "1": [
       "vim::Number",

@@ -110,6 +110,12 @@
       "vim::PushOperator",
       "Yank"
     ],
+    "z": [
+      "vim::PushOperator",
+      {
+        "Namespace": "Z"
+      }
+    ],
     "i": [
       "vim::SwitchMode",
       "Insert"

@@ -147,6 +153,30 @@
         {
           "focus": true
         }
       ],
+      "ctrl-f": [
+        "vim::Scroll",
+        "PageDown"
+      ],
+      "ctrl-b": [
+        "vim::Scroll",
+        "PageUp"
+      ],
+      "ctrl-d": [
+        "vim::Scroll",
+        "HalfPageDown"
+      ],
+      "ctrl-u": [
+        "vim::Scroll",
+        "HalfPageUp"
+      ],
+      "ctrl-e": [
+        "vim::Scroll",
+        "LineDown"
+      ],
+      "ctrl-y": [
+        "vim::Scroll",
+        "LineUp"
+      ]
     }
   },

@@ -188,6 +218,18 @@
       "y": "vim::CurrentLine"
     }
   },
+  {
+    "context": "Editor && vim_operator == z",
+    "bindings": {
+      "t": "editor::ScrollCursorTop",
+      "z": "editor::ScrollCursorCenter",
+      "b": "editor::ScrollCursorBottom",
+      "escape": [
+        "vim::SwitchMode",
+        "Normal"
+      ]
+    }
+  },
   {
     "context": "Editor && VimObject",
     "bindings": {
@@ -11,7 +11,7 @@ use settings::Settings;
 use smallvec::SmallVec;
 use std::{cmp::Reverse, fmt::Write, sync::Arc};
 use util::ResultExt;
-use workspace::{ItemHandle, StatusItemView, Workspace};
+use workspace::{item::ItemHandle, StatusItemView, Workspace};

 actions!(lsp_status, [ShowErrorMessage]);
@ -8,6 +8,7 @@ path = "src/auto_update.rs"
|
|||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
db = { path = "../db" }
|
||||
client = { path = "../client" }
|
||||
gpui = { path = "../gpui" }
|
||||
menu = { path = "../menu" }
|
||||
|
|
|
@@ -1,17 +1,18 @@
 mod update_notification;

 use anyhow::{anyhow, Context, Result};
-use client::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN, ZED_SERVER_URL};
+use client::{http::HttpClient, ZED_SECRET_CLIENT_TOKEN};
+use db::kvp::KEY_VALUE_STORE;
 use gpui::{
     actions, platform::AppVersion, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
     MutableAppContext, Task, WeakViewHandle,
 };
 use lazy_static::lazy_static;
 use serde::Deserialize;
-use settings::ReleaseChannel;
 use smol::{fs::File, io::AsyncReadExt, process::Command};
 use std::{env, ffi::OsString, path::PathBuf, sync::Arc, time::Duration};
 use update_notification::UpdateNotification;
+use util::channel::ReleaseChannel;
 use workspace::Workspace;

 const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";

@@ -41,7 +42,6 @@ pub struct AutoUpdater {
     current_version: AppVersion,
     http_client: Arc<dyn HttpClient>,
     pending_poll: Option<Task<()>>,
-    db: project::Db,
     server_url: String,
 }

@@ -55,11 +55,11 @@ impl Entity for AutoUpdater {
     type Event = ();
 }

-pub fn init(db: project::Db, http_client: Arc<dyn HttpClient>, cx: &mut MutableAppContext) {
+pub fn init(http_client: Arc<dyn HttpClient>, server_url: String, cx: &mut MutableAppContext) {
     if let Some(version) = (*ZED_APP_VERSION).or_else(|| cx.platform().app_version().ok()) {
-        let server_url = ZED_SERVER_URL.to_string();
+        let server_url = server_url;
         let auto_updater = cx.add_model(|cx| {
-            let updater = AutoUpdater::new(version, db.clone(), http_client, server_url.clone());
+            let updater = AutoUpdater::new(version, http_client, server_url.clone());
             updater.start_polling(cx).detach();
             updater
         });

@@ -70,7 +70,14 @@ pub fn init(db: project::Db, http_client: Arc<dyn HttpClient>, cx: &mut MutableA
             }
         });
     cx.add_global_action(move |_: &ViewReleaseNotes, cx| {
-        cx.platform().open_url(&format!("{server_url}/releases"));
+        let latest_release_url = if cx.has_global::<ReleaseChannel>()
+            && *cx.global::<ReleaseChannel>() == ReleaseChannel::Preview
+        {
+            format!("{server_url}/releases/preview/latest")
+        } else {
+            format!("{server_url}/releases/latest")
+        };
+        cx.platform().open_url(&latest_release_url);
     });
     cx.add_action(UpdateNotification::dismiss);
 }

@@ -113,14 +120,12 @@ impl AutoUpdater {

     fn new(
         current_version: AppVersion,
-        db: project::Db,
         http_client: Arc<dyn HttpClient>,
         server_url: String,
     ) -> Self {
         Self {
             status: AutoUpdateStatus::Idle,
             current_version,
-            db,
             http_client,
             server_url,
             pending_poll: None,

@@ -290,20 +295,28 @@ impl AutoUpdater {
         should_show: bool,
         cx: &AppContext,
     ) -> Task<Result<()>> {
-        let db = self.db.clone();
         cx.background().spawn(async move {
             if should_show {
-                db.write_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY, "")?;
+                KEY_VALUE_STORE
+                    .write_kvp(
+                        SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string(),
+                        "".to_string(),
+                    )
+                    .await?;
             } else {
-                db.delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?;
+                KEY_VALUE_STORE
+                    .delete_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY.to_string())
+                    .await?;
             }
             Ok(())
         })
     }

     fn should_show_update_notification(&self, cx: &AppContext) -> Task<Result<bool>> {
-        let db = self.db.clone();
-        cx.background()
-            .spawn(async move { Ok(db.read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?.is_some()) })
+        cx.background().spawn(async move {
+            Ok(KEY_VALUE_STORE
+                .read_kvp(SHOULD_SHOW_UPDATE_NOTIFICATION_KEY)?
+                .is_some())
+        })
     }
 }
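Note on the storage change above: the auto-updater no longer threads a `Db` handle through `AutoUpdater`; the notification flag lives in the global `KEY_VALUE_STORE` from `db::kvp`. A minimal sketch of the read/write/delete shape, assuming only the signatures visible in this hunk (the helper functions themselves are illustrative, not part of the diff):

```rust
use db::kvp::KEY_VALUE_STORE;

const KEY: &str = "auto-updater-should-show-updated-notification";

// Writes and deletes are async in the new API and take owned Strings.
async fn set_flag(should_show: bool) -> anyhow::Result<()> {
    if should_show {
        KEY_VALUE_STORE
            .write_kvp(KEY.to_string(), "".to_string())
            .await?;
    } else {
        KEY_VALUE_STORE.delete_kvp(KEY.to_string()).await?;
    }
    Ok(())
}

// Reads stay synchronous and return Result<Option<String>>.
fn read_flag() -> anyhow::Result<bool> {
    Ok(KEY_VALUE_STORE.read_kvp(KEY)?.is_some())
}
```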
@@ -5,8 +5,9 @@ use gpui::{
     Element, Entity, MouseButton, View, ViewContext,
 };
 use menu::Cancel;
-use settings::{ReleaseChannel, Settings};
-use workspace::Notification;
+use settings::Settings;
+use util::channel::ReleaseChannel;
+use workspace::notifications::Notification;

 pub struct UpdateNotification {
     version: AppVersion,

@@ -27,9 +28,9 @@ impl View for UpdateNotification {

     fn render(&mut self, cx: &mut gpui::RenderContext<'_, Self>) -> gpui::ElementBox {
         let theme = cx.global::<Settings>().theme.clone();
-        let theme = &theme.update_notification;
+        let theme = &theme.simple_message_notification;

-        let app_name = cx.global::<ReleaseChannel>().name();
+        let app_name = cx.global::<ReleaseChannel>().display_name();

         MouseEventHandler::<ViewReleaseNotes>::new(0, cx, |state, cx| {
             Flex::column()
@@ -4,7 +4,10 @@ use gpui::{
 use itertools::Itertools;
 use search::ProjectSearchView;
 use settings::Settings;
-use workspace::{ItemEvent, ItemHandle, ToolbarItemLocation, ToolbarItemView};
+use workspace::{
+    item::{ItemEvent, ItemHandle},
+    ToolbarItemLocation, ToolbarItemView,
+};

 pub enum Event {
     UpdateLocation,
@@ -22,7 +22,7 @@ pub fn init(client: Arc<Client>, user_store: ModelHandle<UserStore>, cx: &mut Mu
 #[derive(Clone)]
 pub struct IncomingCall {
     pub room_id: u64,
-    pub caller: Arc<User>,
+    pub calling_user: Arc<User>,
     pub participants: Vec<Arc<User>>,
     pub initial_project: Option<proto::ParticipantProject>,
 }

@@ -78,9 +78,9 @@ impl ActiveCall {
                     user_store.get_users(envelope.payload.participant_user_ids, cx)
                 })
                 .await?,
-            caller: user_store
+            calling_user: user_store
                 .update(&mut cx, |user_store, cx| {
-                    user_store.get_user(envelope.payload.caller_user_id, cx)
+                    user_store.get_user(envelope.payload.calling_user_id, cx)
                 })
                 .await?,
             initial_project: envelope.payload.initial_project,

@@ -110,13 +110,13 @@ impl ActiveCall {

     pub fn invite(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project: Option<ModelHandle<Project>>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let client = self.client.clone();
         let user_store = self.user_store.clone();
-        if !self.pending_invites.insert(recipient_user_id) {
+        if !self.pending_invites.insert(called_user_id) {
             return Task::ready(Err(anyhow!("user was already invited")));
         }

@@ -136,13 +136,13 @@ impl ActiveCall {
                 };

                 room.update(&mut cx, |room, cx| {
-                    room.call(recipient_user_id, initial_project_id, cx)
+                    room.call(called_user_id, initial_project_id, cx)
                 })
                 .await?;
             } else {
                 let room = cx
                     .update(|cx| {
-                        Room::create(recipient_user_id, initial_project, client, user_store, cx)
+                        Room::create(called_user_id, initial_project, client, user_store, cx)
                     })
                     .await?;

@@ -155,7 +155,7 @@ impl ActiveCall {

             let result = invite.await;
             this.update(&mut cx, |this, cx| {
-                this.pending_invites.remove(&recipient_user_id);
+                this.pending_invites.remove(&called_user_id);
                 cx.notify();
             });
             result

@@ -164,7 +164,7 @@ impl ActiveCall {

     pub fn cancel_invite(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {
         let room_id = if let Some(room) = self.room() {

@@ -178,7 +178,7 @@ impl ActiveCall {
             client
                 .request(proto::CancelCall {
                     room_id,
-                    recipient_user_id,
+                    called_user_id,
                 })
                 .await?;
             anyhow::Ok(())
@@ -4,7 +4,7 @@ use collections::HashMap;
 use gpui::WeakModelHandle;
 pub use live_kit_client::Frame;
 use project::Project;
-use std::sync::Arc;
+use std::{fmt, sync::Arc};

 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum ParticipantLocation {

@@ -36,7 +36,7 @@ pub struct LocalParticipant {
     pub active_project: Option<WeakModelHandle<Project>>,
 }

-#[derive(Clone)]
+#[derive(Clone, Debug)]
 pub struct RemoteParticipant {
     pub user: Arc<User>,
     pub projects: Vec<proto::ParticipantProject>,

@@ -49,6 +49,12 @@ pub struct RemoteVideoTrack {
     pub(crate) live_kit_track: Arc<live_kit_client::RemoteVideoTrack>,
 }

+impl fmt::Debug for RemoteVideoTrack {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RemoteVideoTrack").finish()
+    }
+}
+
 impl RemoteVideoTrack {
     pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
         self.live_kit_track.frames()
@@ -5,14 +5,18 @@ use crate::{
 use anyhow::{anyhow, Result};
 use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
 use collections::{BTreeMap, HashSet};
-use futures::StreamExt;
-use gpui::{AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task};
+use futures::{FutureExt, StreamExt};
+use gpui::{
+    AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, WeakModelHandle,
+};
 use live_kit_client::{LocalTrackPublication, LocalVideoTrack, RemoteVideoTrackUpdate};
 use postage::stream::Stream;
 use project::Project;
-use std::{mem, os::unix::prelude::OsStrExt, sync::Arc};
+use std::{mem, sync::Arc, time::Duration};
 use util::{post_inc, ResultExt};

+pub const RECONNECTION_TIMEOUT: Duration = client::RECEIVE_TIMEOUT;
+
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum Event {
     ParticipantLocationChanged {

@@ -46,6 +50,7 @@ pub struct Room {
     user_store: ModelHandle<UserStore>,
     subscriptions: Vec<client::Subscription>,
     pending_room_update: Option<Task<()>>,
+    _maintain_connection: Task<Result<()>>,
 }

 impl Entity for Room {

@@ -53,7 +58,7 @@ impl Entity for Room {

     fn release(&mut self, _: &mut MutableAppContext) {
         if self.status.is_online() {
-            self.client.send(proto::LeaveRoom { id: self.id }).log_err();
+            self.client.send(proto::LeaveRoom {}).log_err();
         }
     }
 }

@@ -66,21 +71,6 @@ impl Room {
         user_store: ModelHandle<UserStore>,
         cx: &mut ModelContext<Self>,
     ) -> Self {
-        let mut client_status = client.status();
-        cx.spawn_weak(|this, mut cx| async move {
-            let is_connected = client_status
-                .next()
-                .await
-                .map_or(false, |s| s.is_connected());
-            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
-            if !is_connected || client_status.next().await.is_some() {
-                if let Some(this) = this.upgrade(&cx) {
-                    let _ = this.update(&mut cx, |this, cx| this.leave(cx));
-                }
-            }
-        })
-        .detach();
-
         let live_kit_room = if let Some(connection_info) = live_kit_connection_info {
             let room = live_kit_client::Room::new();
             let mut status = room.status();

@@ -131,6 +121,9 @@ impl Room {
             None
         };

+        let _maintain_connection =
+            cx.spawn_weak(|this, cx| Self::maintain_connection(this, client.clone(), cx));
+
         Self {
             id,
             live_kit: live_kit_room,

@@ -145,11 +138,12 @@ impl Room {
             pending_room_update: None,
             client,
             user_store,
+            _maintain_connection,
         }
     }

     pub(crate) fn create(
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project: Option<ModelHandle<Project>>,
         client: Arc<Client>,
         user_store: ModelHandle<UserStore>,

@@ -182,7 +176,7 @@ impl Room {
                 match room
                     .update(&mut cx, |room, cx| {
                         room.leave_when_empty = true;
-                        room.call(recipient_user_id, initial_project_id, cx)
+                        room.call(called_user_id, initial_project_id, cx)
                     })
                     .await
                 {

@@ -241,10 +235,87 @@ impl Room {
         self.participant_user_ids.clear();
         self.subscriptions.clear();
         self.live_kit.take();
-        self.client.send(proto::LeaveRoom { id: self.id })?;
+        self.client.send(proto::LeaveRoom {})?;
         Ok(())
     }

+    async fn maintain_connection(
+        this: WeakModelHandle<Self>,
+        client: Arc<Client>,
+        mut cx: AsyncAppContext,
+    ) -> Result<()> {
+        let mut client_status = client.status();
+        loop {
+            let is_connected = client_status
+                .next()
+                .await
+                .map_or(false, |s| s.is_connected());
+            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
+            if !is_connected || client_status.next().await.is_some() {
+                let room_id = this
+                    .upgrade(&cx)
+                    .ok_or_else(|| anyhow!("room was dropped"))?
+                    .update(&mut cx, |this, cx| {
+                        this.status = RoomStatus::Rejoining;
+                        cx.notify();
+                        this.id
+                    });
+
+                // Wait for client to re-establish a connection to the server.
+                let mut reconnection_timeout = cx.background().timer(RECONNECTION_TIMEOUT).fuse();
+                let client_reconnection = async {
+                    loop {
+                        if let Some(status) = client_status.next().await {
+                            if status.is_connected() {
+                                return true;
+                            }
+                        } else {
+                            return false;
+                        }
+                    }
+                }
+                .fuse();
+                futures::pin_mut!(client_reconnection);
+
+                futures::select_biased! {
+                    reconnected = client_reconnection => {
+                        if reconnected {
+                            // Client managed to reconnect to the server. Now attempt to join the room.
+                            let rejoin_room = async {
+                                let response = client.request(proto::JoinRoom { id: room_id }).await?;
+                                let room_proto = response.room.ok_or_else(|| anyhow!("invalid room"))?;
+                                this.upgrade(&cx)
+                                    .ok_or_else(|| anyhow!("room was dropped"))?
+                                    .update(&mut cx, |this, cx| {
+                                        this.status = RoomStatus::Online;
+                                        this.apply_room_update(room_proto, cx)
+                                    })?;
+                                anyhow::Ok(())
+                            };
+
+                            // If we successfully joined the room, go back around the loop
+                            // waiting for future connection status changes.
+                            if rejoin_room.await.log_err().is_some() {
+                                continue;
+                            }
+                        }
+                    }
+                    _ = reconnection_timeout => {}
+                }
+
+                // The client failed to re-establish a connection to the server
+                // or an error occurred while trying to re-join the room. Either way
+                // we leave the room and return an error.
+                if let Some(this) = this.upgrade(&cx) {
+                    let _ = this.update(&mut cx, |this, cx| this.leave(cx));
+                }
+                return Err(anyhow!(
+                    "can't reconnect to room: client failed to re-establish connection"
+                ));
+            }
+        }
+    }
+
     pub fn id(&self) -> u64 {
         self.id
     }
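The `maintain_connection` task added above races a rejoin attempt against `RECONNECTION_TIMEOUT` with `futures::select_biased!`, preferring the reconnection branch when both futures are ready. Stripped to its essentials, the race looks like the sketch below (a hedged illustration, with the status stream and the room rejoin replaced by timers; assumes the `futures` and `smol` crates):

```rust
use futures::FutureExt;
use std::time::Duration;

async fn race_reconnect_against_timeout() -> Result<(), &'static str> {
    let timeout = smol::Timer::after(Duration::from_secs(30)).fuse();
    let reconnection = async {
        // Stand-in for waiting on the client's status stream to report
        // a connected state again.
        smol::Timer::after(Duration::from_secs(1)).await;
        true
    }
    .fuse();
    futures::pin_mut!(timeout, reconnection);

    futures::select_biased! {
        reconnected = reconnection => {
            if reconnected {
                // Corresponds to rejoining the room and continuing the loop.
                return Ok(());
            }
        }
        _ = timeout => {}
    }
    // Timed out or failed to rejoin: leave the room, as the diff does.
    Err("client failed to re-establish connection")
}
```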
@@ -294,6 +365,11 @@
             .position(|participant| Some(participant.user_id) == self.client.user_id());
         let local_participant = local_participant_ix.map(|ix| room.participants.swap_remove(ix));

+        let pending_participant_user_ids = room
+            .pending_participants
+            .iter()
+            .map(|p| p.user_id)
+            .collect::<Vec<_>>();
         let remote_participant_user_ids = room
             .participants
             .iter()

@@ -303,7 +379,7 @@ impl Room {
             self.user_store.update(cx, move |user_store, cx| {
                 (
                     user_store.get_users(remote_participant_user_ids, cx),
-                    user_store.get_users(room.pending_participant_user_ids, cx),
+                    user_store.get_users(pending_participant_user_ids, cx),
                 )
             });
         self.pending_room_update = Some(cx.spawn(|this, mut cx| async move {

@@ -320,9 +396,11 @@
                 }

                 if let Some(participants) = remote_participants.log_err() {
+                    let mut participant_peer_ids = HashSet::default();
                     for (participant, user) in room.participants.into_iter().zip(participants) {
                         let peer_id = PeerId(participant.peer_id);
                         this.participant_user_ids.insert(participant.user_id);
+                        participant_peer_ids.insert(peer_id);

                         let old_projects = this
                             .remote_participants

@@ -389,8 +467,8 @@ impl Room {
                         }
                     }

-                    this.remote_participants.retain(|_, participant| {
-                        if this.participant_user_ids.contains(&participant.user.id) {
+                    this.remote_participants.retain(|peer_id, participant| {
+                        if participant_peer_ids.contains(peer_id) {
                             true
                         } else {
                             for project in &participant.projects {

@@ -472,10 +550,12 @@ impl Room {
         {
             for participant in self.remote_participants.values() {
                 assert!(self.participant_user_ids.contains(&participant.user.id));
+                assert_ne!(participant.user.id, self.client.user_id().unwrap());
             }

             for participant in &self.pending_participants {
                 assert!(self.participant_user_ids.contains(&participant.id));
+                assert_ne!(participant.id, self.client.user_id().unwrap());
             }

             assert_eq!(

@@ -487,7 +567,7 @@ impl Room {

     pub(crate) fn call(
         &mut self,
-        recipient_user_id: u64,
+        called_user_id: u64,
         initial_project_id: Option<u64>,
         cx: &mut ModelContext<Self>,
     ) -> Task<Result<()>> {

@@ -503,7 +583,7 @@ impl Room {
             let result = client
                 .request(proto::Call {
                     room_id,
-                    recipient_user_id,
+                    called_user_id,
                     initial_project_id,
                 })
                 .await;

@@ -538,7 +618,7 @@ impl Room {
                         id: worktree.id().to_proto(),
                         root_name: worktree.root_name().into(),
                         visible: worktree.is_visible(),
-                        abs_path: worktree.abs_path().as_os_str().as_bytes().to_vec(),
+                        abs_path: worktree.abs_path().to_string_lossy().into(),
                     }
                 })
                 .collect(),

@@ -746,6 +826,7 @@ impl Default for ScreenTrack {
 #[derive(Copy, Clone, PartialEq, Eq)]
 pub enum RoomStatus {
     Online,
+    Rejoining,
     Offline,
 }
@@ -11,14 +11,12 @@ use async_tungstenite::tungstenite::{
     error::Error as WebsocketError,
     http::{Request, StatusCode},
 };
-use db::Db;
 use futures::{future::LocalBoxFuture, AsyncReadExt, FutureExt, SinkExt, StreamExt, TryStreamExt};
 use gpui::{
     actions,
     serde_json::{self, Value},
     AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AppContext,
-    AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task, View, ViewContext,
-    ViewHandle,
+    AsyncAppContext, Entity, ModelHandle, MutableAppContext, Task, View, ViewContext, ViewHandle,
 };
 use http::HttpClient;
 use lazy_static::lazy_static;

@@ -27,13 +25,13 @@ use postage::watch;
 use rand::prelude::*;
 use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, RequestMessage};
 use serde::Deserialize;
-use settings::ReleaseChannel;
 use std::{
     any::TypeId,
     collections::HashMap,
     convert::TryFrom,
     fmt::Write as _,
     future::Future,
+    marker::PhantomData,
     path::PathBuf,
     sync::{Arc, Weak},
     time::{Duration, Instant},

@@ -41,6 +39,7 @@ use std::{
 use telemetry::Telemetry;
 use thiserror::Error;
 use url::Url;
+use util::channel::ReleaseChannel;
 use util::{ResultExt, TryFutureExt};

 pub use rpc::*;

@@ -172,7 +171,7 @@ struct ClientState {
     entity_id_extractors: HashMap<TypeId, fn(&dyn AnyTypedEnvelope) -> u64>,
     _reconnect_task: Option<Task<()>>,
     reconnect_interval: Duration,
-    entities_by_type_and_remote_id: HashMap<(TypeId, u64), AnyWeakEntityHandle>,
+    entities_by_type_and_remote_id: HashMap<(TypeId, u64), WeakSubscriber>,
     models_by_message_type: HashMap<TypeId, AnyWeakModelHandle>,
     entity_types_by_message_type: HashMap<TypeId, TypeId>,
     #[allow(clippy::type_complexity)]

@@ -182,7 +181,7 @@ struct ClientState {
         dyn Send
             + Sync
             + Fn(
-                AnyEntityHandle,
+                Subscriber,
                 Box<dyn AnyTypedEnvelope>,
                 &Arc<Client>,
                 AsyncAppContext,

@@ -191,12 +190,13 @@ struct ClientState {
     >,
 }

-enum AnyWeakEntityHandle {
+enum WeakSubscriber {
     Model(AnyWeakModelHandle),
     View(AnyWeakViewHandle),
+    Pending(Vec<Box<dyn AnyTypedEnvelope>>),
 }

-enum AnyEntityHandle {
+enum Subscriber {
     Model(AnyModelHandle),
     View(AnyViewHandle),
 }
@@ -254,6 +254,54 @@ impl Drop for Subscription {
     }
 }

+pub struct PendingEntitySubscription<T: Entity> {
+    client: Arc<Client>,
+    remote_id: u64,
+    _entity_type: PhantomData<T>,
+    consumed: bool,
+}
+
+impl<T: Entity> PendingEntitySubscription<T> {
+    pub fn set_model(mut self, model: &ModelHandle<T>, cx: &mut AsyncAppContext) -> Subscription {
+        self.consumed = true;
+        let mut state = self.client.state.write();
+        let id = (TypeId::of::<T>(), self.remote_id);
+        let Some(WeakSubscriber::Pending(messages)) =
+            state.entities_by_type_and_remote_id.remove(&id)
+        else {
+            unreachable!()
+        };
+
+        state
+            .entities_by_type_and_remote_id
+            .insert(id, WeakSubscriber::Model(model.downgrade().into()));
+        drop(state);
+        for message in messages {
+            self.client.handle_message(message, cx);
+        }
+        Subscription::Entity {
+            client: Arc::downgrade(&self.client),
+            id,
+        }
+    }
+}
+
+impl<T: Entity> Drop for PendingEntitySubscription<T> {
+    fn drop(&mut self) {
+        if !self.consumed {
+            let mut state = self.client.state.write();
+            if let Some(WeakSubscriber::Pending(messages)) = state
+                .entities_by_type_and_remote_id
+                .remove(&(TypeId::of::<T>(), self.remote_id))
+            {
+                for message in messages {
+                    log::info!("unhandled message {}", message.payload_type_name());
+                }
+            }
+        }
+    }
+}
+
 impl Client {
     pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
         Arc::new(Self {
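`subscribe_to_entity` now returns a `PendingEntitySubscription` rather than registering a model immediately: messages that arrive before the model exists are buffered in `WeakSubscriber::Pending` and replayed by `set_model`, and a dropped, unconsumed subscription just logs its buffered messages. Usage follows the shape exercised by the updated test later in this file's diff (a fragment only; `remote_id` and `model` stand in for a real entity):

```rust
// Subscribe first, so messages addressed to this remote entity are
// buffered rather than dropped...
let pending = client.subscribe_to_entity(remote_id);
// ...then attach the model; buffered messages are replayed in order.
let _subscription = pending.set_model(&model, &mut cx.to_async());
```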
@@ -349,7 +397,11 @@ impl Client {
         let this = self.clone();
         let reconnect_interval = state.reconnect_interval;
         state._reconnect_task = Some(cx.spawn(|cx| async move {
+            #[cfg(any(test, feature = "test-support"))]
+            let mut rng = StdRng::seed_from_u64(0);
+            #[cfg(not(any(test, feature = "test-support")))]
+            let mut rng = StdRng::from_entropy();

             let mut delay = INITIAL_RECONNECTION_DELAY;
             while let Err(error) = this.authenticate_and_connect(true, &cx).await {
                 log::error!("failed to connect {}", error);
@@ -387,26 +439,28 @@ impl Client {
         self.state
             .write()
             .entities_by_type_and_remote_id
-            .insert(id, AnyWeakEntityHandle::View(cx.weak_handle().into()));
+            .insert(id, WeakSubscriber::View(cx.weak_handle().into()));
         Subscription::Entity {
             client: Arc::downgrade(self),
             id,
         }
     }

-    pub fn add_model_for_remote_entity<T: Entity>(
+    pub fn subscribe_to_entity<T: Entity>(
         self: &Arc<Self>,
         remote_id: u64,
-        cx: &mut ModelContext<T>,
-    ) -> Subscription {
+    ) -> PendingEntitySubscription<T> {
         let id = (TypeId::of::<T>(), remote_id);
         self.state
             .write()
             .entities_by_type_and_remote_id
-            .insert(id, AnyWeakEntityHandle::Model(cx.weak_handle().into()));
-        Subscription::Entity {
-            client: Arc::downgrade(self),
-            id,
+            .insert(id, WeakSubscriber::Pending(Default::default()));
+
+        PendingEntitySubscription {
+            client: self.clone(),
+            remote_id,
+            consumed: false,
+            _entity_type: PhantomData,
         }
     }
@@ -434,7 +488,7 @@ impl Client {
         let prev_handler = state.message_handlers.insert(
             message_type_id,
             Arc::new(move |handle, envelope, client, cx| {
-                let handle = if let AnyEntityHandle::Model(handle) = handle {
+                let handle = if let Subscriber::Model(handle) = handle {
                     handle
                 } else {
                     unreachable!();

@@ -488,7 +542,7 @@ impl Client {
         F: 'static + Future<Output = Result<()>>,
     {
         self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
-            if let AnyEntityHandle::View(handle) = handle {
+            if let Subscriber::View(handle) = handle {
                 handler(handle.downcast::<E>().unwrap(), message, client, cx)
             } else {
                 unreachable!();

@@ -507,7 +561,7 @@ impl Client {
         F: 'static + Future<Output = Result<()>>,
     {
         self.add_entity_message_handler::<M, E, _, _>(move |handle, message, client, cx| {
-            if let AnyEntityHandle::Model(handle) = handle {
+            if let Subscriber::Model(handle) = handle {
                 handler(handle.downcast::<E>().unwrap(), message, client, cx)
             } else {
                 unreachable!();

@@ -522,7 +576,7 @@ impl Client {
         H: 'static
             + Send
             + Sync
-            + Fn(AnyEntityHandle, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
+            + Fn(Subscriber, TypedEnvelope<M>, Arc<Self>, AsyncAppContext) -> F,
         F: 'static + Future<Output = Result<()>>,
     {
         let model_type_id = TypeId::of::<E>();
@@ -784,94 +838,8 @@ impl Client {
         let cx = cx.clone();
         let this = self.clone();
         async move {
-            let mut message_id = 0_usize;
             while let Some(message) = incoming.next().await {
-                let mut state = this.state.write();
-                message_id += 1;
-                let type_name = message.payload_type_name();
-                let payload_type_id = message.payload_type_id();
-                let sender_id = message.original_sender_id().map(|id| id.0);
-
-                let model = state
-                    .models_by_message_type
-                    .get(&payload_type_id)
-                    .and_then(|model| model.upgrade(&cx))
-                    .map(AnyEntityHandle::Model)
-                    .or_else(|| {
-                        let entity_type_id =
-                            *state.entity_types_by_message_type.get(&payload_type_id)?;
-                        let entity_id = state
-                            .entity_id_extractors
-                            .get(&message.payload_type_id())
-                            .map(|extract_entity_id| {
-                                (extract_entity_id)(message.as_ref())
-                            })?;
-
-                        let entity = state
-                            .entities_by_type_and_remote_id
-                            .get(&(entity_type_id, entity_id))?;
-                        if let Some(entity) = entity.upgrade(&cx) {
-                            Some(entity)
-                        } else {
-                            state
-                                .entities_by_type_and_remote_id
-                                .remove(&(entity_type_id, entity_id));
-                            None
-                        }
-                    });
-
-                let model = if let Some(model) = model {
-                    model
-                } else {
-                    log::info!("unhandled message {}", type_name);
-                    continue;
-                };
-
-                let handler = state.message_handlers.get(&payload_type_id).cloned();
-                // Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
-                // It also ensures we don't hold the lock while yielding back to the executor, as
-                // that might cause the executor thread driving this future to block indefinitely.
-                drop(state);
-
-                if let Some(handler) = handler {
-                    let future = handler(model, message, &this, cx.clone());
-                    let client_id = this.id;
-                    log::debug!(
-                        "rpc message received. client_id:{}, message_id:{}, sender_id:{:?}, type:{}",
-                        client_id,
-                        message_id,
-                        sender_id,
-                        type_name
-                    );
-                    cx.foreground()
-                        .spawn(async move {
-                            match future.await {
-                                Ok(()) => {
-                                    log::debug!(
-                                        "rpc message handled. client_id:{}, message_id:{}, sender_id:{:?}, type:{}",
-                                        client_id,
-                                        message_id,
-                                        sender_id,
-                                        type_name
-                                    );
-                                }
-                                Err(error) => {
-                                    log::error!(
-                                        "error handling message. client_id:{}, message_id:{}, sender_id:{:?}, type:{}, error:{:?}",
-                                        client_id,
-                                        message_id,
-                                        sender_id,
-                                        type_name,
-                                        error
-                                    );
-                                }
-                            }
-                        })
-                        .detach();
-                } else {
-                    log::info!("unhandled message {}", type_name);
-                }
-
+                this.handle_message(message, &cx);
                 // Don't starve the main thread when receiving lots of messages at once.
                 smol::future::yield_now().await;
             }
@@ -1218,8 +1186,99 @@ impl Client {
         self.peer.respond_with_error(receipt, error)
     }

-    pub fn start_telemetry(&self, db: Db) {
-        self.telemetry.start(db.clone());
+    fn handle_message(
+        self: &Arc<Client>,
+        message: Box<dyn AnyTypedEnvelope>,
+        cx: &AsyncAppContext,
+    ) {
+        let mut state = self.state.write();
+        let type_name = message.payload_type_name();
+        let payload_type_id = message.payload_type_id();
+        let sender_id = message.original_sender_id().map(|id| id.0);
+
+        let mut subscriber = None;
+
+        if let Some(message_model) = state
+            .models_by_message_type
+            .get(&payload_type_id)
+            .and_then(|model| model.upgrade(cx))
+        {
+            subscriber = Some(Subscriber::Model(message_model));
+        } else if let Some((extract_entity_id, entity_type_id)) =
+            state.entity_id_extractors.get(&payload_type_id).zip(
+                state
+                    .entity_types_by_message_type
+                    .get(&payload_type_id)
+                    .copied(),
+            )
+        {
+            let entity_id = (extract_entity_id)(message.as_ref());
+
+            match state
+                .entities_by_type_and_remote_id
+                .get_mut(&(entity_type_id, entity_id))
+            {
+                Some(WeakSubscriber::Pending(pending)) => {
+                    pending.push(message);
+                    return;
+                }
+                Some(weak_subscriber @ _) => subscriber = weak_subscriber.upgrade(cx),
+                _ => {}
+            }
+        }
+
+        let subscriber = if let Some(subscriber) = subscriber {
+            subscriber
+        } else {
+            log::info!("unhandled message {}", type_name);
+            return;
+        };
+
+        let handler = state.message_handlers.get(&payload_type_id).cloned();
+        // Dropping the state prevents deadlocks if the handler interacts with rpc::Client.
+        // It also ensures we don't hold the lock while yielding back to the executor, as
+        // that might cause the executor thread driving this future to block indefinitely.
+        drop(state);
+
+        if let Some(handler) = handler {
+            let future = handler(subscriber, message, &self, cx.clone());
+            let client_id = self.id;
+            log::debug!(
+                "rpc message received. client_id:{}, sender_id:{:?}, type:{}",
+                client_id,
+                sender_id,
+                type_name
+            );
+            cx.foreground()
+                .spawn(async move {
+                    match future.await {
+                        Ok(()) => {
+                            log::debug!(
+                                "rpc message handled. client_id:{}, sender_id:{:?}, type:{}",
+                                client_id,
+                                sender_id,
+                                type_name
+                            );
+                        }
+                        Err(error) => {
+                            log::error!(
+                                "error handling message. client_id:{}, sender_id:{:?}, type:{}, error:{:?}",
+                                client_id,
+                                sender_id,
+                                type_name,
+                                error
+                            );
+                        }
+                    }
+                })
+                .detach();
+        } else {
+            log::info!("unhandled message {}", type_name);
+        }
+    }
+
+    pub fn start_telemetry(&self) {
+        self.telemetry.start();
     }

     pub fn report_event(&self, kind: &str, properties: Value) {

@@ -1231,11 +1290,12 @@ impl Client {
     }
 }

-impl AnyWeakEntityHandle {
-    fn upgrade(&self, cx: &AsyncAppContext) -> Option<AnyEntityHandle> {
+impl WeakSubscriber {
+    fn upgrade(&self, cx: &AsyncAppContext) -> Option<Subscriber> {
         match self {
-            AnyWeakEntityHandle::Model(handle) => handle.upgrade(cx).map(AnyEntityHandle::Model),
-            AnyWeakEntityHandle::View(handle) => handle.upgrade(cx).map(AnyEntityHandle::View),
+            WeakSubscriber::Model(handle) => handle.upgrade(cx).map(Subscriber::Model),
+            WeakSubscriber::View(handle) => handle.upgrade(cx).map(Subscriber::View),
+            WeakSubscriber::Pending(_) => None,
         }
     }
 }
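Both the removed loop and the new `handle_message` keep the discipline spelled out in the repeated comment: clone the handler out of the lock, `drop(state)`, and only then run or spawn the handler's future, since holding the lock across a yield could deadlock against reentrant `rpc::Client` calls or block the executor thread. The pattern in isolation (illustrative types; `parking_lot` as used by the client):

```rust
use parking_lot::RwLock;
use std::sync::Arc;

type Handler = Arc<dyn Fn() + Send + Sync>;

fn dispatch(state: &RwLock<Vec<Handler>>) {
    let guard = state.read();
    // Clone what we need out of the lock...
    let handler = guard.first().cloned();
    // ...and release it before invoking, because the handler may re-enter
    // `dispatch` or yield back to the executor.
    drop(guard);
    if let Some(handler) = handler {
        handler();
    }
}
```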
@@ -1480,11 +1540,17 @@ mod tests {
             subscription: None,
         });

-        let _subscription1 = model1.update(cx, |_, cx| client.add_model_for_remote_entity(1, cx));
-        let _subscription2 = model2.update(cx, |_, cx| client.add_model_for_remote_entity(2, cx));
+        let _subscription1 = client
+            .subscribe_to_entity(1)
+            .set_model(&model1, &mut cx.to_async());
+        let _subscription2 = client
+            .subscribe_to_entity(2)
+            .set_model(&model2, &mut cx.to_async());
         // Ensure dropping a subscription for the same entity type still allows receiving of
         // messages for other entity IDs of the same type.
-        let subscription3 = model3.update(cx, |_, cx| client.add_model_for_remote_entity(3, cx));
+        let subscription3 = client
+            .subscribe_to_entity(3)
+            .set_model(&model3, &mut cx.to_async());
         drop(subscription3);

         server.send(proto::JoinProject { project_id: 1 });
@@ -1,5 +1,5 @@
 use crate::http::HttpClient;
-use db::Db;
+use db::kvp::KEY_VALUE_STORE;
 use gpui::{
     executor::Background,
     serde_json::{self, value::Map, Value},

@@ -10,7 +10,6 @@ use lazy_static::lazy_static;
 use parking_lot::Mutex;
 use serde::Serialize;
 use serde_json::json;
-use settings::ReleaseChannel;
 use std::{
     io::Write,
     mem,

@@ -19,7 +18,7 @@ use std::{
     time::{Duration, SystemTime, UNIX_EPOCH},
 };
 use tempfile::NamedTempFile;
-use util::{post_inc, ResultExt, TryFutureExt};
+use util::{channel::ReleaseChannel, post_inc, ResultExt, TryFutureExt};
 use uuid::Uuid;

 pub struct Telemetry {

@@ -107,7 +106,7 @@ impl Telemetry {
     pub fn new(client: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
         let platform = cx.platform();
         let release_channel = if cx.has_global::<ReleaseChannel>() {
-            Some(cx.global::<ReleaseChannel>().name())
+            Some(cx.global::<ReleaseChannel>().display_name())
         } else {
             None
         };

@@ -148,18 +147,21 @@ impl Telemetry {
         Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
     }

-    pub fn start(self: &Arc<Self>, db: Db) {
+    pub fn start(self: &Arc<Self>) {
         let this = self.clone();
         self.executor
             .spawn(
                 async move {
-                    let device_id = if let Ok(Some(device_id)) = db.read_kvp("device_id") {
-                        device_id
-                    } else {
-                        let device_id = Uuid::new_v4().to_string();
-                        db.write_kvp("device_id", &device_id)?;
-                        device_id
-                    };
+                    let device_id =
+                        if let Ok(Some(device_id)) = KEY_VALUE_STORE.read_kvp("device_id") {
+                            device_id
+                        } else {
+                            let device_id = Uuid::new_v4().to_string();
+                            KEY_VALUE_STORE
+                                .write_kvp("device_id".to_string(), device_id.clone())
+                                .await?;
+                            device_id
+                        };

                     let device_id: Arc<str> = device_id.into();
                     let mut state = this.state.lock();

@@ -150,7 +150,6 @@ impl UserStore {
             client.telemetry.set_authenticated_user_info(None, false);
         }

-        client.telemetry.report_event("sign in", Default::default());
         current_user_tx.send(user).await.ok();
     }
 }
@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
|
|||
default-run = "collab"
|
||||
edition = "2021"
|
||||
name = "collab"
|
||||
version = "0.2.4"
|
||||
version = "0.3.1"
|
||||
|
||||
[[bin]]
|
||||
name = "collab"
|
||||
|
@ -19,12 +19,12 @@ rpc = { path = "../rpc" }
|
|||
util = { path = "../util" }
|
||||
|
||||
anyhow = "1.0.40"
|
||||
async-trait = "0.1.50"
|
||||
async-tungstenite = "0.16"
|
||||
axum = { version = "0.5", features = ["json", "headers", "ws"] }
|
||||
axum-extra = { version = "0.3", features = ["erased-json"] }
|
||||
base64 = "0.13"
|
||||
clap = { version = "3.1", features = ["derive"], optional = true }
|
||||
dashmap = "5.4"
|
||||
envy = "0.4.2"
|
||||
futures = "0.3"
|
||||
hyper = "0.14"
|
||||
|
@ -36,9 +36,13 @@ prometheus = "0.13"
|
|||
rand = "0.8"
|
||||
reqwest = { version = "0.11", features = ["json"], optional = true }
|
||||
scrypt = "0.7"
|
||||
# Remove fork dependency when a version with https://github.com/SeaQL/sea-orm/pull/1283 is released.
|
||||
sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls"] }
|
||||
sea-query = "0.27"
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
serde_json = "1.0"
|
||||
sha-1 = "0.9"
|
||||
sqlx = { version = "0.6", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
|
||||
time = { version = "0.3", features = ["serde", "serde-well-known"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
tokio-tungstenite = "0.17"
|
||||
|
@ -49,11 +53,6 @@ tracing = "0.1.34"
|
|||
tracing-log = "0.1.3"
|
||||
tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }
|
||||
|
||||
[dependencies.sqlx]
|
||||
git = "https://github.com/launchbadge/sqlx"
|
||||
rev = "4b7053807c705df312bcb9b6281e184bf7534eb3"
|
||||
features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid"]
|
||||
|
||||
[dev-dependencies]
|
||||
collections = { path = "../collections", features = ["test-support"] }
|
||||
gpui = { path = "../gpui", features = ["test-support"] }
|
||||
|
@ -65,6 +64,7 @@ fs = { path = "../fs", features = ["test-support"] }
|
|||
git = { path = "../git", features = ["test-support"] }
|
||||
live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
|
||||
lsp = { path = "../lsp", features = ["test-support"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
project = { path = "../project", features = ["test-support"] }
|
||||
rpc = { path = "../rpc", features = ["test-support"] }
|
||||
settings = { path = "../settings", features = ["test-support"] }
|
||||
|
@ -76,13 +76,10 @@ env_logger = "0.9"
|
|||
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
|
||||
util = { path = "../util" }
|
||||
lazy_static = "1.4"
|
||||
sea-orm = { git = "https://github.com/zed-industries/sea-orm", rev = "18f4c691085712ad014a51792af75a9044bacee6", features = ["sqlx-sqlite"] }
|
||||
serde_json = { version = "1.0", features = ["preserve_order"] }
|
||||
sqlx = { version = "0.6", features = ["sqlite"] }
|
||||
unindent = "0.1"
|
||||
|
||||
[dev-dependencies.sqlx]
|
||||
git = "https://github.com/launchbadge/sqlx"
|
||||
rev = "4b7053807c705df312bcb9b6281e184bf7534eb3"
|
||||
features = ["sqlite"]
|
||||
|
||||
[features]
|
||||
seed-support = ["clap", "lipsum", "reqwest"]
|
||||
|
|
|
@@ -1,5 +1,5 @@
-CREATE TABLE IF NOT EXISTS "users" (
-    "id" INTEGER PRIMARY KEY,
+CREATE TABLE "users" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
     "github_login" VARCHAR,
     "admin" BOOLEAN,
     "email_address" VARCHAR(255) DEFAULT NULL,

@@ -8,7 +8,7 @@ CREATE TABLE IF NOT EXISTS "users" (
     "inviter_id" INTEGER REFERENCES users (id),
     "connected_once" BOOLEAN NOT NULL DEFAULT false,
     "created_at" TIMESTAMP NOT NULL DEFAULT now,
-    "metrics_id" VARCHAR(255),
+    "metrics_id" TEXT,
     "github_user_id" INTEGER
 );
 CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");

@@ -16,15 +16,15 @@ CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
 CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
 CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");

-CREATE TABLE IF NOT EXISTS "access_tokens" (
-    "id" INTEGER PRIMARY KEY,
+CREATE TABLE "access_tokens" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
     "user_id" INTEGER REFERENCES users (id),
     "hash" VARCHAR(128)
 );
 CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");

-CREATE TABLE IF NOT EXISTS "contacts" (
-    "id" INTEGER PRIMARY KEY,
+CREATE TABLE "contacts" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
     "user_id_a" INTEGER REFERENCES users (id) NOT NULL,
     "user_id_b" INTEGER REFERENCES users (id) NOT NULL,
     "a_to_b" BOOLEAN NOT NULL,

@@ -34,8 +34,102 @@ CREATE TABLE IF NOT EXISTS "contacts" (
 CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b");
 CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");

-CREATE TABLE IF NOT EXISTS "projects" (
-    "id" INTEGER PRIMARY KEY,
-    "host_user_id" INTEGER REFERENCES users (id) NOT NULL,
-    "unregistered" BOOLEAN NOT NULL DEFAULT false
+CREATE TABLE "rooms" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+    "live_kit_room" VARCHAR NOT NULL
+);
+
+CREATE TABLE "projects" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+    "room_id" INTEGER REFERENCES rooms (id) NOT NULL,
+    "host_user_id" INTEGER REFERENCES users (id) NOT NULL,
+    "host_connection_id" INTEGER NOT NULL,
+    "host_connection_epoch" TEXT NOT NULL,
+    "unregistered" BOOLEAN NOT NULL DEFAULT FALSE
 );
+CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch");
+
+CREATE TABLE "worktrees" (
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "id" INTEGER NOT NULL,
+    "root_name" VARCHAR NOT NULL,
+    "abs_path" VARCHAR NOT NULL,
+    "visible" BOOL NOT NULL,
+    "scan_id" INTEGER NOT NULL,
+    "is_complete" BOOL NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
+
+CREATE TABLE "worktree_entries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INTEGER NOT NULL,
+    "id" INTEGER NOT NULL,
+    "is_dir" BOOL NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "inode" INTEGER NOT NULL,
+    "mtime_seconds" INTEGER NOT NULL,
+    "mtime_nanos" INTEGER NOT NULL,
+    "is_symlink" BOOL NOT NULL,
+    "is_ignored" BOOL NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, id),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
+CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
+
+CREATE TABLE "worktree_diagnostic_summaries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INTEGER NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "language_server_id" INTEGER NOT NULL,
+    "error_count" INTEGER NOT NULL,
+    "warning_count" INTEGER NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, path),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
+
+CREATE TABLE "language_servers" (
+    "id" INTEGER NOT NULL,
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "name" VARCHAR NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
+
+CREATE TABLE "project_collaborators" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "connection_id" INTEGER NOT NULL,
+    "connection_epoch" TEXT NOT NULL,
+    "user_id" INTEGER NOT NULL,
+    "replica_id" INTEGER NOT NULL,
+    "is_host" BOOLEAN NOT NULL
+);
+CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
+CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch");
+CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_epoch" ON "project_collaborators" ("project_id", "connection_id", "connection_epoch");
+
+CREATE TABLE "room_participants" (
+    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
+    "room_id" INTEGER NOT NULL REFERENCES rooms (id),
+    "user_id" INTEGER NOT NULL REFERENCES users (id),
+    "answering_connection_id" INTEGER,
+    "answering_connection_epoch" TEXT,
+    "answering_connection_lost" BOOLEAN NOT NULL,
+    "location_kind" INTEGER,
+    "location_project_id" INTEGER,
+    "initial_project_id" INTEGER,
+    "calling_user_id" INTEGER NOT NULL REFERENCES users (id),
+    "calling_connection_id" INTEGER NOT NULL,
+    "calling_connection_epoch" TEXT NOT NULL
+);
+CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
+CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch");
+CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch");
+CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
+CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_epoch" ON "room_participants" ("answering_connection_id", "answering_connection_epoch");
@@ -0,0 +1,90 @@
+CREATE TABLE IF NOT EXISTS "rooms" (
+    "id" SERIAL PRIMARY KEY,
+    "live_kit_room" VARCHAR NOT NULL
+);
+
+ALTER TABLE "projects"
+    ADD "room_id" INTEGER REFERENCES rooms (id),
+    ADD "host_connection_id" INTEGER,
+    ADD "host_connection_epoch" UUID;
+CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch");
+
+CREATE TABLE "worktrees" (
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "id" INT8 NOT NULL,
+    "root_name" VARCHAR NOT NULL,
+    "abs_path" VARCHAR NOT NULL,
+    "visible" BOOL NOT NULL,
+    "scan_id" INT8 NOT NULL,
+    "is_complete" BOOL NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
+
+CREATE TABLE "worktree_entries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INT8 NOT NULL,
+    "id" INT8 NOT NULL,
+    "is_dir" BOOL NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "inode" INT8 NOT NULL,
+    "mtime_seconds" INT8 NOT NULL,
+    "mtime_nanos" INTEGER NOT NULL,
+    "is_symlink" BOOL NOT NULL,
+    "is_ignored" BOOL NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, id),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
+CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
+
+CREATE TABLE "worktree_diagnostic_summaries" (
+    "project_id" INTEGER NOT NULL,
+    "worktree_id" INT8 NOT NULL,
+    "path" VARCHAR NOT NULL,
+    "language_server_id" INT8 NOT NULL,
+    "error_count" INTEGER NOT NULL,
+    "warning_count" INTEGER NOT NULL,
+    PRIMARY KEY(project_id, worktree_id, path),
+    FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
+);
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
+CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
+
+CREATE TABLE "language_servers" (
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "id" INT8 NOT NULL,
+    "name" VARCHAR NOT NULL,
+    PRIMARY KEY(project_id, id)
+);
+CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
+
+CREATE TABLE "project_collaborators" (
+    "id" SERIAL PRIMARY KEY,
+    "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
+    "connection_id" INTEGER NOT NULL,
+    "connection_epoch" UUID NOT NULL,
+    "user_id" INTEGER NOT NULL,
+    "replica_id" INTEGER NOT NULL,
+    "is_host" BOOLEAN NOT NULL
+);
+CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
+CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch");
+
+CREATE TABLE "room_participants" (
+    "id" SERIAL PRIMARY KEY,
+    "room_id" INTEGER NOT NULL REFERENCES rooms (id),
+    "user_id" INTEGER NOT NULL REFERENCES users (id),
+    "answering_connection_id" INTEGER,
+    "answering_connection_epoch" UUID,
+    "location_kind" INTEGER,
+    "location_project_id" INTEGER,
+    "initial_project_id" INTEGER,
+    "calling_user_id" INTEGER NOT NULL REFERENCES users (id),
+    "calling_connection_id" INTEGER NOT NULL,
+    "calling_connection_epoch" UUID NOT NULL
+);
+CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
+CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch");
+CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch");
@@ -0,0 +1,2 @@
+ALTER TABLE "signups"
+    ADD "added_to_mailing_list" BOOLEAN NOT NULL DEFAULT FALSE;
@@ -0,0 +1,7 @@
+ALTER TABLE "room_participants"
+    ADD "answering_connection_lost" BOOLEAN NOT NULL DEFAULT FALSE;
+
+CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
+CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_epoch" ON "project_collaborators" ("project_id", "connection_id", "connection_epoch");
+CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
+CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_epoch" ON "room_participants" ("answering_connection_id", "answering_connection_epoch");
@@ -1,6 +1,6 @@
 use crate::{
     auth,
-    db::{Invite, NewUserParams, Signup, User, UserId, WaitlistSummary},
+    db::{Invite, NewSignup, NewUserParams, User, UserId, WaitlistSummary},
     rpc::{self, ResultExt},
     AppState, Error, Result,
 };

@@ -204,7 +204,7 @@ async fn create_user(
 #[derive(Deserialize)]
 struct UpdateUserParams {
     admin: Option<bool>,
-    invite_count: Option<u32>,
+    invite_count: Option<i32>,
 }

 async fn update_user(

@@ -335,7 +335,7 @@ async fn get_user_for_invite_code(
 }

 async fn create_signup(
-    Json(params): Json<Signup>,
+    Json(params): Json<NewSignup>,
     Extension(app): Extension<Arc<AppState>>,
 ) -> Result<()> {
     app.db.create_signup(&params).await?;
@ -75,7 +75,7 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
|
|||
|
||||
const MAX_ACCESS_TOKENS_TO_STORE: usize = 8;
|
||||
|
||||
pub async fn create_access_token(db: &db::DefaultDb, user_id: UserId) -> Result<String> {
|
||||
pub async fn create_access_token(db: &db::Database, user_id: UserId) -> Result<String> {
|
||||
let access_token = rpc::auth::random_token();
|
||||
let access_token_hash =
|
||||
hash_access_token(&access_token).context("failed to hash access token")?;
|
||||
|
|
|
@ -1,12 +1,8 @@
|
|||
use collab::{Error, Result};
|
||||
use db::DefaultDb;
|
||||
use collab::db;
|
||||
use db::{ConnectOptions, Database};
|
||||
use serde::{de::DeserializeOwned, Deserialize};
|
||||
use std::fmt::Write;
|
||||
|
||||
#[allow(unused)]
|
||||
#[path = "../db.rs"]
|
||||
mod db;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct GitHubUser {
|
||||
id: i32,
|
||||
|
@ -17,7 +13,7 @@ struct GitHubUser {
|
|||
#[tokio::main]
|
||||
async fn main() {
|
||||
let database_url = std::env::var("DATABASE_URL").expect("missing DATABASE_URL env var");
|
||||
let db = DefaultDb::new(&database_url, 5)
|
||||
let db = Database::new(ConnectOptions::new(database_url))
|
||||
.await
|
||||
.expect("failed to connect to postgres database");
|
||||
let github_token = std::env::var("GITHUB_TOKEN").expect("missing GITHUB_TOKEN env var");
|
||||
|
|
(File diff suppressed because it is too large.)

crates/collab/src/db/access_token.rs (new file, 29 lines)
@@ -0,0 +1,29 @@
use super::{AccessTokenId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "access_tokens")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: AccessTokenId,
    pub user_id: UserId,
    pub hash: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/contact.rs (new file, 58 lines)
@@ -0,0 +1,58 @@
use super::{ContactId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "contacts")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ContactId,
    pub user_id_a: UserId,
    pub user_id_b: UserId,
    pub a_to_b: bool,
    pub should_notify: bool,
    pub accepted: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::room_participant::Entity",
        from = "Column::UserIdA",
        to = "super::room_participant::Column::UserId"
    )]
    UserARoomParticipant,
    #[sea_orm(
        belongs_to = "super::room_participant::Entity",
        from = "Column::UserIdB",
        to = "super::room_participant::Column::UserId"
    )]
    UserBRoomParticipant,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Contact {
    Accepted {
        user_id: UserId,
        should_notify: bool,
        busy: bool,
    },
    Outgoing {
        user_id: UserId,
    },
    Incoming {
        user_id: UserId,
        should_notify: bool,
    },
}

impl Contact {
    pub fn user_id(&self) -> UserId {
        match self {
            Contact::Accepted { user_id, .. } => *user_id,
            Contact::Outgoing { user_id } => *user_id,
            Contact::Incoming { user_id, .. } => *user_id,
        }
    }
}

crates/collab/src/db/language_server.rs (new file, 30 lines)
@@ -0,0 +1,30 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "language_servers")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub id: i64,
    pub name: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/project.rs (new file, 67 lines)
@@ -0,0 +1,67 @@
use super::{ProjectId, RoomId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "projects")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ProjectId,
    pub room_id: RoomId,
    pub host_user_id: UserId,
    pub host_connection_id: i32,
    pub host_connection_epoch: Uuid,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::HostUserId",
        to = "super::user::Column::Id"
    )]
    HostUser,
    #[sea_orm(
        belongs_to = "super::room::Entity",
        from = "Column::RoomId",
        to = "super::room::Column::Id"
    )]
    Room,
    #[sea_orm(has_many = "super::worktree::Entity")]
    Worktrees,
    #[sea_orm(has_many = "super::project_collaborator::Entity")]
    Collaborators,
    #[sea_orm(has_many = "super::language_server::Entity")]
    LanguageServers,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::HostUser.def()
    }
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Room.def()
    }
}

impl Related<super::worktree::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Worktrees.def()
    }
}

impl Related<super::project_collaborator::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Collaborators.def()
    }
}

impl Related<super::language_server::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::LanguageServers.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/project_collaborator.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
use super::{ProjectCollaboratorId, ProjectId, ReplicaId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "project_collaborators")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: ProjectCollaboratorId,
    pub project_id: ProjectId,
    pub connection_id: i32,
    pub connection_epoch: Uuid,
    pub user_id: UserId,
    pub replica_id: ReplicaId,
    pub is_host: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/room.rs (new file, 32 lines)
@@ -0,0 +1,32 @@
use super::RoomId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "rooms")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RoomId,
    pub live_kit_room: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::room_participant::Entity")]
    RoomParticipant,
    #[sea_orm(has_many = "super::project::Entity")]
    Project,
}

impl Related<super::room_participant::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RoomParticipant.def()
    }
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/room_participant.rs (new file, 50 lines)
@@ -0,0 +1,50 @@
use super::{ProjectId, RoomId, RoomParticipantId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "room_participants")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: RoomParticipantId,
    pub room_id: RoomId,
    pub user_id: UserId,
    pub answering_connection_id: Option<i32>,
    pub answering_connection_epoch: Option<Uuid>,
    pub answering_connection_lost: bool,
    pub location_kind: Option<i32>,
    pub location_project_id: Option<ProjectId>,
    pub initial_project_id: Option<ProjectId>,
    pub calling_user_id: UserId,
    pub calling_connection_id: i32,
    pub calling_connection_epoch: Uuid,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
    #[sea_orm(
        belongs_to = "super::room::Entity",
        from = "Column::RoomId",
        to = "super::room::Column::Id"
    )]
    Room,
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl Related<super::room::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Room.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/signup.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
use super::{SignupId, UserId};
use sea_orm::{entity::prelude::*, FromQueryResult};
use serde::{Deserialize, Serialize};

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "signups")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: SignupId,
    pub email_address: String,
    pub email_confirmation_code: String,
    pub email_confirmation_sent: bool,
    pub created_at: DateTime,
    pub device_id: Option<String>,
    pub user_id: Option<UserId>,
    pub inviting_user_id: Option<UserId>,
    pub platform_mac: bool,
    pub platform_linux: bool,
    pub platform_windows: bool,
    pub platform_unknown: bool,
    pub editor_features: Option<Vec<String>>,
    pub programming_languages: Option<Vec<String>>,
    pub added_to_mailing_list: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
pub struct Invite {
    pub email_address: String,
    pub email_confirmation_code: String,
}

#[derive(Clone, Debug, Deserialize)]
pub struct NewSignup {
    pub email_address: String,
    pub platform_mac: bool,
    pub platform_windows: bool,
    pub platform_linux: bool,
    pub editor_features: Vec<String>,
    pub programming_languages: Vec<String>,
    pub device_id: Option<String>,
    pub added_to_mailing_list: bool,
    pub created_at: Option<DateTime>,
}

#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromQueryResult)]
pub struct WaitlistSummary {
    pub count: i64,
    pub linux_count: i64,
    pub mac_count: i64,
    pub windows_count: i64,
    pub unknown_count: i64,
}
@@ -1,19 +1,22 @@
-use super::db::*;
+use super::*;
 use gpui::executor::{Background, Deterministic};
 use std::sync::Arc;

 #[cfg(test)]
 use pretty_assertions::{assert_eq, assert_ne};

 macro_rules! test_both_dbs {
     ($postgres_test_name:ident, $sqlite_test_name:ident, $db:ident, $body:block) => {
         #[gpui::test]
         async fn $postgres_test_name() {
-            let test_db = PostgresTestDb::new(Deterministic::new(0).build_background());
+            let test_db = TestDb::postgres(Deterministic::new(0).build_background());
             let $db = test_db.db();
             $body
         }

         #[gpui::test]
         async fn $sqlite_test_name() {
-            let test_db = SqliteTestDb::new(Deterministic::new(0).build_background());
+            let test_db = TestDb::sqlite(Deterministic::new(0).build_background());
             let $db = test_db.db();
             $body
         }

@@ -26,9 +29,10 @@ test_both_dbs!(
     db,
     {
         let mut user_ids = Vec::new();
+        let mut user_metric_ids = Vec::new();
         for i in 1..=4 {
-            user_ids.push(
-                db.create_user(
+            let user = db
+                .create_user(
                     &format!("user{i}@example.com"),
                     false,
                     NewUserParams {

@@ -38,9 +42,9 @@ test_both_dbs!(
                     },
                 )
                 .await
-                .unwrap()
-                .user_id,
-            );
+                .unwrap();
+            user_ids.push(user.user_id);
+            user_metric_ids.push(user.metrics_id);
         }

         assert_eq!(

@@ -52,6 +56,7 @@ test_both_dbs!(
                 github_user_id: Some(1),
                 email_address: Some("user1@example.com".to_string()),
                 admin: false,
+                metrics_id: user_metric_ids[0].parse().unwrap(),
                 ..Default::default()
             },
             User {

@@ -60,6 +65,7 @@ test_both_dbs!(
                 github_user_id: Some(2),
                 email_address: Some("user2@example.com".to_string()),
                 admin: false,
+                metrics_id: user_metric_ids[1].parse().unwrap(),
                 ..Default::default()
             },
             User {

@@ -68,6 +74,7 @@ test_both_dbs!(
                 github_user_id: Some(3),
                 email_address: Some("user3@example.com".to_string()),
                 admin: false,
+                metrics_id: user_metric_ids[2].parse().unwrap(),
                 ..Default::default()
             },
             User {

@@ -76,6 +83,7 @@ test_both_dbs!(
                 github_user_id: Some(4),
                 email_address: Some("user4@example.com".to_string()),
                 admin: false,
+                metrics_id: user_metric_ids[3].parse().unwrap(),
                 ..Default::default()
             }
         ]

@@ -258,7 +266,8 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_1).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_2,
-            should_notify: true
+            should_notify: true,
+            busy: false,
         }],
     );
     assert!(db.has_contact(user_1, user_2).await.unwrap());

@@ -268,6 +277,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_1,
             should_notify: false,
+            busy: false,
         }]
     );

@@ -284,6 +294,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_2,
             should_notify: true,
+            busy: false,
         }]
     );

@@ -296,6 +307,7 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         &[Contact::Accepted {
             user_id: user_2,
             should_notify: false,
+            busy: false,
         }]
     );

@@ -309,10 +321,12 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
             Contact::Accepted {
                 user_id: user_2,
                 should_notify: false,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user_3,
-                should_notify: false
+                should_notify: false,
+                busy: false,
             }
         ]
     );

@@ -320,7 +334,8 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_3).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }],
     );

@@ -335,14 +350,16 @@ test_both_dbs!(test_add_contacts_postgres, test_add_contacts_sqlite, db, {
         db.get_contacts(user_2).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert_eq!(
         db.get_contacts(user_3).await.unwrap(),
         &[Contact::Accepted {
             user_id: user_1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }],
     );
 });

@@ -388,16 +405,81 @@ test_both_dbs!(test_metrics_id_postgres, test_metrics_id_sqlite, db, {
     assert_ne!(metrics_id1, metrics_id2);
 });

+test_both_dbs!(
+    test_project_count_postgres,
+    test_project_count_sqlite,
+    db,
+    {
+        let user1 = db
+            .create_user(
+                &format!("admin@example.com"),
+                true,
+                NewUserParams {
+                    github_login: "admin".into(),
+                    github_user_id: 0,
+                    invite_count: 0,
+                },
+            )
+            .await
+            .unwrap();
+        let user2 = db
+            .create_user(
+                &format!("user@example.com"),
+                false,
+                NewUserParams {
+                    github_login: "user".into(),
+                    github_user_id: 1,
+                    invite_count: 0,
+                },
+            )
+            .await
+            .unwrap();
+
+        let room_id = RoomId::from_proto(
+            db.create_room(user1.user_id, ConnectionId(0), "")
+                .await
+                .unwrap()
+                .id,
+        );
+        db.call(room_id, user1.user_id, ConnectionId(0), user2.user_id, None)
+            .await
+            .unwrap();
+        db.join_room(room_id, user2.user_id, ConnectionId(1))
+            .await
+            .unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
+
+        db.share_project(room_id, ConnectionId(1), &[])
+            .await
+            .unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
+
+        db.share_project(room_id, ConnectionId(1), &[])
+            .await
+            .unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
+
+        // Projects shared by admins aren't counted.
+        db.share_project(room_id, ConnectionId(0), &[])
+            .await
+            .unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
+
+        db.leave_room(ConnectionId(1)).await.unwrap();
+        assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
+    }
+);
+
 #[test]
 fn test_fuzzy_like_string() {
-    assert_eq!(DefaultDb::fuzzy_like_string("abcd"), "%a%b%c%d%");
-    assert_eq!(DefaultDb::fuzzy_like_string("x y"), "%x%y%");
-    assert_eq!(DefaultDb::fuzzy_like_string(" z "), "%z%");
+    assert_eq!(Database::fuzzy_like_string("abcd"), "%a%b%c%d%");
+    assert_eq!(Database::fuzzy_like_string("x y"), "%x%y%");
+    assert_eq!(Database::fuzzy_like_string(" z "), "%z%");
 }

 #[gpui::test]
 async fn test_fuzzy_search_users() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();
     for (i, github_login) in [
         "California",

@@ -433,7 +515,7 @@ async fn test_fuzzy_search_users() {
         &["rhode-island", "colorado", "oregon"],
     );

-    async fn fuzzy_search_user_names(db: &Db<sqlx::Postgres>, query: &str) -> Vec<String> {
+    async fn fuzzy_search_user_names(db: &Database, query: &str) -> Vec<String> {
         db.fuzzy_search_users(query, 10)
             .await
             .unwrap()

@@ -445,7 +527,7 @@ async fn test_fuzzy_search_users() {

 #[gpui::test]
 async fn test_invite_codes() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();

     let NewUserResult { user_id: user1, .. } = db

@@ -504,16 +586,20 @@ async fn test_invite_codes() {
         db.get_contacts(user1).await.unwrap(),
         [Contact::Accepted {
             user_id: user2,
-            should_notify: true
+            should_notify: true,
+            busy: false,
         }]
     );
     assert_eq!(
         db.get_contacts(user2).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert!(db.has_contact(user1, user2).await.unwrap());
     assert!(db.has_contact(user2, user1).await.unwrap());
     assert_eq!(
         db.get_invite_code_for_user(user2).await.unwrap().unwrap().1,
         7

@@ -550,11 +636,13 @@ async fn test_invite_codes() {
         [
             Contact::Accepted {
                 user_id: user2,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user3,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             }
         ]
     );

@@ -562,9 +650,12 @@ async fn test_invite_codes() {
         db.get_contacts(user3).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert!(db.has_contact(user1, user3).await.unwrap());
     assert!(db.has_contact(user3, user1).await.unwrap());
     assert_eq!(
         db.get_invite_code_for_user(user3).await.unwrap().unwrap().1,
         3

@@ -607,15 +698,18 @@ async fn test_invite_codes() {
         [
             Contact::Accepted {
                 user_id: user2,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user3,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             },
             Contact::Accepted {
                 user_id: user4,
-                should_notify: true
+                should_notify: true,
+                busy: false,
             }
         ]
     );

@@ -623,9 +717,12 @@ async fn test_invite_codes() {
         db.get_contacts(user4).await.unwrap(),
         [Contact::Accepted {
             user_id: user1,
-            should_notify: false
+            should_notify: false,
+            busy: false,
         }]
     );
     assert!(db.has_contact(user1, user4).await.unwrap());
     assert!(db.has_contact(user4, user1).await.unwrap());
     assert_eq!(
         db.get_invite_code_for_user(user4).await.unwrap().unwrap().1,
         5

@@ -637,11 +734,162 @@ async fn test_invite_codes() {
         .unwrap_err();
     let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
     assert_eq!(invite_count, 1);
+
+    // A newer user can invite an existing one via a different email address
+    // than the one they used to sign up.
+    let user5 = db
+        .create_user(
+            "user5@example.com",
+            false,
+            NewUserParams {
+                github_login: "user5".into(),
+                github_user_id: 5,
+                invite_count: 0,
+            },
+        )
+        .await
+        .unwrap()
+        .user_id;
+    db.set_invite_count_for_user(user5, 5).await.unwrap();
+    let (user5_invite_code, _) = db.get_invite_code_for_user(user5).await.unwrap().unwrap();
+    let user5_invite_to_user1 = db
+        .create_invite_from_code(&user5_invite_code, "user1@different.com", None)
+        .await
+        .unwrap();
+    let user1_2 = db
+        .create_user_from_invite(
+            &user5_invite_to_user1,
+            NewUserParams {
+                github_login: "user1".into(),
+                github_user_id: 1,
+                invite_count: 5,
+            },
+        )
+        .await
+        .unwrap()
+        .unwrap()
+        .user_id;
+    assert_eq!(user1_2, user1);
+    assert_eq!(
+        db.get_contacts(user1).await.unwrap(),
+        [
+            Contact::Accepted {
+                user_id: user2,
+                should_notify: true,
+                busy: false,
+            },
+            Contact::Accepted {
+                user_id: user3,
+                should_notify: true,
+                busy: false,
+            },
+            Contact::Accepted {
+                user_id: user4,
+                should_notify: true,
+                busy: false,
+            },
+            Contact::Accepted {
+                user_id: user5,
+                should_notify: false,
+                busy: false,
+            }
+        ]
+    );
+    assert_eq!(
+        db.get_contacts(user5).await.unwrap(),
+        [Contact::Accepted {
+            user_id: user1,
+            should_notify: true,
+            busy: false,
+        }]
+    );
+    assert!(db.has_contact(user1, user5).await.unwrap());
+    assert!(db.has_contact(user5, user1).await.unwrap());
 }

+#[gpui::test]
+async fn test_multiple_signup_overwrite() {
+    let test_db = TestDb::postgres(build_background_executor());
+    let db = test_db.db();
+
+    let email_address = "user_1@example.com".to_string();
+
+    let initial_signup_created_at_milliseconds = 0;
+
+    let initial_signup = NewSignup {
+        email_address: email_address.clone(),
+        platform_mac: false,
+        platform_linux: true,
+        platform_windows: false,
+        editor_features: vec!["speed".into()],
+        programming_languages: vec!["rust".into(), "c".into()],
+        device_id: Some(format!("device_id")),
+        added_to_mailing_list: false,
+        created_at: Some(
+            DateTime::from_timestamp_millis(initial_signup_created_at_milliseconds).unwrap(),
+        ),
+    };
+
+    db.create_signup(&initial_signup).await.unwrap();
+
+    let initial_signup_from_db = db.get_signup(&email_address).await.unwrap();
+
+    assert_eq!(
+        initial_signup_from_db.clone(),
+        signup::Model {
+            email_address: initial_signup.email_address,
+            platform_mac: initial_signup.platform_mac,
+            platform_linux: initial_signup.platform_linux,
+            platform_windows: initial_signup.platform_windows,
+            editor_features: Some(initial_signup.editor_features),
+            programming_languages: Some(initial_signup.programming_languages),
+            added_to_mailing_list: initial_signup.added_to_mailing_list,
+            ..initial_signup_from_db
+        }
+    );
+
+    let subsequent_signup = NewSignup {
+        email_address: email_address.clone(),
+        platform_mac: true,
+        platform_linux: false,
+        platform_windows: true,
+        editor_features: vec!["git integration".into(), "clean design".into()],
+        programming_languages: vec!["d".into(), "elm".into()],
+        device_id: Some(format!("different_device_id")),
+        added_to_mailing_list: true,
+        // subsequent signup happens next day
+        created_at: Some(
+            DateTime::from_timestamp_millis(
+                initial_signup_created_at_milliseconds + (1000 * 60 * 60 * 24),
+            )
+            .unwrap(),
+        ),
+    };
+
+    db.create_signup(&subsequent_signup).await.unwrap();
+
+    let subsequent_signup_from_db = db.get_signup(&email_address).await.unwrap();
+
+    assert_eq!(
+        subsequent_signup_from_db.clone(),
+        signup::Model {
+            platform_mac: subsequent_signup.platform_mac,
+            platform_linux: subsequent_signup.platform_linux,
+            platform_windows: subsequent_signup.platform_windows,
+            editor_features: Some(subsequent_signup.editor_features),
+            programming_languages: Some(subsequent_signup.programming_languages),
+            device_id: subsequent_signup.device_id,
+            added_to_mailing_list: subsequent_signup.added_to_mailing_list,
+            // shouldn't overwrite their creation Datetime - user shouldn't lose their spot in line
+            created_at: initial_signup_from_db.created_at,
+            ..subsequent_signup_from_db
+        }
+    );
+}
+
 #[gpui::test]
 async fn test_signups() {
-    let test_db = PostgresTestDb::new(build_background_executor());
+    let test_db = TestDb::postgres(build_background_executor());
     let db = test_db.db();

     let usernames = (0..8).map(|i| format!("person-{i}")).collect::<Vec<_>>();

@@ -649,7 +897,7 @@ async fn test_signups() {
     let all_signups = usernames
         .iter()
         .enumerate()
-        .map(|(i, username)| Signup {
+        .map(|(i, username)| NewSignup {
             email_address: format!("{username}@example.com"),
             platform_mac: true,
             platform_linux: i % 2 == 0,

@@ -657,8 +905,10 @@ async fn test_signups() {
             editor_features: vec!["speed".into()],
             programming_languages: vec!["rust".into(), "c".into()],
             device_id: Some(format!("device_id_{i}")),
+            added_to_mailing_list: i != 0, // One user failed to subscribe
+            created_at: Some(DateTime::from_timestamp_millis(i as i64).unwrap()), // Signups are consecutive
         })
-        .collect::<Vec<Signup>>();
+        .collect::<Vec<NewSignup>>();

     // people sign up on the waitlist
     for signup in &all_signups {

crates/collab/src/db/user.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
use super::UserId;
use sea_orm::entity::prelude::*;
use serde::Serialize;

#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)]
#[sea_orm(table_name = "users")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: UserId,
    pub github_login: String,
    pub github_user_id: Option<i32>,
    pub email_address: Option<String>,
    pub admin: bool,
    pub invite_code: Option<String>,
    pub invite_count: i32,
    pub inviter_id: Option<UserId>,
    pub connected_once: bool,
    pub metrics_id: Uuid,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::access_token::Entity")]
    AccessToken,
    #[sea_orm(has_one = "super::room_participant::Entity")]
    RoomParticipant,
    #[sea_orm(has_many = "super::project::Entity")]
    HostedProjects,
}

impl Related<super::access_token::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::AccessToken.def()
    }
}

impl Related<super::room_participant::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RoomParticipant.def()
    }
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::HostedProjects.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/worktree.rs (new file, 34 lines)
@@ -0,0 +1,34 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktrees")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    pub abs_path: String,
    pub root_name: String,
    pub visible: bool,
    pub scan_id: i64,
    pub is_complete: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::project::Entity",
        from = "Column::ProjectId",
        to = "super::project::Column::Id"
    )]
    Project,
}

impl Related<super::project::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Project.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/worktree_diagnostic_summary.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_diagnostic_summaries")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub path: String,
    pub language_server_id: i64,
    pub error_count: i32,
    pub warning_count: i32,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/db/worktree_entry.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
use super::ProjectId;
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "worktree_entries")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub project_id: ProjectId,
    #[sea_orm(primary_key)]
    pub worktree_id: i64,
    #[sea_orm(primary_key)]
    pub id: i64,
    pub is_dir: bool,
    pub path: String,
    pub inode: i64,
    pub mtime_seconds: i64,
    pub mtime_nanos: i32,
    pub is_symlink: bool,
    pub is_ignored: bool,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

crates/collab/src/executor.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
use std::{future::Future, time::Duration};

#[derive(Clone)]
pub enum Executor {
    Production,
    #[cfg(test)]
    Deterministic(std::sync::Arc<gpui::executor::Background>),
}

impl Executor {
    pub fn spawn_detached<F>(&self, future: F)
    where
        F: 'static + Send + Future<Output = ()>,
    {
        match self {
            Executor::Production => {
                tokio::spawn(future);
            }
            #[cfg(test)]
            Executor::Deterministic(background) => {
                background.spawn(future).detach();
            }
        }
    }

    pub fn sleep(&self, duration: Duration) -> impl Future<Output = ()> {
        let this = self.clone();
        async move {
            match this {
                Executor::Production => tokio::time::sleep(duration).await,
                #[cfg(test)]
                Executor::Deterministic(background) => background.timer(duration).await,
            }
        }
    }
}
@@ -1,6 +1,7 @@
 use crate::{
-    db::{NewUserParams, ProjectId, SqliteTestDb as TestDb, UserId},
-    rpc::{Executor, Server},
+    db::{self, NewUserParams, TestDb, UserId},
+    executor::Executor,
+    rpc::{Server, RECONNECT_TIMEOUT},
     AppState,
 };
 use ::rpc::Peer;

@@ -16,7 +17,7 @@ use editor::{
     ToggleCodeActions, Undo,
 };
 use fs::{FakeFs, Fs as _, HomeDir, LineEnding};
-use futures::{channel::oneshot, Future, StreamExt as _};
+use futures::{channel::oneshot, StreamExt as _};
 use gpui::{
     executor::{self, Deterministic},
     geometry::vector::vec2f,

@@ -30,9 +31,7 @@ use language::{
 use live_kit_client::MacOSDisplay;
 use lsp::{self, FakeLanguageServer};
 use parking_lot::Mutex;
-use project::{
-    search::SearchQuery, DiagnosticSummary, Project, ProjectPath, ProjectStore, WorktreeId,
-};
+use project::{search::SearchQuery, DiagnosticSummary, Project, ProjectPath, WorktreeId};
 use rand::prelude::*;
 use serde_json::json;
 use settings::{Formatter, Settings};

@@ -46,12 +45,11 @@ use std::{
         atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
         Arc,
     },
     time::Duration,
 };
 use theme::ThemeRegistry;
 use unindent::Unindent as _;
 use util::post_inc;
-use workspace::{shared_screen::SharedScreen, Item, SplitDirection, ToggleFollow, Workspace};
+use workspace::{item::Item, shared_screen::SharedScreen, SplitDirection, ToggleFollow, Workspace};

 #[ctor::ctor]
 fn init_logger() {

@@ -71,8 +69,6 @@ async fn test_basic_calls(
     deterministic.forbid_parking();
     let mut server = TestServer::start(cx_a.background()).await;

-    let start = std::time::Instant::now();
-
     let client_a = server.create_client(cx_a, "user_a").await;
     let client_b = server.create_client(cx_b, "user_b").await;
     let client_c = server.create_client(cx_c, "user_c").await;

@@ -104,7 +100,7 @@ async fn test_basic_calls(
     // User B receives the call.
     let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     let call_b = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b.caller.github_login, "user_a");
+    assert_eq!(call_b.calling_user.github_login, "user_a");

     // User B connects via another client and also receives a ring on the newly-connected client.
     let _client_b2 = server.create_client(cx_b2, "user_b").await;

@@ -112,7 +108,7 @@ async fn test_basic_calls(
     let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call_b2 = incoming_call_b2.next().await.unwrap().unwrap();
-    assert_eq!(call_b2.caller.github_login, "user_a");
+    assert_eq!(call_b2.calling_user.github_login, "user_a");

     // User B joins the room using the first client.
     active_call_b

@@ -165,7 +161,7 @@ async fn test_basic_calls(

     // User C receives the call, but declines it.
     let call_c = incoming_call_c.next().await.unwrap().unwrap();
-    assert_eq!(call_c.caller.github_login, "user_b");
+    assert_eq!(call_c.calling_user.github_login, "user_b");
     active_call_c.update(cx_c, |call, _| call.decline_incoming().unwrap());
     assert!(incoming_call_c.next().await.unwrap().is_none());

@@ -258,8 +254,6 @@ async fn test_basic_calls(
             pending: Default::default()
         }
     );
-
-    eprintln!("finished test {:?}", start.elapsed());
 }

 #[gpui::test(iterations = 10)]

@@ -308,7 +302,7 @@ async fn test_room_uniqueness(
     // User B receives the call from user A.
     let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     let call_b1 = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b1.caller.github_login, "user_a");
+    assert_eq!(call_b1.calling_user.github_login, "user_a");

     // Ensure calling users A and B from client C fails.
     active_call_c

@@ -367,11 +361,11 @@ async fn test_room_uniqueness(
     .unwrap();
     deterministic.run_until_parked();
     let call_b2 = incoming_call_b.next().await.unwrap().unwrap();
-    assert_eq!(call_b2.caller.github_login, "user_c");
+    assert_eq!(call_b2.calling_user.github_login, "user_c");
 }

 #[gpui::test(iterations = 10)]
-async fn test_leaving_room_on_disconnection(
+async fn test_disconnecting_from_room(
     deterministic: Arc<Deterministic>,
     cx_a: &mut TestAppContext,
     cx_b: &mut TestAppContext,

@@ -420,9 +414,29 @@ async fn test_disconnecting_from_room(
         }
     );

-    // When user A disconnects, both client A and B clear their room on the active call.
+    // User A automatically reconnects to the room upon disconnection.
     server.disconnect_client(client_a.peer_id().unwrap());
-    cx_a.foreground().advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
+    deterministic.run_until_parked();
+    assert_eq!(
+        room_participants(&room_a, cx_a),
+        RoomParticipants {
+            remote: vec!["user_b".to_string()],
+            pending: Default::default()
+        }
+    );
+    assert_eq!(
+        room_participants(&room_b, cx_b),
+        RoomParticipants {
+            remote: vec!["user_a".to_string()],
+            pending: Default::default()
+        }
+    );
+
+    // When user A disconnects, both client A and B clear their room on the active call.
+    server.forbid_connections();
+    server.disconnect_client(client_a.peer_id().unwrap());
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
     active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none()));
     assert_eq!(
@@ -440,6 +454,10 @@ async fn test_disconnecting_from_room(
         }
     );

+    // Allow user A to reconnect to the server.
+    server.allow_connections();
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
+
     // Call user B again from client A.
     active_call_a
         .update(cx_a, |call, cx| {

@@ -563,7 +581,7 @@ async fn test_calls_on_multiple_connections(

     // User B disconnects the client that is not on the call. Everything should be fine.
     client_b1.disconnect(&cx_b1.to_async()).unwrap();
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
     client_b1
         .authenticate_and_connect(false, &cx_b1.to_async())
         .await

@@ -622,12 +640,15 @@ async fn test_calls_on_multiple_connections(
     assert!(incoming_call_b2.next().await.unwrap().is_some());

     // User A disconnects, causing both connections to stop ringing.
+    server.forbid_connections();
     server.disconnect_client(client_a.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     assert!(incoming_call_b1.next().await.unwrap().is_none());
     assert!(incoming_call_b2.next().await.unwrap().is_none());

+    // User A reconnects automatically, then calls user B again.
+    server.allow_connections();
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
     active_call_a
         .update(cx_a, |call, cx| {
             call.invite(client_b1.user_id().unwrap(), None, cx)

@@ -642,7 +663,7 @@ async fn test_calls_on_multiple_connections(
     server.forbid_connections();
     server.disconnect_client(client_b1.peer_id().unwrap());
     server.disconnect_client(client_b2.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
 }

@@ -695,7 +716,7 @@ async fn test_share_project(
     let incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call = incoming_call_b.borrow().clone().unwrap();
-    assert_eq!(call.caller.github_login, "user_a");
+    assert_eq!(call.calling_user.github_login, "user_a");
     let initial_project = call.initial_project.unwrap();
     active_call_b
         .update(cx_b, |call, cx| call.accept_incoming(cx))

@@ -766,7 +787,7 @@ async fn test_share_project(
     let incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming());
     deterministic.run_until_parked();
     let call = incoming_call_c.borrow().clone().unwrap();
-    assert_eq!(call.caller.github_login, "user_b");
+    assert_eq!(call.calling_user.github_login, "user_b");
     let initial_project = call.initial_project.unwrap();
     active_call_c
         .update(cx_c, |call, cx| call.accept_incoming(cx))

@@ -905,8 +926,15 @@ async fn test_host_disconnect(
     let project_b = client_b.build_remote_project(project_id, cx_b).await;
     assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));

-    let (_, workspace_b) =
-        cx_b.add_window(|cx| Workspace::new(project_b.clone(), |_, _| unimplemented!(), cx));
+    let (_, workspace_b) = cx_b.add_window(|cx| {
+        Workspace::new(
+            Default::default(),
+            0,
+            project_b.clone(),
+            |_, _| unimplemented!(),
+            cx,
+        )
+    });
     let editor_b = workspace_b
         .update(cx_b, |workspace, cx| {
             workspace.open_path((worktree_id, "b.txt"), None, true, cx)

@@ -925,8 +953,9 @@ async fn test_host_disconnect(
     assert!(cx_b.is_window_edited(workspace_b.window_id()));

     // Drop client A's connection. Collaborators should disappear and the project should not be shown as shared.
+    server.forbid_connections();
     server.disconnect_client(client_a.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     project_a
         .condition(cx_a, |project, _| project.collaborators().is_empty())
         .await;

@@ -949,6 +978,11 @@ async fn test_host_disconnect(
     .unwrap();
     assert!(can_close);

+    // Allow client A to reconnect to the server.
+    server.allow_connections();
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
+
     // Client B calls client A again after they reconnected.
     let active_call_b = cx_b.read(ActiveCall::global);
     active_call_b
         .update(cx_b, |call, cx| {

@@ -969,7 +1003,7 @@ async fn test_host_disconnect(

     // Drop client A's connection again. We should still unshare it successfully.
     server.disconnect_client(client_a.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT);
     project_a.read_with(cx_a, |project, _| assert!(!project.is_shared()));
 }

@@ -2284,7 +2318,6 @@ async fn test_leaving_project(
         project_id,
         client_b.client.clone(),
         client_b.user_store.clone(),
-        client_b.project_store.clone(),
         client_b.language_registry.clone(),
         FakeFs::new(cx.background()),
         cx,

@@ -2296,7 +2329,7 @@ async fn test_leaving_project(
     // Simulate connection loss for client C and ensure client A observes client C leaving the project.
     client_c.wait_for_current_user(cx_c).await;
     server.disconnect_client(client_c.peer_id().unwrap());
-    cx_a.foreground().advance_clock(rpc::RECEIVE_TIMEOUT);
+    cx_a.foreground().advance_clock(RECEIVE_TIMEOUT);
     deterministic.run_until_parked();
     project_a.read_with(cx_a, |project, _| {
         assert_eq!(project.collaborators().len(), 0);

@@ -2408,12 +2441,6 @@ async fn test_collaborating_with_diagnostics(

     // Wait for server to see the diagnostics update.
     deterministic.run_until_parked();
-    {
-        let store = server.store.lock().await;
-        let project = store.project(ProjectId::from_proto(project_id)).unwrap();
-        let worktree = project.worktrees.get(&worktree_id.to_proto()).unwrap();
-        assert!(!worktree.diagnostic_summaries.is_empty());
-    }

     // Ensure client B observes the new diagnostics.
     project_b.read_with(cx_b, |project, cx| {

@@ -2435,7 +2462,10 @@ async fn test_collaborating_with_diagnostics(

     // Join project as client C and observe the diagnostics.
     let project_c = client_c.build_remote_project(project_id, cx_c).await;
-    let project_c_diagnostic_summaries = Rc::new(RefCell::new(Vec::new()));
+    let project_c_diagnostic_summaries =
+        Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| {
+            project.diagnostic_summaries(cx).collect::<Vec<_>>()
+        })));
     project_c.update(cx_c, |_, cx| {
         let summaries = project_c_diagnostic_summaries.clone();
         cx.subscribe(&project_c, {

@@ -3701,8 +3731,15 @@ async fn test_collaborating_with_code_actions(

     // Join the project as client B.
     let project_b = client_b.build_remote_project(project_id, cx_b).await;
-    let (_window_b, workspace_b) =
-        cx_b.add_window(|cx| Workspace::new(project_b.clone(), |_, _| unimplemented!(), cx));
+    let (_window_b, workspace_b) = cx_b.add_window(|cx| {
+        Workspace::new(
+            Default::default(),
+            0,
+            project_b.clone(),
+            |_, _| unimplemented!(),
+            cx,
+        )
+    });
     let editor_b = workspace_b
         .update(cx_b, |workspace, cx| {
             workspace.open_path((worktree_id, "main.rs"), None, true, cx)

@@ -3922,8 +3959,15 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
     .unwrap();
     let project_b = client_b.build_remote_project(project_id, cx_b).await;

-    let (_window_b, workspace_b) =
-        cx_b.add_window(|cx| Workspace::new(project_b.clone(), |_, _| unimplemented!(), cx));
+    let (_window_b, workspace_b) = cx_b.add_window(|cx| {
+        Workspace::new(
+            Default::default(),
+            0,
+            project_b.clone(),
+            |_, _| unimplemented!(),
+            cx,
+        )
+    });
     let editor_b = workspace_b
         .update(cx_b, |workspace, cx| {
             workspace.open_path((worktree_id, "one.rs"), None, true, cx)

@@ -4176,18 +4220,21 @@ async fn test_contacts(
     cx_a: &mut TestAppContext,
     cx_b: &mut TestAppContext,
     cx_c: &mut TestAppContext,
+    cx_d: &mut TestAppContext,
 ) {
     cx_a.foreground().forbid_parking();
     let mut server = TestServer::start(cx_a.background()).await;
     let client_a = server.create_client(cx_a, "user_a").await;
     let client_b = server.create_client(cx_b, "user_b").await;
     let client_c = server.create_client(cx_c, "user_c").await;
+    let client_d = server.create_client(cx_d, "user_d").await;
     server
         .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
         .await;
     let active_call_a = cx_a.read(ActiveCall::global);
     let active_call_b = cx_b.read(ActiveCall::global);
     let active_call_c = cx_c.read(ActiveCall::global);
+    let _active_call_d = cx_d.read(ActiveCall::global);

     deterministic.run_until_parked();
     assert_eq!(

@@ -4211,10 +4258,11 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(contacts(&client_d, cx_d), []);

     server.disconnect_client(client_c.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    server.forbid_connections();
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     assert_eq!(
         contacts(&client_a, cx_a),
         [

@@ -4230,6 +4278,7 @@ async fn test_contacts(
         ]
     );
     assert_eq!(contacts(&client_c, cx_c), []);
+    assert_eq!(contacts(&client_d, cx_d), []);

     server.allow_connections();
     client_c

@@ -4259,6 +4308,7 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(contacts(&client_d, cx_d), []);

     active_call_a
         .update(cx_a, |call, cx| {

@@ -4288,6 +4338,39 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "busy")
         ]
     );
+    assert_eq!(contacts(&client_d, cx_d), []);
+
+    // Client B and client D become contacts while client B is being called.
+    server
+        .make_contacts(&mut [(&client_b, cx_b), (&client_d, cx_d)])
+        .await;
+    deterministic.run_until_parked();
+    assert_eq!(
+        contacts(&client_a, cx_a),
+        [
+            ("user_b".to_string(), "online", "busy"),
+            ("user_c".to_string(), "online", "free")
+        ]
+    );
+    assert_eq!(
+        contacts(&client_b, cx_b),
+        [
+            ("user_a".to_string(), "online", "busy"),
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free"),
+        ]
+    );
+    assert_eq!(
+        contacts(&client_c, cx_c),
+        [
+            ("user_a".to_string(), "online", "busy"),
+            ("user_b".to_string(), "online", "busy")
+        ]
+    );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "busy")]
+    );

     active_call_b.update(cx_b, |call, _| call.decline_incoming().unwrap());
     deterministic.run_until_parked();

@@ -4302,7 +4385,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "free"),
-            ("user_c".to_string(), "online", "free")
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(

@@ -4312,6 +4396,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );

     active_call_c
         .update(cx_c, |call, cx| {

@@ -4331,7 +4419,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "busy"),
-            ("user_c".to_string(), "online", "busy")
+            ("user_c".to_string(), "online", "busy"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(

@@ -4341,6 +4430,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );

     active_call_a
         .update(cx_a, |call, cx| call.accept_incoming(cx))

@@ -4358,7 +4451,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "busy"),
-            ("user_c".to_string(), "online", "busy")
+            ("user_c".to_string(), "online", "busy"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(

@@ -4368,6 +4462,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );

     active_call_a
         .update(cx_a, |call, cx| {

@@ -4387,7 +4485,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "busy"),
-            ("user_c".to_string(), "online", "busy")
+            ("user_c".to_string(), "online", "busy"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(

@@ -4397,6 +4496,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "busy")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "busy")]
+    );

     active_call_a.update(cx_a, |call, cx| call.hang_up(cx).unwrap());
     deterministic.run_until_parked();

@@ -4411,7 +4514,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "free"),
-            ("user_c".to_string(), "online", "free")
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(

@@ -4421,6 +4525,10 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );

     active_call_a
         .update(cx_a, |call, cx| {

@@ -4440,7 +4548,8 @@ async fn test_contacts(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "online", "busy"),
-            ("user_c".to_string(), "online", "free")
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(

@@ -4450,16 +4559,21 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "busy")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "busy")]
+    );

     server.forbid_connections();
     server.disconnect_client(client_a.peer_id().unwrap());
-    deterministic.advance_clock(rpc::RECEIVE_TIMEOUT);
+    deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
     assert_eq!(contacts(&client_a, cx_a), []);
     assert_eq!(
         contacts(&client_b, cx_b),
         [
             ("user_a".to_string(), "offline", "free"),
-            ("user_c".to_string(), "online", "free")
+            ("user_c".to_string(), "online", "free"),
+            ("user_d".to_string(), "online", "free")
         ]
     );
     assert_eq!(

@@ -4469,8 +4583,11 @@ async fn test_contacts(
             ("user_b".to_string(), "online", "free")
         ]
     );
+    assert_eq!(
+        contacts(&client_d, cx_d),
+        [("user_b".to_string(), "online", "free")]
+    );

     #[allow(clippy::type_complexity)]
     fn contacts(
         client: &TestClient,
         cx: &TestAppContext,
|
|||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_following_tab_order(
|
||||
deterministic: Arc<Deterministic>,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
cx_a.update(editor::init);
|
||||
cx_b.update(editor::init);
|
||||
|
||||
let mut server = TestServer::start(cx_a.background()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
let client_b = server.create_client(cx_b, "user_b").await;
|
||||
server
|
||||
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
|
||||
.await;
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let active_call_b = cx_b.read(ActiveCall::global);
|
||||
|
||||
client_a
|
||||
.fs
|
||||
.insert_tree(
|
||||
"/a",
|
||||
json!({
|
||||
"1.txt": "one",
|
||||
"2.txt": "two",
|
||||
"3.txt": "three",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
let project_b = client_b.build_remote_project(project_id, cx_b).await;
|
||||
active_call_b
|
||||
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let workspace_a = client_a.build_workspace(&project_a, cx_a);
|
||||
let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
|
||||
|
||||
let workspace_b = client_b.build_workspace(&project_b, cx_b);
|
||||
let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
|
||||
|
||||
let client_b_id = project_a.read_with(cx_a, |project, _| {
|
||||
project.collaborators().values().next().unwrap().peer_id
|
||||
});
|
||||
|
||||
//Open 1, 3 in that order on client A
|
||||
workspace_a
|
||||
.update(cx_a, |workspace, cx| {
|
||||
workspace.open_path((worktree_id, "1.txt"), None, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
workspace_a
|
||||
.update(cx_a, |workspace, cx| {
|
||||
workspace.open_path((worktree_id, "3.txt"), None, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let pane_paths = |pane: &ViewHandle<workspace::Pane>, cx: &mut TestAppContext| {
|
||||
pane.update(cx, |pane, cx| {
|
||||
pane.items()
|
||||
.map(|item| {
|
||||
item.project_path(cx)
|
||||
.unwrap()
|
||||
.path
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.to_owned()
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
};
|
||||
|
||||
//Verify that the tabs opened in the order we expect
|
||||
assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt"]);
|
||||
|
||||
//Follow client B as client A
|
||||
workspace_a
|
||||
.update(cx_a, |workspace, cx| {
|
||||
workspace
|
||||
.toggle_follow(&ToggleFollow(client_b_id), cx)
|
||||
.unwrap()
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
//Open just 2 on client B
|
||||
workspace_b
|
||||
.update(cx_b, |workspace, cx| {
|
||||
workspace.open_path((worktree_id, "2.txt"), None, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
deterministic.run_until_parked();
|
||||
|
||||
// Verify that newly opened followed file is at the end
|
||||
assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
|
||||
|
||||
//Open just 1 on client B
|
||||
workspace_b
|
||||
.update(cx_b, |workspace, cx| {
|
||||
workspace.open_path((worktree_id, "1.txt"), None, true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(&pane_paths(&pane_b, cx_b), &["2.txt", "1.txt"]);
|
||||
deterministic.run_until_parked();
|
||||
|
||||
// Verify that following into 1 did not reorder
|
||||
assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
cx_a.foreground().forbid_parking();
|
||||
|
@ -5422,7 +5662,6 @@ async fn test_random_collaboration(
|
|||
|
||||
let mut clients = Vec::new();
|
||||
let mut user_ids = Vec::new();
|
||||
let mut peer_ids = Vec::new();
|
||||
let mut op_start_signals = Vec::new();
|
||||
let mut next_entity_id = 100000;
|
||||
|
||||
|
@ -5449,7 +5688,6 @@ async fn test_random_collaboration(
|
|||
let op_start_signal = futures::channel::mpsc::unbounded();
|
||||
let guest = server.create_client(&mut guest_cx, &guest_username).await;
|
||||
user_ids.push(guest.current_user_id(&guest_cx));
|
||||
peer_ids.push(guest.peer_id().unwrap());
|
||||
op_start_signals.push(op_start_signal.0);
|
||||
clients.push(guest_cx.foreground().spawn(guest.simulate(
|
||||
guest_username.clone(),
|
||||
|
@ -5461,16 +5699,26 @@ async fn test_random_collaboration(
|
|||
log::info!("Added connection for {}", guest_username);
|
||||
operations += 1;
|
||||
}
|
||||
20..=29 if clients.len() > 1 => {
|
||||
20..=24 if clients.len() > 1 => {
|
||||
let guest_ix = rng.lock().gen_range(1..clients.len());
|
||||
log::info!("Removing guest {}", user_ids[guest_ix]);
|
||||
log::info!(
|
||||
"Simulating full disconnection of guest {}",
|
||||
user_ids[guest_ix]
|
||||
);
|
||||
let removed_guest_id = user_ids.remove(guest_ix);
|
||||
let removed_peer_id = peer_ids.remove(guest_ix);
|
||||
let user_connection_ids = server
|
||||
.connection_pool
|
||||
.lock()
|
||||
.await
|
||||
.user_connection_ids(removed_guest_id)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(user_connection_ids.len(), 1);
|
||||
let removed_peer_id = PeerId(user_connection_ids[0].0);
|
||||
let guest = clients.remove(guest_ix);
|
||||
op_start_signals.remove(guest_ix);
|
||||
server.forbid_connections();
|
||||
server.disconnect_client(removed_peer_id);
|
||||
deterministic.advance_clock(RECEIVE_TIMEOUT);
|
||||
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
|
||||
deterministic.start_waiting();
|
||||
log::info!("Waiting for guest {} to exit...", removed_guest_id);
|
||||
let (guest, mut guest_cx) = guest.await;
|
||||
|
@ -5482,18 +5730,15 @@ async fn test_random_collaboration(
|
|||
}
|
||||
for user_id in &user_ids {
|
||||
let contacts = server.app_state.db.get_contacts(*user_id).await.unwrap();
|
||||
let contacts = server
|
||||
.store
|
||||
.lock()
|
||||
.await
|
||||
.build_initial_contacts_update(contacts)
|
||||
.contacts;
|
||||
let pool = server.connection_pool.lock().await;
|
||||
for contact in contacts {
|
||||
if contact.online {
|
||||
assert_ne!(
|
||||
contact.user_id, removed_guest_id.0 as u64,
|
||||
"removed guest is still a contact of another peer"
|
||||
);
|
||||
if let db::Contact::Accepted { user_id, .. } = contact {
|
||||
if pool.is_user_online(user_id) {
|
||||
assert_ne!(
|
||||
user_id, removed_guest_id,
|
||||
"removed guest is still a contact of another peer"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5507,6 +5752,22 @@ async fn test_random_collaboration(
|
|||
|
||||
operations += 1;
|
||||
}
|
||||
25..=29 if clients.len() > 1 => {
|
||||
let guest_ix = rng.lock().gen_range(1..clients.len());
|
||||
let user_id = user_ids[guest_ix];
|
||||
log::info!("Simulating temporary disconnection of guest {}", user_id);
|
||||
let user_connection_ids = server
|
||||
.connection_pool
|
||||
.lock()
|
||||
.await
|
||||
.user_connection_ids(user_id)
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(user_connection_ids.len(), 1);
|
||||
let peer_id = PeerId(user_connection_ids[0].0);
|
||||
server.disconnect_client(peer_id);
|
||||
deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
|
||||
operations += 1;
|
||||
}
|
||||
_ if !op_start_signals.is_empty() => {
|
||||
while operations < max_operations && rng.lock().gen_bool(0.7) {
|
||||
op_start_signals
|
||||
|
@ -5685,7 +5946,13 @@ impl TestServer {
|
|||
async fn start(background: Arc<executor::Background>) -> Self {
|
||||
static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
|
||||
|
||||
let test_db = TestDb::new(background.clone());
|
||||
let use_postgres = env::var("USE_POSTGRES").ok();
|
||||
let use_postgres = use_postgres.as_deref();
|
||||
let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
|
||||
TestDb::postgres(background.clone())
|
||||
} else {
|
||||
TestDb::sqlite(background.clone())
|
||||
};
|
||||
let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
|
||||
let live_kit_server = live_kit_client::TestServer::create(
|
||||
format!("http://livekit.{}.test", live_kit_server_id),
|
||||
|
@ -5789,7 +6056,7 @@ impl TestServer {
|
|||
client_name,
|
||||
user,
|
||||
Some(connection_id_tx),
|
||||
cx.background(),
|
||||
Executor::Deterministic(cx.background()),
|
||||
))
|
||||
.detach();
|
||||
let connection_id = connection_id_rx.await.unwrap();
|
||||
|
@ -5803,17 +6070,15 @@ impl TestServer {
|
|||
|
||||
let fs = FakeFs::new(cx.background());
|
||||
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
|
||||
let project_store = cx.add_model(|_| ProjectStore::new());
|
||||
let app_state = Arc::new(workspace::AppState {
|
||||
client: client.clone(),
|
||||
user_store: user_store.clone(),
|
||||
project_store: project_store.clone(),
|
||||
languages: Arc::new(LanguageRegistry::new(Task::ready(()))),
|
||||
themes: ThemeRegistry::new((), cx.font_cache()),
|
||||
fs: fs.clone(),
|
||||
build_window_options: Default::default,
|
||||
initialize_workspace: |_, _, _| unimplemented!(),
|
||||
default_item_factory: |_, _| unimplemented!(),
|
||||
dock_default_item_factory: |_, _| unimplemented!(),
|
||||
});
|
||||
|
||||
Project::init(&client);
|
||||
|
@ -5834,7 +6099,6 @@ impl TestServer {
|
|||
remote_projects: Default::default(),
|
||||
next_root_dir_id: 0,
|
||||
user_store,
|
||||
project_store,
|
||||
fs,
|
||||
language_registry: Arc::new(LanguageRegistry::test()),
|
||||
buffers: Default::default(),
|
||||
|
@ -5929,6 +6193,7 @@ impl Deref for TestServer {
|
|||
impl Drop for TestServer {
|
||||
fn drop(&mut self) {
|
||||
self.peer.reset();
|
||||
self.server.teardown();
|
||||
self.test_live_kit_server.teardown().unwrap();
|
||||
}
|
||||
}
|
||||
|
@ -5940,7 +6205,6 @@ struct TestClient {
|
|||
remote_projects: Vec<ModelHandle<Project>>,
|
||||
next_root_dir_id: usize,
|
||||
pub user_store: ModelHandle<UserStore>,
|
||||
pub project_store: ModelHandle<ProjectStore>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
fs: Arc<FakeFs>,
|
||||
buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
|
||||
|
@ -6010,7 +6274,6 @@ impl TestClient {
|
|||
Project::local(
|
||||
self.client.clone(),
|
||||
self.user_store.clone(),
|
||||
self.project_store.clone(),
|
||||
self.language_registry.clone(),
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
|
@ -6038,7 +6301,6 @@ impl TestClient {
|
|||
host_project_id,
|
||||
self.client.clone(),
|
||||
self.user_store.clone(),
|
||||
self.project_store.clone(),
|
||||
self.language_registry.clone(),
|
||||
FakeFs::new(cx.background()),
|
||||
cx,
|
||||
|
@ -6054,7 +6316,13 @@ impl TestClient {
|
|||
) -> ViewHandle<Workspace> {
|
||||
let (_, root_view) = cx.add_window(|_| EmptyView);
|
||||
cx.add_view(&root_view, |cx| {
|
||||
Workspace::new(project.clone(), |_, _| unimplemented!(), cx)
|
||||
Workspace::new(
|
||||
Default::default(),
|
||||
0,
|
||||
project.clone(),
|
||||
|_, _| unimplemented!(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -6168,7 +6436,6 @@ impl TestClient {
|
|||
remote_project_id,
|
||||
client.client.clone(),
|
||||
client.user_store.clone(),
|
||||
client.project_store.clone(),
|
||||
client.language_registry.clone(),
|
||||
FakeFs::new(cx.background()),
|
||||
cx.to_async(),
|
||||
|
@ -6187,11 +6454,14 @@ impl TestClient {
|
|||
.clone()
|
||||
}
|
||||
};
|
||||
if let Err(error) = active_call
|
||||
.update(cx, |call, cx| call.share_project(project.clone(), cx))
|
||||
.await
|
||||
{
|
||||
log::error!("{}: error sharing project, {:?}", username, error);
|
||||
|
||||
if active_call.read_with(cx, |call, _| call.room().is_some()) {
|
||||
if let Err(error) = active_call
|
||||
.update(cx, |call, cx| call.share_project(project.clone(), cx))
|
||||
.await
|
||||
{
|
||||
log::error!("{}: error sharing project, {:?}", username, error);
|
||||
}
|
||||
}
|
||||
|
||||
let buffers = client.buffers.entry(project.clone()).or_default();
|
||||
|
@ -6418,7 +6688,7 @@ impl TestClient {
|
|||
buffers.extend(search.await?.into_keys());
|
||||
}
|
||||
}
|
||||
60..=69 => {
|
||||
60..=79 => {
|
||||
let worktree = project
|
||||
.read_with(cx, |project, cx| {
|
||||
project
|
||||
|
@ -6619,18 +6889,6 @@ impl Drop for TestClient {
|
|||
}
|
||||
}
|
||||
|
||||
impl Executor for Arc<gpui::executor::Background> {
|
||||
type Sleep = gpui::executor::Timer;
|
||||
|
||||
fn spawn_detached<F: 'static + Send + Future<Output = ()>>(&self, future: F) {
|
||||
self.spawn(future).detach();
|
||||
}
|
||||
|
||||
fn sleep(&self, duration: Duration) -> Self::Sleep {
|
||||
self.as_ref().timer(duration)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
struct RoomParticipants {
|
||||
remote: Vec<String>,
|
||||
|
|
|

@ -1,9 +1,22 @@
pub mod api;
pub mod auth;
pub mod db;
pub mod env;
mod executor;
#[cfg(test)]
mod integration_tests;
pub mod rpc;

use axum::{http::StatusCode, response::IntoResponse};
use db::Database;
use serde::Deserialize;
use std::{path::PathBuf, sync::Arc};

pub type Result<T, E = Error> = std::result::Result<T, E>;

pub enum Error {
Http(StatusCode, String),
Database(sea_orm::error::DbErr),
Internal(anyhow::Error),
}
@ -13,9 +26,9 @@ impl From<anyhow::Error> for Error {
}
}

impl From<sqlx::Error> for Error {
fn from(error: sqlx::Error) -> Self {
Self::Internal(error.into())
impl From<sea_orm::error::DbErr> for Error {
fn from(error: sea_orm::error::DbErr) -> Self {
Self::Database(error)
}
}
@ -41,6 +54,9 @@ impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
match self {
Error::Http(code, message) => (code, message).into_response(),
Error::Database(error) => {
(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
}
Error::Internal(error) => {
(StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
}
@ -52,6 +68,7 @@ impl std::fmt::Debug for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::Http(code, message) => (code, message).fmt(f),
Error::Database(error) => error.fmt(f),
Error::Internal(error) => error.fmt(f),
}
}
@ -61,9 +78,64 @@ impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::Http(code, message) => write!(f, "{code}: {message}"),
Error::Database(error) => error.fmt(f),
Error::Internal(error) => error.fmt(f),
}
}
}

impl std::error::Error for Error {}

#[derive(Default, Deserialize)]
pub struct Config {
pub http_port: u16,
pub database_url: String,
pub api_token: String,
pub invite_link_prefix: String,
pub live_kit_server: Option<String>,
pub live_kit_key: Option<String>,
pub live_kit_secret: Option<String>,
pub rust_log: Option<String>,
pub log_json: Option<bool>,
}

#[derive(Default, Deserialize)]
pub struct MigrateConfig {
pub database_url: String,
pub migrations_path: Option<PathBuf>,
}

pub struct AppState {
pub db: Arc<Database>,
pub live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
pub config: Config,
}

impl AppState {
pub async fn new(config: Config) -> Result<Arc<Self>> {
let mut db_options = db::ConnectOptions::new(config.database_url.clone());
db_options.max_connections(5);
let db = Database::new(db_options).await?;
let live_kit_client = if let Some(((server, key), secret)) = config
.live_kit_server
.as_ref()
.zip(config.live_kit_key.as_ref())
.zip(config.live_kit_secret.as_ref())
{
Some(Arc::new(live_kit_server::api::LiveKitClient::new(
server.clone(),
key.clone(),
secret.clone(),
)) as Arc<dyn live_kit_server::api::Client>)
} else {
None
};

let this = Self {
db: Arc::new(db),
live_kit_client,
config,
};
Ok(Arc::new(this))
}
}

@ -1,86 +1,18 @@
mod api;
mod auth;
mod db;
mod env;
mod rpc;

#[cfg(test)]
mod db_tests;
#[cfg(test)]
mod integration_tests;

use crate::rpc::ResultExt as _;
use anyhow::anyhow;
use axum::{routing::get, Router};
use collab::{Error, Result};
use db::DefaultDb as Db;
use serde::Deserialize;
use collab::{db, env, AppState, Config, MigrateConfig, Result};
use db::Database;
use std::{
env::args,
net::{SocketAddr, TcpListener},
path::{Path, PathBuf},
sync::Arc,
time::Duration,
path::Path,
};
use tokio::signal;
use tracing_log::LogTracer;
use tracing_subscriber::{filter::EnvFilter, fmt::format::JsonFields, Layer};
use util::ResultExt;

const VERSION: &'static str = env!("CARGO_PKG_VERSION");

#[derive(Default, Deserialize)]
pub struct Config {
pub http_port: u16,
pub database_url: String,
pub api_token: String,
pub invite_link_prefix: String,
pub live_kit_server: Option<String>,
pub live_kit_key: Option<String>,
pub live_kit_secret: Option<String>,
pub rust_log: Option<String>,
pub log_json: Option<bool>,
}

#[derive(Default, Deserialize)]
pub struct MigrateConfig {
pub database_url: String,
pub migrations_path: Option<PathBuf>,
}

pub struct AppState {
db: Arc<Db>,
live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
config: Config,
}

impl AppState {
async fn new(config: Config) -> Result<Arc<Self>> {
let db = Db::new(&config.database_url, 5).await?;
let live_kit_client = if let Some(((server, key), secret)) = config
.live_kit_server
.as_ref()
.zip(config.live_kit_key.as_ref())
.zip(config.live_kit_secret.as_ref())
{
Some(Arc::new(live_kit_server::api::LiveKitClient::new(
server.clone(),
key.clone(),
secret.clone(),
)) as Arc<dyn live_kit_server::api::Client>)
} else {
None
};

let this = Self {
db: Arc::new(db),
live_kit_client,
config,
};
Ok(Arc::new(this))
}
}

#[tokio::main]
async fn main() -> Result<()> {
if let Err(error) = env::load_dotenv() {
@ -96,7 +28,9 @@ async fn main() -> Result<()> {
}
Some("migrate") => {
let config = envy::from_env::<MigrateConfig>().expect("error loading config");
let db = Db::new(&config.database_url, 5).await?;
let mut db_options = db::ConnectOptions::new(config.database_url.clone());
db_options.max_connections(5);
let db = Database::new(db_options).await?;

let migrations_path = config
.migrations_path
@ -118,18 +52,19 @@ async fn main() -> Result<()> {
init_tracing(&config);

let state = AppState::new(config).await?;
state.db.clear_stale_data().await?;

let listener = TcpListener::bind(&format!("0.0.0.0:{}", state.config.http_port))
.expect("failed to bind TCP listener");

let rpc_server = rpc::Server::new(state.clone());
let rpc_server = collab::rpc::Server::new(state.clone());

let app = api::routes(rpc_server.clone(), state.clone())
.merge(rpc::routes(rpc_server.clone()))
let app = collab::api::routes(rpc_server.clone(), state.clone())
.merge(collab::rpc::routes(rpc_server.clone()))
.merge(Router::new().route("/", get(handle_root)));

axum::Server::from_tcp(listener)?
.serve(app.into_make_service_with_connect_info::<SocketAddr>())
.with_graceful_shutdown(graceful_shutdown(rpc_server, state))
.await?;
}
_ => {
@ -174,52 +109,3 @@ pub fn init_tracing(config: &Config) -> Option<()> {
None
}

async fn graceful_shutdown(rpc_server: Arc<rpc::Server>, state: Arc<AppState>) {
let ctrl_c = async {
signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};

#[cfg(unix)]
let terminate = async {
signal::unix::signal(signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};

#[cfg(not(unix))]
let terminate = std::future::pending::<()>();

tokio::select! {
_ = ctrl_c => {},
_ = terminate => {},
}

if let Some(live_kit) = state.live_kit_client.as_ref() {
let deletions = rpc_server
.store()
.await
.rooms()
.values()
.map(|room| {
let name = room.live_kit_room.clone();
async {
live_kit.delete_room(name).await.trace_err();
}
})
.collect::<Vec<_>>();

tracing::info!("deleting all live-kit rooms");
if let Err(_) = tokio::time::timeout(
Duration::from_secs(10),
futures::future::join_all(deletions),
)
.await
{
tracing::error!("timed out waiting for live-kit room deletion");
}
}
}

File diff suppressed because it is too large
93
crates/collab/src/rpc/connection_pool.rs
Normal file
@ -0,0 +1,93 @@
use crate::db::UserId;
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashSet};
use rpc::ConnectionId;
use serde::Serialize;
use tracing::instrument;

#[derive(Default, Serialize)]
pub struct ConnectionPool {
connections: BTreeMap<ConnectionId, Connection>,
connected_users: BTreeMap<UserId, ConnectedUser>,
}

#[derive(Default, Serialize)]
struct ConnectedUser {
connection_ids: HashSet<ConnectionId>,
}

#[derive(Serialize)]
pub struct Connection {
pub user_id: UserId,
pub admin: bool,
}

impl ConnectionPool {
#[instrument(skip(self))]
pub fn add_connection(&mut self, connection_id: ConnectionId, user_id: UserId, admin: bool) {
self.connections
.insert(connection_id, Connection { user_id, admin });
let connected_user = self.connected_users.entry(user_id).or_default();
connected_user.connection_ids.insert(connection_id);
}

#[instrument(skip(self))]
pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
let connection = self
.connections
.get_mut(&connection_id)
.ok_or_else(|| anyhow!("no such connection"))?;

let user_id = connection.user_id;
let connected_user = self.connected_users.get_mut(&user_id).unwrap();
connected_user.connection_ids.remove(&connection_id);
if connected_user.connection_ids.is_empty() {
self.connected_users.remove(&user_id);
}
self.connections.remove(&connection_id).unwrap();
Ok(())
}

pub fn connections(&self) -> impl Iterator<Item = &Connection> {
self.connections.values()
}

pub fn user_connection_ids(&self, user_id: UserId) -> impl Iterator<Item = ConnectionId> + '_ {
self.connected_users
.get(&user_id)
.into_iter()
.map(|state| &state.connection_ids)
.flatten()
.copied()
}

pub fn is_user_online(&self, user_id: UserId) -> bool {
!self
.connected_users
.get(&user_id)
.unwrap_or(&Default::default())
.connection_ids
.is_empty()
}

#[cfg(test)]
pub fn check_invariants(&self) {
for (connection_id, connection) in &self.connections {
assert!(self
.connected_users
.get(&connection.user_id)
.unwrap()
.connection_ids
.contains(connection_id));
}

for (user_id, state) in &self.connected_users {
for connection_id in &state.connection_ids {
assert_eq!(
self.connections.get(connection_id).unwrap().user_id,
*user_id
);
}
}
}
}

File diff suppressed because it is too large
@ -43,7 +43,6 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
project_id,
app_state.client.clone(),
app_state.user_store.clone(),
app_state.project_store.clone(),
app_state.languages.clone(),
app_state.fs.clone(),
cx.clone(),
@ -51,7 +50,13 @@ pub fn init(app_state: Arc<AppState>, cx: &mut MutableAppContext) {
.await?;

let (_, workspace) = cx.add_window((app_state.build_window_options)(), |cx| {
let mut workspace = Workspace::new(project, app_state.default_item_factory, cx);
let mut workspace = Workspace::new(
Default::default(),
0,
project,
app_state.dock_default_item_factory,
cx,
);
(app_state.initialize_workspace)(&mut workspace, &app_state, cx);
workspace
});

@ -6,7 +6,7 @@ use gpui::{
elements::*, impl_internal_actions, Entity, ModelHandle, MutableAppContext, RenderContext,
View, ViewContext,
};
use workspace::Notification;
use workspace::notifications::Notification;

impl_internal_actions!(contact_notifications, [Dismiss, RespondToContactRequest]);

@ -74,7 +74,7 @@ impl IncomingCallNotification {
let active_call = ActiveCall::global(cx);
if action.accept {
let join = active_call.update(cx, |active_call, cx| active_call.accept_incoming(cx));
let caller_user_id = self.call.caller.id;
let caller_user_id = self.call.calling_user.id;
let initial_project_id = self.call.initial_project.as_ref().map(|project| project.id);
cx.spawn_weak(|_, mut cx| async move {
join.await?;
@ -105,7 +105,7 @@ impl IncomingCallNotification {
.as_ref()
.unwrap_or(&default_project);
Flex::row()
.with_children(self.call.caller.avatar.clone().map(|avatar| {
.with_children(self.call.calling_user.avatar.clone().map(|avatar| {
Image::new(avatar)
.with_style(theme.caller_avatar)
.aligned()
@ -115,7 +115,7 @@ impl IncomingCallNotification {
Flex::column()
.with_child(
Label::new(
self.call.caller.github_login.clone(),
self.call.calling_user.github_login.clone(),
theme.caller_username.text.clone(),
)
.contained()

@ -350,8 +350,9 @@ mod tests {
});

let project = Project::test(app_state.fs.clone(), [], cx).await;
let (_, workspace) =
cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
let (_, workspace) = cx.add_window(|cx| {
Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
});
let editor = cx.add_view(&workspace, |cx| {
let mut editor = Editor::single_line(None, cx);
editor.set_text("abc", cx);

@ -12,16 +12,20 @@ test-support = []
[dependencies]
collections = { path = "../collections" }
gpui = { path = "../gpui" }
sqlez = { path = "../sqlez" }
sqlez_macros = { path = "../sqlez_macros" }
util = { path = "../util" }
anyhow = "1.0.57"
indoc = "1.0.4"
async-trait = "0.1"
lazy_static = "1.4.0"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
parking_lot = "0.11.1"
rusqlite = { version = "0.28.0", features = ["bundled", "serde_json"] }
rusqlite_migration = { git = "https://github.com/cljoly/rusqlite_migration", rev = "c433555d7c1b41b103426e35756eb3144d0ebbc6" }
serde = { workspace = true }
serde_rusqlite = "0.31.0"
serde = { version = "1.0", features = ["derive"] }
smol = "1.2"

[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
env_logger = "0.9.1"
tempdir = { version = "0.3.7" }

5
crates/db/README.md
Normal file
@ -0,0 +1,5 @@
# Building Queries

First, craft your test data. The examples folder shows a template for building a test-db, and can be run with `cargo run --example [your-example]`.

To actually use and test your queries, import the generated DB file into https://sqliteonline.com/
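
As a sketch of what such an example might look like: the code below leans on the `open_db`, `Domain`, and `sql!` APIs introduced elsewhere in this diff. The `ExampleDb` domain, the `./test-db` output directory, and the `exec` helper on the connection are illustrative assumptions, not the actual examples template.

```rust
// Hypothetical examples/build_test_db.rs inside crates/db.
use db::sqlez::domain::Domain;
use db::sqlez_macros::sql;
use std::path::Path;

enum ExampleDb {}

impl Domain for ExampleDb {
    fn name() -> &'static str {
        "example_db"
    }

    fn migrations() -> &'static [&'static str] {
        &[sql!(CREATE TABLE examples(value);)]
    }
}

fn main() {
    db::smol::block_on(async {
        // Writes a db.sqlite under ./test-db, which can then be imported
        // into https://sqliteonline.com/ for poking at the schema and data.
        let connection = db::open_db::<ExampleDb>(
            Path::new("./test-db"),
            &util::channel::ReleaseChannel::Dev,
        )
        .await;
        connection
            .exec("INSERT INTO examples(value) VALUES ('test row')") // assumed sqlez API
            .unwrap()()
        .unwrap();
    });
}
```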

@ -1,119 +1,365 @@
mod kvp;
mod migrations;
pub mod kvp;
pub mod query;

use std::fs;
// Re-export
pub use anyhow;
use anyhow::Context;
pub use indoc::indoc;
pub use lazy_static;
use parking_lot::{Mutex, RwLock};
pub use smol;
pub use sqlez;
pub use sqlez_macros;
pub use util::channel::{RELEASE_CHANNEL, RELEASE_CHANNEL_NAME};
pub use util::paths::DB_DIR;

use sqlez::domain::Migrator;
use sqlez::thread_safe_connection::ThreadSafeConnection;
use sqlez_macros::sql;
use std::fs::create_dir_all;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{SystemTime, UNIX_EPOCH};
use util::{async_iife, ResultExt};
use util::channel::ReleaseChannel;

use anyhow::Result;
use log::error;
use parking_lot::Mutex;
use rusqlite::Connection;
const CONNECTION_INITIALIZE_QUERY: &'static str = sql!(
PRAGMA foreign_keys=TRUE;
);

use migrations::MIGRATIONS;
const DB_INITIALIZE_QUERY: &'static str = sql!(
PRAGMA journal_mode=WAL;
PRAGMA busy_timeout=1;
PRAGMA case_sensitive_like=TRUE;
PRAGMA synchronous=NORMAL;
);

#[derive(Clone)]
pub enum Db {
Real(Arc<RealDb>),
Null,
const FALLBACK_DB_NAME: &'static str = "FALLBACK_MEMORY_DB";

const DB_FILE_NAME: &'static str = "db.sqlite";

lazy_static::lazy_static! {
static ref DB_FILE_OPERATIONS: Mutex<()> = Mutex::new(());
pub static ref BACKUP_DB_PATH: RwLock<Option<PathBuf>> = RwLock::new(None);
pub static ref ALL_FILE_DB_FAILED: AtomicBool = AtomicBool::new(false);
}

pub struct RealDb {
connection: Mutex<Connection>,
path: Option<PathBuf>,
}
/// Open or create a database at the given directory path.
/// This will retry a couple times if there are failures. If opening fails once, the db directory
/// is moved to a backup folder and a new one is created. If that fails, a shared in memory db is created.
/// In either case, static variables are set so that the user can be notified.
pub async fn open_db<M: Migrator + 'static>(db_dir: &Path, release_channel: &ReleaseChannel) -> ThreadSafeConnection<M> {
let release_channel_name = release_channel.dev_name();
let main_db_dir = db_dir.join(Path::new(&format!("0-{}", release_channel_name)));

impl Db {
/// Open or create a database at the given directory path.
pub fn open(db_dir: &Path, channel: &'static str) -> Self {
// Use 0 for now. Will implement incrementing and clearing of old db files soon TM
let current_db_dir = db_dir.join(Path::new(&format!("0-{}", channel)));
fs::create_dir_all(&current_db_dir)
.expect("Should be able to create the database directory");
let db_path = current_db_dir.join(Path::new("db.sqlite"));

Connection::open(db_path)
.map_err(Into::into)
.and_then(|connection| Self::initialize(connection))
.map(|connection| {
Db::Real(Arc::new(RealDb {
connection,
path: Some(db_dir.to_path_buf()),
}))
})
.unwrap_or_else(|e| {
error!(
"Connecting to file backed db failed. Reverting to null db. {}",
e
);
Self::Null
})
}

/// Open a in memory database for testing and as a fallback.
#[cfg(any(test, feature = "test-support"))]
pub fn open_in_memory() -> Self {
Connection::open_in_memory()
.map_err(Into::into)
.and_then(|connection| Self::initialize(connection))
.map(|connection| {
Db::Real(Arc::new(RealDb {
connection,
path: None,
}))
})
.unwrap_or_else(|e| {
error!(
"Connecting to in memory db failed. Reverting to null db. {}",
e
);
Self::Null
})
}

fn initialize(mut conn: Connection) -> Result<Mutex<Connection>> {
MIGRATIONS.to_latest(&mut conn)?;

conn.pragma_update(None, "journal_mode", "WAL")?;
conn.pragma_update(None, "synchronous", "NORMAL")?;
conn.pragma_update(None, "foreign_keys", true)?;
conn.pragma_update(None, "case_sensitive_like", true)?;

Ok(Mutex::new(conn))
}

pub fn persisting(&self) -> bool {
self.real().and_then(|db| db.path.as_ref()).is_some()
}

pub fn real(&self) -> Option<&RealDb> {
match self {
Db::Real(db) => Some(&db),
_ => None,
let connection = async_iife!({
// Note: This still has a race condition where 1 set of migrations succeeds
// (e.g. (Workspace, Editor)) and another fails (e.g. (Workspace, Terminal))
// This will cause the first connection to have the database taken out
// from under it. This *should* be fine though. The second database failure will
// cause errors in the log and so should be observed by developers while writing
// soon-to-be good migrations. If user databases are corrupted, we toss them out
// and try again from a blank. As long as running all migrations from start to end
// on a blank database is ok, this race condition will never be triggered.
//
// Basically: Don't ever push invalid migrations to stable or everyone will have
// a bad time.

// If no db folder, create one at 0-{channel}
create_dir_all(&main_db_dir).context("Could not create db directory")?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));

// Optimistically open databases in parallel
if !DB_FILE_OPERATIONS.is_locked() {
// Try building a connection
if let Some(connection) = open_main_db(&db_path).await {
return Ok(connection)
};
}

// Take a lock in the failure case so that we move the db once per process instead
// of potentially multiple times from different threads. This shouldn't happen in the
// normal path
let _lock = DB_FILE_OPERATIONS.lock();
if let Some(connection) = open_main_db(&db_path).await {
return Ok(connection)
};

let backup_timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("System clock is set before the unix timestamp, Zed does not support this region of spacetime")
.as_millis();

// If failed, move 0-{channel} to {current unix timestamp}-{channel}
let backup_db_dir = db_dir.join(Path::new(&format!(
"{}-{}",
backup_timestamp,
release_channel_name,
)));

std::fs::rename(&main_db_dir, &backup_db_dir)
.context("Failed to clean up corrupted database, panicking.")?;

// Set a static ref with the failed timestamp and error so we can notify the user
{
let mut guard = BACKUP_DB_PATH.write();
*guard = Some(backup_db_dir);
}

// Create a new 0-{channel}
create_dir_all(&main_db_dir).context("Should be able to create the database directory")?;
let db_path = main_db_dir.join(Path::new(DB_FILE_NAME));

// Try again
open_main_db(&db_path).await.context("Could not open newly created db")
}).await.log_err();

if let Some(connection) = connection {
return connection;
}

// Set another static ref so that we can escalate the notification
ALL_FILE_DB_FAILED.store(true, Ordering::Release);

// If still failed, create an in memory db with a known name
open_fallback_db().await
}
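
For orientation, this is roughly the calling side that `define_connection!` (further down in this file) generates — a minimal sketch, assuming a `Workspace` type that implements the `Domain`/`Migrator` traits; `DB_DIR`, `RELEASE_CHANNEL`, `BACKUP_DB_PATH`, and `ALL_FILE_DB_FAILED` are the statics and re-exports declared above.

```rust
// Sketch only: `Workspace` stands in for any Domain implementor.
let db = smol::block_on(open_db::<Workspace>(&DB_DIR, &RELEASE_CHANNEL));

// After opening, the statics set above can drive user-facing notifications:
if let Some(backup_dir) = BACKUP_DB_PATH.read().clone() {
    log::warn!("database was corrupted; backed up to {:?}", backup_dir);
}
if ALL_FILE_DB_FAILED.load(Ordering::Acquire) {
    log::warn!("all file-backed databases failed; using in-memory fallback");
}
```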

impl Drop for Db {
fn drop(&mut self) {
match self {
Db::Real(real_db) => {
let lock = real_db.connection.lock();
async fn open_main_db<M: Migrator>(db_path: &PathBuf) -> Option<ThreadSafeConnection<M>> {
log::info!("Opening main db");
ThreadSafeConnection::<M>::builder(db_path.to_string_lossy().as_ref(), true)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
.build()
.await
.log_err()
}

let _ = lock.pragma_update(None, "analysis_limit", "500");
let _ = lock.pragma_update(None, "optimize", "");
async fn open_fallback_db<M: Migrator>() -> ThreadSafeConnection<M> {
log::info!("Opening fallback db");
ThreadSafeConnection::<M>::builder(FALLBACK_DB_NAME, false)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
.build()
.await
.expect(
"Fallback in memory database failed. Likely initialization queries or migrations have fundamental errors",
)
}

#[cfg(any(test, feature = "test-support"))]
pub async fn open_test_db<M: Migrator>(db_name: &str) -> ThreadSafeConnection<M> {
use sqlez::thread_safe_connection::locking_queue;

ThreadSafeConnection::<M>::builder(db_name, false)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
.with_connection_initialize_query(CONNECTION_INITIALIZE_QUERY)
// Serialize queued writes via a mutex and run them synchronously
.with_write_queue_constructor(locking_queue())
.build()
.await
.unwrap()
}

/// Implements a basic DB wrapper for a given domain
#[macro_export]
macro_rules! define_connection {
(pub static ref $id:ident: $t:ident<()> = $migrations:expr;) => {
pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>);

impl ::std::ops::Deref for $t {
type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<$t>;

fn deref(&self) -> &Self::Target {
&self.0
}
Db::Null => {}
}
}

impl $crate::sqlez::domain::Domain for $t {
fn name() -> &'static str {
stringify!($t)
}

fn migrations() -> &'static [&'static str] {
$migrations
}
}

#[cfg(any(test, feature = "test-support"))]
$crate::lazy_static::lazy_static! {
pub static ref $id: $t = $t($crate::smol::block_on($crate::open_test_db(stringify!($id))));
}

#[cfg(not(any(test, feature = "test-support")))]
$crate::lazy_static::lazy_static! {
pub static ref $id: $t = $t($crate::smol::block_on($crate::open_db(&$crate::DB_DIR, &$crate::RELEASE_CHANNEL)));
}
};
(pub static ref $id:ident: $t:ident<$($d:ty),+> = $migrations:expr;) => {
pub struct $t($crate::sqlez::thread_safe_connection::ThreadSafeConnection<( $($d),+, $t )>);

impl ::std::ops::Deref for $t {
type Target = $crate::sqlez::thread_safe_connection::ThreadSafeConnection<($($d),+, $t)>;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl $crate::sqlez::domain::Domain for $t {
fn name() -> &'static str {
stringify!($t)
}

fn migrations() -> &'static [&'static str] {
$migrations
}
}

#[cfg(any(test, feature = "test-support"))]
$crate::lazy_static::lazy_static! {
pub static ref $id: $t = $t($crate::smol::block_on($crate::open_test_db(stringify!($id))));
}

#[cfg(not(any(test, feature = "test-support")))]
$crate::lazy_static::lazy_static! {
pub static ref $id: $t = $t($crate::smol::block_on($crate::open_db(&$crate::DB_DIR, &$crate::RELEASE_CHANNEL)));
}
};
}
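
To make the macro's shape concrete, here is a sketch of the single-domain form; it mirrors the real `KEY_VALUE_STORE` invocation in `kvp.rs` further down in this diff, and the `EXAMPLE_STORE` name and table are illustrative only.

```rust
// Illustrative only; this expands to an `ExampleStore` wrapper type, a Domain
// impl carrying the migrations, and a lazily-opened EXAMPLE_STORE static.
define_connection!(pub static ref EXAMPLE_STORE: ExampleStore<()> =
    &[sql!(
        CREATE TABLE IF NOT EXISTS examples(
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL
        ) STRICT;
    )];
);
```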

#[cfg(test)]
mod tests {
use crate::migrations::MIGRATIONS;
use std::{fs, thread};

#[test]
fn test_migrations() {
assert!(MIGRATIONS.validate().is_ok());
use sqlez::{domain::Domain, connection::Connection};
use sqlez_macros::sql;
use tempdir::TempDir;

use crate::{open_db, DB_FILE_NAME};

// Test bad migration panics
#[gpui::test]
#[should_panic]
async fn test_bad_migration_panics() {
enum BadDB {}

impl Domain for BadDB {
fn name() -> &'static str {
"db_tests"
}

fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test(value);),
// failure because test already exists
sql!(CREATE TABLE test(value);)]
}
}

let tempdir = TempDir::new("DbTests").unwrap();
let _bad_db = open_db::<BadDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
}

/// Test that DB exists but corrupted (causing recreate)
#[gpui::test]
async fn test_db_corruption() {
enum CorruptedDB {}

impl Domain for CorruptedDB {
fn name() -> &'static str {
"db_tests"
}

fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test(value);)]
}
}

enum GoodDB {}

impl Domain for GoodDB {
fn name() -> &'static str {
"db_tests" //Notice same name
}

fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test2(value);)] //But different migration
}
}

let tempdir = TempDir::new("DbTests").unwrap();
{
let corrupt_db = open_db::<CorruptedDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
assert!(corrupt_db.persistent());
}

let good_db = open_db::<GoodDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
assert!(good_db.select_row::<usize>("SELECT * FROM test2").unwrap()().unwrap().is_none());

let mut corrupted_backup_dir = fs::read_dir(
tempdir.path()
).unwrap().find(|entry| {
!entry.as_ref().unwrap().file_name().to_str().unwrap().starts_with("0")
}
).unwrap().unwrap().path();
corrupted_backup_dir.push(DB_FILE_NAME);

dbg!(&corrupted_backup_dir);

let backup = Connection::open_file(&corrupted_backup_dir.to_string_lossy());
assert!(backup.select_row::<usize>("SELECT * FROM test").unwrap()().unwrap().is_none());
}

/// Test that DB exists but corrupted (causing recreate)
#[gpui::test]
async fn test_simultaneous_db_corruption() {
enum CorruptedDB {}

impl Domain for CorruptedDB {
fn name() -> &'static str {
"db_tests"
}

fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test(value);)]
}
}

enum GoodDB {}

impl Domain for GoodDB {
fn name() -> &'static str {
"db_tests" //Notice same name
}

fn migrations() -> &'static [&'static str] {
&[sql!(CREATE TABLE test2(value);)] //But different migration
}
}

let tempdir = TempDir::new("DbTests").unwrap();
{
// Setup the bad database
let corrupt_db = open_db::<CorruptedDB>(tempdir.path(), &util::channel::ReleaseChannel::Dev).await;
assert!(corrupt_db.persistent());
}

// Try to connect to it a bunch of times at once
let mut guards = vec![];
for _ in 0..10 {
let tmp_path = tempdir.path().to_path_buf();
let guard = thread::spawn(move || {
let good_db = smol::block_on(open_db::<GoodDB>(tmp_path.as_path(), &util::channel::ReleaseChannel::Dev));
assert!(good_db.select_row::<usize>("SELECT * FROM test2").unwrap()().unwrap().is_none());
});

guards.push(guard);
}

for guard in guards.into_iter() {
assert!(guard.join().is_ok());
}
}
}

@ -1,311 +0,0 @@
use std::{ffi::OsStr, fmt::Display, hash::Hash, os::unix::prelude::OsStrExt, path::PathBuf};

use anyhow::Result;
use collections::HashSet;
use rusqlite::{named_params, params};

use super::Db;

pub(crate) const ITEMS_M_1: &str = "
CREATE TABLE items(
id INTEGER PRIMARY KEY,
kind TEXT
) STRICT;
CREATE TABLE item_path(
item_id INTEGER PRIMARY KEY,
path BLOB
) STRICT;
CREATE TABLE item_query(
item_id INTEGER PRIMARY KEY,
query TEXT
) STRICT;
";

#[derive(PartialEq, Eq, Hash, Debug)]
pub enum SerializedItemKind {
Editor,
Terminal,
ProjectSearch,
Diagnostics,
}

impl Display for SerializedItemKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(&format!("{:?}", self))
}
}

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum SerializedItem {
Editor(usize, PathBuf),
Terminal(usize),
ProjectSearch(usize, String),
Diagnostics(usize),
}

impl SerializedItem {
fn kind(&self) -> SerializedItemKind {
match self {
SerializedItem::Editor(_, _) => SerializedItemKind::Editor,
SerializedItem::Terminal(_) => SerializedItemKind::Terminal,
SerializedItem::ProjectSearch(_, _) => SerializedItemKind::ProjectSearch,
SerializedItem::Diagnostics(_) => SerializedItemKind::Diagnostics,
}
}

fn id(&self) -> usize {
match self {
SerializedItem::Editor(id, _)
| SerializedItem::Terminal(id)
| SerializedItem::ProjectSearch(id, _)
| SerializedItem::Diagnostics(id) => *id,
}
}
}

impl Db {
fn write_item(&self, serialized_item: SerializedItem) -> Result<()> {
self.real()
.map(|db| {
let mut lock = db.connection.lock();
let tx = lock.transaction()?;

// Serialize the item
let id = serialized_item.id();
{
let mut stmt = tx.prepare_cached(
"INSERT OR REPLACE INTO items(id, kind) VALUES ((?), (?))",
)?;

dbg!("inserting item");
stmt.execute(params![id, serialized_item.kind().to_string()])?;
}

// Serialize item data
match &serialized_item {
SerializedItem::Editor(_, path) => {
dbg!("inserting path");
let mut stmt = tx.prepare_cached(
"INSERT OR REPLACE INTO item_path(item_id, path) VALUES ((?), (?))",
)?;

let path_bytes = path.as_os_str().as_bytes();
stmt.execute(params![id, path_bytes])?;
}
SerializedItem::ProjectSearch(_, query) => {
dbg!("inserting query");
let mut stmt = tx.prepare_cached(
"INSERT OR REPLACE INTO item_query(item_id, query) VALUES ((?), (?))",
)?;

stmt.execute(params![id, query])?;
}
_ => {}
}

tx.commit()?;

let mut stmt = lock.prepare_cached("SELECT id, kind FROM items")?;
let _ = stmt
.query_map([], |row| {
let zero: usize = row.get(0)?;
let one: String = row.get(1)?;

dbg!(zero, one);
Ok(())
})?
.collect::<Vec<Result<(), _>>>();

Ok(())
})
.unwrap_or(Ok(()))
}

fn delete_item(&self, item_id: usize) -> Result<()> {
self.real()
.map(|db| {
let lock = db.connection.lock();

let mut stmt = lock.prepare_cached(
r#"
DELETE FROM items WHERE id = (:id);
DELETE FROM item_path WHERE id = (:id);
DELETE FROM item_query WHERE id = (:id);
"#,
)?;

stmt.execute(named_params! {":id": item_id})?;

Ok(())
})
.unwrap_or(Ok(()))
}

fn take_items(&self) -> Result<HashSet<SerializedItem>> {
self.real()
.map(|db| {
let mut lock = db.connection.lock();

let tx = lock.transaction()?;

// When working with transactions in rusqlite, need to make this kind of scope
// To make the borrow stuff work correctly. Don't know why, rust is wild.
let result = {
let mut editors_stmt = tx.prepare_cached(
r#"
SELECT items.id, item_path.path
FROM items
LEFT JOIN item_path
ON items.id = item_path.item_id
WHERE items.kind = ?;
"#,
)?;

let editors_iter = editors_stmt.query_map(
[SerializedItemKind::Editor.to_string()],
|row| {
let id: usize = row.get(0)?;

let buf: Vec<u8> = row.get(1)?;
let path: PathBuf = OsStr::from_bytes(&buf).into();

Ok(SerializedItem::Editor(id, path))
},
)?;

let mut terminals_stmt = tx.prepare_cached(
r#"
SELECT items.id
FROM items
WHERE items.kind = ?;
"#,
)?;
let terminals_iter = terminals_stmt.query_map(
[SerializedItemKind::Terminal.to_string()],
|row| {
let id: usize = row.get(0)?;

Ok(SerializedItem::Terminal(id))
},
)?;

let mut search_stmt = tx.prepare_cached(
r#"
SELECT items.id, item_query.query
FROM items
LEFT JOIN item_query
ON items.id = item_query.item_id
WHERE items.kind = ?;
"#,
)?;
let searches_iter = search_stmt.query_map(
[SerializedItemKind::ProjectSearch.to_string()],
|row| {
let id: usize = row.get(0)?;
let query = row.get(1)?;

Ok(SerializedItem::ProjectSearch(id, query))
},
)?;

#[cfg(debug_assertions)]
let tmp =
searches_iter.collect::<Vec<Result<SerializedItem, rusqlite::Error>>>();
#[cfg(debug_assertions)]
debug_assert!(tmp.len() == 0 || tmp.len() == 1);
#[cfg(debug_assertions)]
let searches_iter = tmp.into_iter();

let mut diagnostic_stmt = tx.prepare_cached(
r#"
SELECT items.id
FROM items
WHERE items.kind = ?;
"#,
)?;

let diagnostics_iter = diagnostic_stmt.query_map(
[SerializedItemKind::Diagnostics.to_string()],
|row| {
let id: usize = row.get(0)?;

Ok(SerializedItem::Diagnostics(id))
},
)?;

#[cfg(debug_assertions)]
let tmp =
diagnostics_iter.collect::<Vec<Result<SerializedItem, rusqlite::Error>>>();
#[cfg(debug_assertions)]
debug_assert!(tmp.len() == 0 || tmp.len() == 1);
#[cfg(debug_assertions)]
let diagnostics_iter = tmp.into_iter();

let res = editors_iter
.chain(terminals_iter)
.chain(diagnostics_iter)
.chain(searches_iter)
.collect::<Result<HashSet<SerializedItem>, rusqlite::Error>>()?;

let mut delete_stmt = tx.prepare_cached(
r#"
DELETE FROM items;
DELETE FROM item_path;
DELETE FROM item_query;
"#,
)?;

delete_stmt.execute([])?;

res
};

tx.commit()?;

Ok(result)
})
.unwrap_or(Ok(HashSet::default()))
}
}

#[cfg(test)]
mod test {
use anyhow::Result;

use super::*;

#[test]
fn test_items_round_trip() -> Result<()> {
let db = Db::open_in_memory();

let mut items = vec![
SerializedItem::Editor(0, PathBuf::from("/tmp/test.txt")),
SerializedItem::Terminal(1),
SerializedItem::ProjectSearch(2, "Test query!".to_string()),
SerializedItem::Diagnostics(3),
]
.into_iter()
.collect::<HashSet<_>>();

for item in items.iter() {
dbg!("Inserting... ");
db.write_item(item.clone())?;
}

assert_eq!(items, db.take_items()?);

// Check that it's empty, as expected
assert_eq!(HashSet::default(), db.take_items()?);

for item in items.iter() {
db.write_item(item.clone())?;
}

items.remove(&SerializedItem::ProjectSearch(2, "Test query!".to_string()));
db.delete_item(2)?;

assert_eq!(items, db.take_items()?);

Ok(())
}
}
@ -1,82 +1,62 @@
use anyhow::Result;
use rusqlite::OptionalExtension;
use sqlez_macros::sql;

use super::Db;
use crate::{define_connection, query};

pub(crate) const KVP_M_1_UP: &str = "
CREATE TABLE kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
";
define_connection!(pub static ref KEY_VALUE_STORE: KeyValueStore<()> =
&[sql!(
CREATE TABLE IF NOT EXISTS kv_store(
key TEXT PRIMARY KEY,
value TEXT NOT NULL
) STRICT;
)];
);

impl Db {
pub fn read_kvp(&self, key: &str) -> Result<Option<String>> {
self.real()
.map(|db| {
let lock = db.connection.lock();
let mut stmt = lock.prepare_cached("SELECT value FROM kv_store WHERE key = (?)")?;

Ok(stmt.query_row([key], |row| row.get(0)).optional()?)
})
.unwrap_or(Ok(None))
impl KeyValueStore {
query! {
pub fn read_kvp(key: &str) -> Result<Option<String>> {
SELECT value FROM kv_store WHERE key = (?)
}
}

pub fn write_kvp(&self, key: &str, value: &str) -> Result<()> {
self.real()
.map(|db| {
let lock = db.connection.lock();

let mut stmt = lock.prepare_cached(
"INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?))",
)?;

stmt.execute([key, value])?;

Ok(())
})
.unwrap_or(Ok(()))
query! {
pub async fn write_kvp(key: String, value: String) -> Result<()> {
INSERT OR REPLACE INTO kv_store(key, value) VALUES ((?), (?))
}
}

pub fn delete_kvp(&self, key: &str) -> Result<()> {
self.real()
.map(|db| {
let lock = db.connection.lock();

let mut stmt = lock.prepare_cached("DELETE FROM kv_store WHERE key = (?)")?;

stmt.execute([key])?;

Ok(())
})
.unwrap_or(Ok(()))
query! {
pub async fn delete_kvp(key: String) -> Result<()> {
DELETE FROM kv_store WHERE key = (?)
}
}
}

#[cfg(test)]
mod tests {
use anyhow::Result;
use crate::kvp::KeyValueStore;

use super::*;
#[gpui::test]
async fn test_kvp() {
let db = KeyValueStore(crate::open_test_db("test_kvp").await);

#[test]
fn test_kvp() -> Result<()> {
let db = Db::open_in_memory();
assert_eq!(db.read_kvp("key-1").unwrap(), None);

assert_eq!(db.read_kvp("key-1")?, None);
db.write_kvp("key-1".to_string(), "one".to_string())
.await
.unwrap();
assert_eq!(db.read_kvp("key-1").unwrap(), Some("one".to_string()));

db.write_kvp("key-1", "one")?;
assert_eq!(db.read_kvp("key-1")?, Some("one".to_string()));
db.write_kvp("key-1".to_string(), "one-2".to_string())
.await
.unwrap();
assert_eq!(db.read_kvp("key-1").unwrap(), Some("one-2".to_string()));

db.write_kvp("key-1", "one-2")?;
assert_eq!(db.read_kvp("key-1")?, Some("one-2".to_string()));
db.write_kvp("key-2".to_string(), "two".to_string())
.await
.unwrap();
assert_eq!(db.read_kvp("key-2").unwrap(), Some("two".to_string()));

db.write_kvp("key-2", "two")?;
assert_eq!(db.read_kvp("key-2")?, Some("two".to_string()));

db.delete_kvp("key-1")?;
assert_eq!(db.read_kvp("key-1")?, None);

Ok(())
db.delete_kvp("key-1".to_string()).await.unwrap();
assert_eq!(db.read_kvp("key-1").unwrap(), None);
}
}
@@ -1,15 +0,0 @@
use rusqlite_migration::{Migrations, M};

// use crate::items::ITEMS_M_1;
use crate::kvp::KVP_M_1_UP;

// This must be ordered by development time! Only ever add new migrations to the end!!
// Bad things will probably happen if you don't monotonically edit this vec!!!!
// And no re-ordering ever!!!!!!!!!! The results of these migrations are on the user's
// file system and so everything we do here is locked in _f_o_r_e_v_e_r_.
lazy_static::lazy_static! {
    pub static ref MIGRATIONS: Migrations<'static> = Migrations::new(vec![
        M::up(KVP_M_1_UP),
        // M::up(ITEMS_M_1),
    ]);
}

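The deleted comment above states the one invariant that outlives this file: applied migrations are recorded on the user's machine, so the list may only ever grow at its tail. As a minimal sketch of honoring that rule with `rusqlite_migration` (the second migration and its table are hypothetical, shown only for illustration):

```rust
use rusqlite_migration::{Migrations, M};

// Hypothetical follow-up migration, illustrating the append-only rule.
const ITEMS_M_2_UP: &str = "CREATE TABLE items(id INTEGER PRIMARY KEY) STRICT;";

lazy_static::lazy_static! {
    pub static ref MIGRATIONS: Migrations<'static> = Migrations::new(vec![
        M::up("CREATE TABLE kv_store(key TEXT PRIMARY KEY, value TEXT NOT NULL) STRICT;"),
        // New migrations are appended here; never reorder or edit the entries
        // above, because the count of applied migrations is persisted on disk.
        M::up(ITEMS_M_2_UP),
    ]);
}
```
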
314 crates/db/src/query.rs Normal file

@@ -0,0 +1,314 @@
#[macro_export]
macro_rules! query {
    ($vis:vis fn $id:ident() -> Result<()> { $($sql:tt)+ }) => {
        $vis fn $id(&self) -> $crate::anyhow::Result<()> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.exec(sql_stmt)?().context(::std::format!(
                "Error in {}, exec failed to execute or parse for: {}",
                ::std::stringify!($id),
                sql_stmt,
            ))
        }
    };
    ($vis:vis async fn $id:ident() -> Result<()> { $($sql:tt)+ }) => {
        $vis async fn $id(&self) -> $crate::anyhow::Result<()> {
            use $crate::anyhow::Context;

            self.write(|connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.exec(sql_stmt)?().context(::std::format!(
                    "Error in {}, exec failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
            }).await
        }
    };
    ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<()> { $($sql:tt)+ }) => {
        $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<()> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.exec_bound::<($($arg_type),+)>(sql_stmt)?(($($arg),+))
                .context(::std::format!(
                    "Error in {}, exec_bound failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis async fn $id:ident($arg:ident: $arg_type:ty) -> Result<()> { $($sql:tt)+ }) => {
        $vis async fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<()> {
            use $crate::anyhow::Context;

            self.write(move |connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.exec_bound::<$arg_type>(sql_stmt)?($arg)
                    .context(::std::format!(
                        "Error in {}, exec_bound failed to execute or parse for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))
            }).await
        }
    };
    ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<()> { $($sql:tt)+ }) => {
        $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<()> {
            use $crate::anyhow::Context;

            self.write(move |connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.exec_bound::<($($arg_type),+)>(sql_stmt)?(($($arg),+))
                    .context(::std::format!(
                        "Error in {}, exec_bound failed to execute or parse for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))
            }).await
        }
    };
    ($vis:vis fn $id:ident() -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis fn $id(&self) -> $crate::anyhow::Result<Vec<$return_type>> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.select::<$return_type>(sql_stmt)?(())
                .context(::std::format!(
                    "Error in {}, select_row failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis async fn $id:ident() -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis async fn $id(&self) -> $crate::anyhow::Result<Vec<$return_type>> {
            use $crate::anyhow::Context;

            self.write(|connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.select::<$return_type>(sql_stmt)?(())
                    .context(::std::format!(
                        "Error in {}, select_row failed to execute or parse for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))
            }).await
        }
    };
    ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Vec<$return_type>> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.select_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
                .context(::std::format!(
                    "Error in {}, exec_bound failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Vec<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Vec<$return_type>> {
            use $crate::anyhow::Context;

            self.write(|connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.select_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
                    .context(::std::format!(
                        "Error in {}, exec_bound failed to execute or parse for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))
            }).await
        }
    };
    ($vis:vis fn $id:ident() -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis fn $id(&self) -> $crate::anyhow::Result<Option<$return_type>> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.select_row::<$return_type>(sql_stmt)?()
                .context(::std::format!(
                    "Error in {}, select_row failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis async fn $id:ident() -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis async fn $id(&self) -> $crate::anyhow::Result<Option<$return_type>> {
            use $crate::anyhow::Context;

            self.write(|connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.select_row::<$return_type>(sql_stmt)?()
                    .context(::std::format!(
                        "Error in {}, select_row failed to execute or parse for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))
            }).await
        }
    };
    ($vis:vis fn $id:ident($arg:ident: $arg_type:ty) -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<Option<$return_type>> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.select_row_bound::<$arg_type, $return_type>(sql_stmt)?($arg)
                .context(::std::format!(
                    "Error in {}, select_row_bound failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Option<$return_type>> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
                .context(::std::format!(
                    "Error in {}, select_row_bound failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<Option<$return_type:ty>> { $($sql:tt)+ }) => {
        $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<Option<$return_type>> {
            use $crate::anyhow::Context;

            self.write(move |connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
                    .context(::std::format!(
                        "Error in {}, select_row_bound failed to execute or parse for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))
            }).await
        }
    };
    ($vis:vis fn $id:ident() -> Result<$return_type:ty> { $($sql:tt)+ }) => {
        $vis fn $id(&self) -> $crate::anyhow::Result<$return_type> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.select_row::<$return_type>(sql_stmt)?()
                .context(::std::format!(
                    "Error in {}, select_row_bound failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))?
                .context(::std::format!(
                    "Error in {}, select_row_bound expected single row result but found none for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis async fn $id:ident() -> Result<$return_type:ty> { $($sql:tt)+ }) => {
        $vis async fn $id(&self) -> $crate::anyhow::Result<$return_type> {
            use $crate::anyhow::Context;

            self.write(|connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.select_row::<$return_type>(sql_stmt)?()
                    .context(::std::format!(
                        "Error in {}, select_row_bound failed to execute or parse for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))?
                    .context(::std::format!(
                        "Error in {}, select_row_bound expected single row result but found none for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))
            }).await
        }
    };
    ($vis:vis fn $id:ident($arg:ident: $arg_type:ty) -> Result<$return_type:ty> { $($sql:tt)+ }) => {
        $vis fn $id(&self, $arg: $arg_type) -> $crate::anyhow::Result<$return_type> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.select_row_bound::<$arg_type, $return_type>(sql_stmt)?($arg)
                .context(::std::format!(
                    "Error in {}, select_row_bound failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))?
                .context(::std::format!(
                    "Error in {}, select_row_bound expected single row result but found none for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<$return_type:ty> { $($sql:tt)+ }) => {
        $vis fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<$return_type> {
            use $crate::anyhow::Context;

            let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

            self.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
                .context(::std::format!(
                    "Error in {}, select_row_bound failed to execute or parse for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))?
                .context(::std::format!(
                    "Error in {}, select_row_bound expected single row result but found none for: {}",
                    ::std::stringify!($id),
                    sql_stmt
                ))
        }
    };
    ($vis:vis async fn $id:ident($($arg:ident: $arg_type:ty),+) -> Result<$return_type:ty> { $($sql:tt)+ }) => {
        $vis async fn $id(&self, $($arg: $arg_type),+) -> $crate::anyhow::Result<$return_type> {
            use $crate::anyhow::Context;

            self.write(|connection| {
                let sql_stmt = $crate::sqlez_macros::sql!($($sql)+);

                connection.select_row_bound::<($($arg_type),+), $return_type>(sql_stmt)?(($($arg),+))
                    .context(::std::format!(
                        "Error in {}, select_row_bound failed to execute or parse for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))?
                    .context(::std::format!(
                        "Error in {}, select_row_bound expected single row result but found none for: {}",
                        ::std::stringify!($id),
                        sql_stmt
                    ))
            }).await
        }
    };
}

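Taken together, these arms let a connection wrapper declare its data-access layer as plain Rust signatures paired with inline SQL: sync arms run against the connection directly, while async arms route the statement through `self.write(...)`, which the surrounding sqlez connection type provides. A minimal usage sketch, modeled on the `KeyValueStore` methods earlier in this diff (the `TodoDb` wrapper and `todos` table here are hypothetical):

```rust
impl TodoDb {
    // Expands to: pub fn todo_count(&self) -> anyhow::Result<Option<usize>>
    query! {
        pub fn todo_count() -> Result<Option<usize>> {
            SELECT COUNT(*) FROM todos
        }
    }

    // Expands to an async method that runs the DELETE via self.write(...)
    query! {
        pub async fn delete_todo(id: usize) -> Result<()> {
            DELETE FROM todos WHERE id = (?)
        }
    }
}
```
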
@@ -5,8 +5,9 @@ use collections::{BTreeMap, HashSet};
use editor::{
    diagnostic_block_renderer,
    display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, RenderBlock},
    highlight_diagnostic_message, Autoscroll, Editor, ExcerptId, ExcerptRange, MultiBuffer,
    ToOffset,
    highlight_diagnostic_message,
    scroll::autoscroll::Autoscroll,
    Editor, ExcerptId, ExcerptRange, MultiBuffer, ToOffset,
};
use gpui::{
    actions, elements::*, fonts::TextStyle, impl_internal_actions, serde_json, AnyViewHandle,

@@ -29,7 +30,10 @@ use std::{
    sync::Arc,
};
use util::TryFutureExt;
use workspace::{ItemHandle as _, ItemNavHistory, Workspace};
use workspace::{
    item::{Item, ItemEvent, ItemHandle},
    ItemNavHistory, Pane, Workspace,
};

actions!(diagnostics, [Deploy]);

@@ -503,7 +507,7 @@ impl ProjectDiagnosticsEditor {
    }
}

impl workspace::Item for ProjectDiagnosticsEditor {
impl Item for ProjectDiagnosticsEditor {
    fn tab_content(
        &self,
        _detail: Option<usize>,

@@ -571,7 +575,7 @@ impl workspace::Item for ProjectDiagnosticsEditor {
        unreachable!()
    }

    fn to_item_events(event: &Self::Event) -> Vec<workspace::ItemEvent> {
    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
        Editor::to_item_events(event)
    }

@@ -581,7 +585,11 @@ impl workspace::Item for ProjectDiagnosticsEditor {
        });
    }

    fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
    fn clone_on_split(
        &self,
        _workspace_id: workspace::WorkspaceId,
        cx: &mut ViewContext<Self>,
    ) -> Option<Self>
    where
        Self: Sized,
    {

@@ -610,6 +618,20 @@ impl workspace::Item for ProjectDiagnosticsEditor {
    fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
        self.editor.update(cx, |editor, cx| editor.deactivated(cx));
    }

    fn serialized_item_kind() -> Option<&'static str> {
        Some("diagnostics")
    }

    fn deserialize(
        project: ModelHandle<Project>,
        workspace: WeakViewHandle<Workspace>,
        _workspace_id: workspace::WorkspaceId,
        _item_id: workspace::ItemId,
        cx: &mut ViewContext<Pane>,
    ) -> Task<Result<ViewHandle<Self>>> {
        Task::ready(Ok(cx.add_view(|cx| Self::new(project, workspace, cx))))
    }
}

fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {

@@ -781,8 +803,15 @@ mod tests {
        .await;

    let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
    let (_, workspace) =
        cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
    let (_, workspace) = cx.add_window(|cx| {
        Workspace::new(
            Default::default(),
            0,
            project.clone(),
            |_, _| unimplemented!(),
            cx,
        )
    });

    // Create some diagnostics
    project.update(cx, |project, cx| {

@@ -7,7 +7,7 @@ use gpui::{
use language::Diagnostic;
use project::Project;
use settings::Settings;
use workspace::StatusItemView;
use workspace::{item::ItemHandle, StatusItemView};

pub struct DiagnosticIndicator {
    summary: project::DiagnosticSummary,

@@ -219,7 +219,7 @@ impl View for DiagnosticIndicator {
impl StatusItemView for DiagnosticIndicator {
    fn set_active_pane_item(
        &mut self,
        active_pane_item: Option<&dyn workspace::ItemHandle>,
        active_pane_item: Option<&dyn ItemHandle>,
        cx: &mut ViewContext<Self>,
    ) {
        if let Some(editor) = active_pane_item.and_then(|item| item.downcast::<Editor>()) {

@@ -9,11 +9,17 @@ use gpui::{
    View, WeakViewHandle,
};

const DEAD_ZONE: f32 = 4.;

enum State<V: View> {
    Down {
        region_offset: Vector2F,
        region: RectF,
    },
    DeadZone {
        region_offset: Vector2F,
        region: RectF,
    },
    Dragging {
        window_id: usize,
        position: Vector2F,

@@ -35,6 +41,13 @@ impl<V: View> Clone for State<V> {
                region_offset,
                region,
            },
            &State::DeadZone {
                region_offset,
                region,
            } => State::DeadZone {
                region_offset,
                region,
            },
            State::Dragging {
                window_id,
                position,

@@ -101,7 +114,7 @@ impl<V: View> DragAndDrop<V> {
    pub fn drag_started(event: MouseDown, cx: &mut EventContext) {
        cx.update_global(|this: &mut Self, _| {
            this.currently_dragged = Some(State::Down {
                region_offset: event.region.origin() - event.position,
                region_offset: event.position - event.region.origin(),
                region: event.region,
            });
        })

@@ -122,7 +135,31 @@ impl<V: View> DragAndDrop<V> {
                region_offset,
                region,
            })
            | Some(&State::Dragging {
            | Some(&State::DeadZone {
                region_offset,
                region,
            }) => {
                if (dbg!(event.position) - (dbg!(region.origin() + region_offset))).length()
                    > DEAD_ZONE
                {
                    this.currently_dragged = Some(State::Dragging {
                        window_id,
                        region_offset,
                        region,
                        position: event.position,
                        payload,
                        render: Rc::new(move |payload, cx| {
                            render(payload.downcast_ref::<T>().unwrap(), cx)
                        }),
                    });
                } else {
                    this.currently_dragged = Some(State::DeadZone {
                        region_offset,
                        region,
                    })
                }
            }
            Some(&State::Dragging {
                region_offset,
                region,
                ..

@@ -151,6 +188,7 @@ impl<V: View> DragAndDrop<V> {
            .and_then(|state| {
                match state {
                    State::Down { .. } => None,
                    State::DeadZone { .. } => None,
                    State::Dragging {
                        window_id,
                        region_offset,

@@ -163,7 +201,7 @@ impl<V: View> DragAndDrop<V> {
                        return None;
                    }

                    let position = position + region_offset;
                    let position = position - region_offset;
                    Some(
                        Overlay::new(
                            MouseEventHandler::<DraggedElementHandler>::new(0, cx, |_, cx| {

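The hunks above insert a `DeadZone` state between mouse-down and dragging: the payload only becomes a real drag once the pointer has moved more than `DEAD_ZONE` (4 px) from the original mouse-down point, which `region.origin() + region_offset` reconstructs under the new offset convention. A self-contained sketch of just that test (the `Vec2` type is a stand-in for gpui's `Vector2F`):

```rust
const DEAD_ZONE: f32 = 4.;

#[derive(Clone, Copy)]
struct Vec2 {
    x: f32,
    y: f32,
}

impl Vec2 {
    fn length(self) -> f32 {
        (self.x * self.x + self.y * self.y).sqrt()
    }
}

// region_origin + region_offset reconstructs where the mouse went down,
// since the new code stores offset = mouse_down_position - region_origin.
fn past_dead_zone(pointer: Vec2, region_origin: Vec2, region_offset: Vec2) -> bool {
    let mouse_down = Vec2 {
        x: region_origin.x + region_offset.x,
        y: region_origin.y + region_offset.y,
    };
    let delta = Vec2 {
        x: pointer.x - mouse_down.x,
        y: pointer.y - mouse_down.y,
    };
    delta.length() > DEAD_ZONE
}
```
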
@@ -23,6 +23,7 @@ test-support = [
drag_and_drop = { path = "../drag_and_drop" }
text = { path = "../text" }
clock = { path = "../clock" }
db = { path = "../db" }
collections = { path = "../collections" }
context_menu = { path = "../context_menu" }
fuzzy = { path = "../fuzzy" }

@@ -37,6 +38,7 @@ snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" }
theme = { path = "../theme" }
util = { path = "../util" }
sqlez = { path = "../sqlez" }
workspace = { path = "../workspace" }
aho-corasick = "0.7"
anyhow = "1.0"

@@ -9,6 +9,8 @@ mod link_go_to_definition;
mod mouse_context_menu;
pub mod movement;
mod multi_buffer;
mod persistence;
pub mod scroll;
pub mod selections_collection;

#[cfg(test)]

@@ -32,13 +34,13 @@ use gpui::{
    elements::*,
    executor,
    fonts::{self, HighlightStyle, TextStyle},
    geometry::vector::{vec2f, Vector2F},
    geometry::vector::Vector2F,
    impl_actions, impl_internal_actions,
    platform::CursorStyle,
    serde_json::json,
    text_layout, AnyViewHandle, AppContext, AsyncAppContext, Axis, ClipboardItem, Element,
    ElementBox, Entity, ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription,
    Task, View, ViewContext, ViewHandle, WeakViewHandle,
    AnyViewHandle, AppContext, AsyncAppContext, ClipboardItem, Element, ElementBox, Entity,
    ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription, Task, View,
    ViewContext, ViewHandle, WeakViewHandle,
};
use highlight_matching_bracket::refresh_matching_bracket_highlights;
use hover_popover::{hide_hover, HoverState};

@@ -60,11 +62,13 @@ pub use multi_buffer::{
use multi_buffer::{MultiBufferChunks, ToOffsetUtf16};
use ordered_float::OrderedFloat;
use project::{FormatTrigger, LocationLink, Project, ProjectPath, ProjectTransaction};
use scroll::{
    autoscroll::Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide,
};
use selections_collection::{resolve_multiple, MutableSelectionsCollection, SelectionsCollection};
use serde::{Deserialize, Serialize};
use settings::Settings;
use smallvec::SmallVec;
use smol::Timer;
use snippet::Snippet;
use std::{
    any::TypeId,

@@ -80,16 +84,14 @@ use std::{
pub use sum_tree::Bias;
use theme::{DiagnosticStyle, Theme};
use util::{post_inc, ResultExt, TryFutureExt};
use workspace::{ItemNavHistory, Workspace};
use workspace::{ItemNavHistory, Workspace, WorkspaceId};

use crate::git::diff_hunk_to_display;

const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500);
const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);
const MAX_LINE_LEN: usize = 1024;
const MIN_NAVIGATION_HISTORY_ROW_DELTA: i64 = 10;
const MAX_SELECTION_HISTORY_LEN: usize = 1024;
pub const SCROLL_EVENT_SEPARATION: Duration = Duration::from_millis(28);

pub const FORMAT_TIMEOUT: Duration = Duration::from_secs(2);

@@ -99,12 +101,6 @@ pub struct SelectNext {
    pub replace_newest: bool,
}

#[derive(Clone, PartialEq)]
pub struct Scroll {
    pub scroll_position: Vector2F,
    pub axis: Option<Axis>,
}

#[derive(Clone, PartialEq)]
pub struct Select(pub SelectPhase);

@@ -257,7 +253,7 @@ impl_actions!(
    ]
);

impl_internal_actions!(editor, [Scroll, Select, Jump]);
impl_internal_actions!(editor, [Select, Jump]);

enum DocumentHighlightRead {}
enum DocumentHighlightWrite {}

@@ -269,12 +265,8 @@ pub enum Direction {
    Next,
}

#[derive(Default)]
struct ScrollbarAutoHide(bool);

pub fn init(cx: &mut MutableAppContext) {
    cx.add_action(Editor::new_file);
    cx.add_action(Editor::scroll);
    cx.add_action(Editor::select);
    cx.add_action(Editor::cancel);
    cx.add_action(Editor::newline);

@@ -304,12 +296,9 @@ pub fn init(cx: &mut MutableAppContext) {
    cx.add_action(Editor::redo);
    cx.add_action(Editor::move_up);
    cx.add_action(Editor::move_page_up);
    cx.add_action(Editor::page_up);
    cx.add_action(Editor::move_down);
    cx.add_action(Editor::move_page_down);
    cx.add_action(Editor::page_down);
    cx.add_action(Editor::next_screen);

    cx.add_action(Editor::move_left);
    cx.add_action(Editor::move_right);
    cx.add_action(Editor::move_to_previous_word_start);

@@ -369,9 +358,11 @@ pub fn init(cx: &mut MutableAppContext) {
    hover_popover::init(cx);
    link_go_to_definition::init(cx);
    mouse_context_menu::init(cx);
    scroll::actions::init(cx);

    workspace::register_project_item::<Editor>(cx);
    workspace::register_followable_item::<Editor>(cx);
    workspace::register_deserializable_item::<Editor>(cx);
}

trait InvalidationRegion {

@@ -409,46 +400,6 @@ pub enum SelectMode {
    All,
}

#[derive(PartialEq, Eq)]
pub enum Autoscroll {
    Next,
    Strategy(AutoscrollStrategy),
}

impl Autoscroll {
    pub fn fit() -> Self {
        Self::Strategy(AutoscrollStrategy::Fit)
    }

    pub fn newest() -> Self {
        Self::Strategy(AutoscrollStrategy::Newest)
    }

    pub fn center() -> Self {
        Self::Strategy(AutoscrollStrategy::Center)
    }
}

#[derive(PartialEq, Eq, Default)]
pub enum AutoscrollStrategy {
    Fit,
    Newest,
    #[default]
    Center,
    Top,
    Bottom,
}

impl AutoscrollStrategy {
    fn next(&self) -> Self {
        match self {
            AutoscrollStrategy::Center => AutoscrollStrategy::Top,
            AutoscrollStrategy::Top => AutoscrollStrategy::Bottom,
            _ => AutoscrollStrategy::Center,
        }
    }
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub enum EditorMode {
    SingleLine,

@@ -475,74 +426,12 @@ type CompletionId = usize;
type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor;
type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option<HighlightStyle>;

#[derive(Clone, Copy)]
pub struct OngoingScroll {
    last_timestamp: Instant,
    axis: Option<Axis>,
}

impl OngoingScroll {
    fn initial() -> OngoingScroll {
        OngoingScroll {
            last_timestamp: Instant::now() - SCROLL_EVENT_SEPARATION,
            axis: None,
        }
    }

    fn update(&mut self, axis: Option<Axis>) {
        self.last_timestamp = Instant::now();
        self.axis = axis;
    }

    pub fn filter(&self, delta: &mut Vector2F) -> Option<Axis> {
        const UNLOCK_PERCENT: f32 = 1.9;
        const UNLOCK_LOWER_BOUND: f32 = 6.;
        let mut axis = self.axis;

        let x = delta.x().abs();
        let y = delta.y().abs();
        let duration = Instant::now().duration_since(self.last_timestamp);
        if duration > SCROLL_EVENT_SEPARATION {
            //New ongoing scroll will start, determine axis
            axis = if x <= y {
                Some(Axis::Vertical)
            } else {
                Some(Axis::Horizontal)
            };
        } else if x.max(y) >= UNLOCK_LOWER_BOUND {
            //Check if the current ongoing will need to unlock
            match axis {
                Some(Axis::Vertical) => {
                    if x > y && x >= y * UNLOCK_PERCENT {
                        axis = None;
                    }
                }

                Some(Axis::Horizontal) => {
                    if y > x && y >= x * UNLOCK_PERCENT {
                        axis = None;
                    }
                }

                None => {}
            }
        }

        match axis {
            Some(Axis::Vertical) => *delta = vec2f(0., delta.y()),
            Some(Axis::Horizontal) => *delta = vec2f(delta.x(), 0.),
            None => {}
        }

        axis
    }
}

pub struct Editor {
    handle: WeakViewHandle<Self>,
    buffer: ModelHandle<MultiBuffer>,
    display_map: ModelHandle<DisplayMap>,
    pub selections: SelectionsCollection,
    pub scroll_manager: ScrollManager,
    columnar_selection_tail: Option<Anchor>,
    add_selections_state: Option<AddSelectionsState>,
    select_next_state: Option<SelectNextState>,

@@ -552,10 +441,6 @@ pub struct Editor {
    select_larger_syntax_node_stack: Vec<Box<[Selection<usize>]>>,
    ime_transaction: Option<TransactionId>,
    active_diagnostics: Option<ActiveDiagnosticGroup>,
    ongoing_scroll: OngoingScroll,
    scroll_position: Vector2F,
    scroll_top_anchor: Anchor,
    autoscroll_request: Option<(Autoscroll, bool)>,
    soft_wrap_mode_override: Option<settings::SoftWrap>,
    get_field_editor_theme: Option<Arc<GetFieldEditorTheme>>,
    override_text_style: Option<Box<OverrideTextStyle>>,

@@ -563,10 +448,7 @@ pub struct Editor {
    focused: bool,
    blink_manager: ModelHandle<BlinkManager>,
    show_local_selections: bool,
    show_scrollbars: bool,
    hide_scrollbar_task: Option<Task<()>>,
    mode: EditorMode,
    vertical_scroll_margin: f32,
    placeholder_text: Option<Arc<str>>,
    highlighted_rows: Option<Range<u32>>,
    #[allow(clippy::type_complexity)]

@@ -582,13 +464,12 @@ pub struct Editor {
    pending_rename: Option<RenameState>,
    searchable: bool,
    cursor_shape: CursorShape,
    workspace_id: Option<WorkspaceId>,
    keymap_context_layers: BTreeMap<TypeId, gpui::keymap::Context>,
    input_enabled: bool,
    leader_replica_id: Option<u16>,
    hover_state: HoverState,
    link_go_to_definition_state: LinkGoToDefinitionState,
    visible_line_count: Option<f32>,
    last_autoscroll: Option<(Vector2F, f32, f32, AutoscrollStrategy)>,
    _subscriptions: Vec<Subscription>,
}

@@ -597,9 +478,8 @@ pub struct EditorSnapshot {
    pub display_snapshot: DisplaySnapshot,
    pub placeholder_text: Option<Arc<str>>,
    is_focused: bool,
    scroll_anchor: ScrollAnchor,
    ongoing_scroll: OngoingScroll,
    scroll_position: Vector2F,
    scroll_top_anchor: Anchor,
}

#[derive(Clone, Debug)]

@@ -1087,12 +967,9 @@ pub struct ClipboardSelection {

#[derive(Debug)]
pub struct NavigationData {
    // Matching offsets for anchor and scroll_top_anchor allows us to recreate the anchor if the buffer
    // has since been closed
    cursor_anchor: Anchor,
    cursor_position: Point,
    scroll_position: Vector2F,
    scroll_top_anchor: Anchor,
    scroll_anchor: ScrollAnchor,
    scroll_top_row: u32,
}

@@ -1160,9 +1037,8 @@ impl Editor {
            display_map.set_state(&snapshot, cx);
        });
    });
    clone.selections.set_state(&self.selections);
    clone.scroll_position = self.scroll_position;
    clone.scroll_top_anchor = self.scroll_top_anchor;
    clone.selections.clone_state(&self.selections);
    clone.scroll_manager.clone_state(&self.scroll_manager);
    clone.searchable = self.searchable;
    clone
}

@@ -1197,6 +1073,7 @@ impl Editor {
    buffer: buffer.clone(),
    display_map: display_map.clone(),
    selections,
    scroll_manager: ScrollManager::new(),
    columnar_selection_tail: None,
    add_selections_state: None,
    select_next_state: None,

@@ -1209,17 +1086,10 @@ impl Editor {
    soft_wrap_mode_override: None,
    get_field_editor_theme,
    project,
    ongoing_scroll: OngoingScroll::initial(),
    scroll_position: Vector2F::zero(),
    scroll_top_anchor: Anchor::min(),
    autoscroll_request: None,
    focused: false,
    blink_manager: blink_manager.clone(),
    show_local_selections: true,
    show_scrollbars: true,
    hide_scrollbar_task: None,
    mode,
    vertical_scroll_margin: 3.0,
    placeholder_text: None,
    highlighted_rows: None,
    background_highlights: Default::default(),

@@ -1235,13 +1105,12 @@ impl Editor {
    searchable: true,
    override_text_style: None,
    cursor_shape: Default::default(),
    workspace_id: None,
    keymap_context_layers: Default::default(),
    input_enabled: true,
    leader_replica_id: None,
    hover_state: Default::default(),
    link_go_to_definition_state: Default::default(),
    visible_line_count: None,
    last_autoscroll: None,
    _subscriptions: vec![
        cx.observe(&buffer, Self::on_buffer_changed),
        cx.subscribe(&buffer, Self::on_buffer_event),

@@ -1250,7 +1119,7 @@ impl Editor {
    ],
};
this.end_selection(cx);
this.make_scrollbar_visible(cx);
this.scroll_manager.show_scrollbar(cx);

let editor_created_event = EditorCreated(cx.handle());
cx.emit_global(editor_created_event);

@@ -1303,9 +1172,8 @@ impl Editor {
EditorSnapshot {
    mode: self.mode,
    display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)),
    ongoing_scroll: self.ongoing_scroll,
    scroll_position: self.scroll_position,
    scroll_top_anchor: self.scroll_top_anchor,
    scroll_anchor: self.scroll_manager.anchor(),
    ongoing_scroll: self.scroll_manager.ongoing_scroll(),
    placeholder_text: self.placeholder_text.clone(),
    is_focused: self
        .handle

@@ -1344,64 +1212,6 @@ impl Editor {
    cx.notify();
}

pub fn set_vertical_scroll_margin(&mut self, margin_rows: usize, cx: &mut ViewContext<Self>) {
    self.vertical_scroll_margin = margin_rows as f32;
    cx.notify();
}

pub fn set_scroll_position(&mut self, scroll_position: Vector2F, cx: &mut ViewContext<Self>) {
    self.set_scroll_position_internal(scroll_position, true, cx);
}

fn set_scroll_position_internal(
    &mut self,
    scroll_position: Vector2F,
    local: bool,
    cx: &mut ViewContext<Self>,
) {
    let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));

    if scroll_position.y() <= 0. {
        self.scroll_top_anchor = Anchor::min();
        self.scroll_position = scroll_position.max(vec2f(0., 0.));
    } else {
        let scroll_top_buffer_offset =
            DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
        let anchor = map
            .buffer_snapshot
            .anchor_at(scroll_top_buffer_offset, Bias::Right);
        self.scroll_position = vec2f(
            scroll_position.x(),
            scroll_position.y() - anchor.to_display_point(&map).row() as f32,
        );
        self.scroll_top_anchor = anchor;
    }

    self.make_scrollbar_visible(cx);
    self.autoscroll_request.take();
    hide_hover(self, cx);

    cx.emit(Event::ScrollPositionChanged { local });
    cx.notify();
}

fn set_visible_line_count(&mut self, lines: f32) {
    self.visible_line_count = Some(lines)
}

fn set_scroll_top_anchor(
    &mut self,
    anchor: Anchor,
    position: Vector2F,
    cx: &mut ViewContext<Self>,
) {
    self.scroll_top_anchor = anchor;
    self.scroll_position = position;
    self.make_scrollbar_visible(cx);
    cx.emit(Event::ScrollPositionChanged { local: false });
    cx.notify();
}

pub fn set_cursor_shape(&mut self, cursor_shape: CursorShape, cx: &mut ViewContext<Self>) {
    self.cursor_shape = cursor_shape;
    cx.notify();

@@ -1427,199 +1237,6 @@ impl Editor {
    self.input_enabled = input_enabled;
}

pub fn scroll_position(&self, cx: &mut ViewContext<Self>) -> Vector2F {
    let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
    compute_scroll_position(&display_map, self.scroll_position, &self.scroll_top_anchor)
}

pub fn clamp_scroll_left(&mut self, max: f32) -> bool {
    if max < self.scroll_position.x() {
        self.scroll_position.set_x(max);
        true
    } else {
        false
    }
}

pub fn autoscroll_vertically(
    &mut self,
    viewport_height: f32,
    line_height: f32,
    cx: &mut ViewContext<Self>,
) -> bool {
    let visible_lines = viewport_height / line_height;
    let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
    let mut scroll_position =
        compute_scroll_position(&display_map, self.scroll_position, &self.scroll_top_anchor);
    let max_scroll_top = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
        (display_map.max_point().row() as f32 - visible_lines + 1.).max(0.)
    } else {
        display_map.max_point().row() as f32
    };
    if scroll_position.y() > max_scroll_top {
        scroll_position.set_y(max_scroll_top);
        self.set_scroll_position(scroll_position, cx);
    }

    let (autoscroll, local) = if let Some(autoscroll) = self.autoscroll_request.take() {
        autoscroll
    } else {
        return false;
    };

    let first_cursor_top;
    let last_cursor_bottom;
    if let Some(highlighted_rows) = &self.highlighted_rows {
        first_cursor_top = highlighted_rows.start as f32;
        last_cursor_bottom = first_cursor_top + 1.;
    } else if autoscroll == Autoscroll::newest() {
        let newest_selection = self.selections.newest::<Point>(cx);
        first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
        last_cursor_bottom = first_cursor_top + 1.;
    } else {
        let selections = self.selections.all::<Point>(cx);
        first_cursor_top = selections
            .first()
            .unwrap()
            .head()
            .to_display_point(&display_map)
            .row() as f32;
        last_cursor_bottom = selections
            .last()
            .unwrap()
            .head()
            .to_display_point(&display_map)
            .row() as f32
            + 1.0;
    }

    let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
        0.
    } else {
        ((visible_lines - (last_cursor_bottom - first_cursor_top)) / 2.0).floor()
    };
    if margin < 0.0 {
        return false;
    }

    let strategy = match autoscroll {
        Autoscroll::Strategy(strategy) => strategy,
        Autoscroll::Next => {
            let last_autoscroll = &self.last_autoscroll;
            if let Some(last_autoscroll) = last_autoscroll {
                if self.scroll_position == last_autoscroll.0
                    && first_cursor_top == last_autoscroll.1
                    && last_cursor_bottom == last_autoscroll.2
                {
                    last_autoscroll.3.next()
                } else {
                    AutoscrollStrategy::default()
                }
            } else {
                AutoscrollStrategy::default()
            }
        }
    };

    match strategy {
        AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
            let margin = margin.min(self.vertical_scroll_margin);
            let target_top = (first_cursor_top - margin).max(0.0);
            let target_bottom = last_cursor_bottom + margin;
            let start_row = scroll_position.y();
            let end_row = start_row + visible_lines;

            if target_top < start_row {
                scroll_position.set_y(target_top);
                self.set_scroll_position_internal(scroll_position, local, cx);
            } else if target_bottom >= end_row {
                scroll_position.set_y(target_bottom - visible_lines);
                self.set_scroll_position_internal(scroll_position, local, cx);
            }
        }
        AutoscrollStrategy::Center => {
            scroll_position.set_y((first_cursor_top - margin).max(0.0));
            self.set_scroll_position_internal(scroll_position, local, cx);
        }
        AutoscrollStrategy::Top => {
            scroll_position.set_y((first_cursor_top).max(0.0));
            self.set_scroll_position_internal(scroll_position, local, cx);
        }
        AutoscrollStrategy::Bottom => {
            scroll_position.set_y((last_cursor_bottom - visible_lines).max(0.0));
            self.set_scroll_position_internal(scroll_position, local, cx);
        }
    }

    self.last_autoscroll = Some((
        self.scroll_position,
        first_cursor_top,
        last_cursor_bottom,
        strategy,
    ));

    true
}

pub fn autoscroll_horizontally(
    &mut self,
    start_row: u32,
    viewport_width: f32,
    scroll_width: f32,
    max_glyph_width: f32,
    layouts: &[text_layout::Line],
    cx: &mut ViewContext<Self>,
) -> bool {
    let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
    let selections = self.selections.all::<Point>(cx);

    let mut target_left;
    let mut target_right;

    if self.highlighted_rows.is_some() {
        target_left = 0.0_f32;
        target_right = 0.0_f32;
    } else {
        target_left = std::f32::INFINITY;
        target_right = 0.0_f32;
        for selection in selections {
            let head = selection.head().to_display_point(&display_map);
            if head.row() >= start_row && head.row() < start_row + layouts.len() as u32 {
                let start_column = head.column().saturating_sub(3);
                let end_column = cmp::min(display_map.line_len(head.row()), head.column() + 3);
                target_left = target_left.min(
                    layouts[(head.row() - start_row) as usize]
                        .x_for_index(start_column as usize),
                );
                target_right = target_right.max(
                    layouts[(head.row() - start_row) as usize].x_for_index(end_column as usize)
                        + max_glyph_width,
                );
            }
        }
    }

    target_right = target_right.min(scroll_width);

    if target_right - target_left > viewport_width {
        return false;
    }

    let scroll_left = self.scroll_position.x() * max_glyph_width;
    let scroll_right = scroll_left + viewport_width;

    if target_left < scroll_left {
        self.scroll_position.set_x(target_left / max_glyph_width);
        true
    } else if target_right > scroll_right {
        self.scroll_position
            .set_x((target_right - viewport_width) / max_glyph_width);
        true
    } else {
        false
    }
}

fn selections_did_change(
    &mut self,
    local: bool,

@@ -1742,11 +1359,6 @@ impl Editor {
    });
}

fn scroll(&mut self, action: &Scroll, cx: &mut ViewContext<Self>) {
    self.ongoing_scroll.update(action.axis);
    self.set_scroll_position(action.scroll_position, cx);
}

fn select(&mut self, Select(phase): &Select, cx: &mut ViewContext<Self>) {
    self.hide_context_menu(cx);

@@ -2810,7 +2422,7 @@ impl Editor {
let all_edits_within_excerpt = buffer.read_with(&cx, |buffer, _| {
    let excerpt_range = excerpt_range.to_offset(buffer);
    buffer
        .edited_ranges_for_transaction(transaction)
        .edited_ranges_for_transaction::<usize>(transaction)
        .all(|range| {
            excerpt_range.start <= range.start
                && excerpt_range.end >= range.end

@@ -4069,23 +3681,6 @@ impl Editor {
    })
}

pub fn next_screen(&mut self, _: &NextScreen, cx: &mut ViewContext<Editor>) {
    if self.take_rename(true, cx).is_some() {
        return;
    }

    if let Some(_) = self.context_menu.as_mut() {
        return;
    }

    if matches!(self.mode, EditorMode::SingleLine) {
        cx.propagate_action();
        return;
    }

    self.request_autoscroll(Autoscroll::Next, cx);
}

pub fn move_up(&mut self, _: &MoveUp, cx: &mut ViewContext<Self>) {
    if self.take_rename(true, cx).is_some() {
        return;

@@ -4119,10 +3714,13 @@ impl Editor {
        return;
    }

    if let Some(context_menu) = self.context_menu.as_mut() {
        if context_menu.select_first(cx) {
            return;
        }
    if self
        .context_menu
        .as_mut()
        .map(|menu| menu.select_first(cx))
        .unwrap_or(false)
    {
        return;
    }

    if matches!(self.mode, EditorMode::SingleLine) {

@@ -4130,9 +3728,10 @@ impl Editor {
        return;
    }

    let row_count = match self.visible_line_count {
        Some(row_count) => row_count as u32 - 1,
        None => return,
    let row_count = if let Some(row_count) = self.visible_line_count() {
        row_count as u32 - 1
    } else {
        return;
    };

    let autoscroll = if action.center_cursor {

@@ -4154,32 +3753,6 @@ impl Editor {
    });
}

pub fn page_up(&mut self, _: &PageUp, cx: &mut ViewContext<Self>) {
    if self.take_rename(true, cx).is_some() {
        return;
    }

    if let Some(context_menu) = self.context_menu.as_mut() {
        if context_menu.select_first(cx) {
            return;
        }
    }

    if matches!(self.mode, EditorMode::SingleLine) {
        cx.propagate_action();
        return;
    }

    let lines = match self.visible_line_count {
        Some(lines) => lines,
        None => return,
    };

    let cur_position = self.scroll_position(cx);
    let new_pos = cur_position - vec2f(0., lines + 1.);
    self.set_scroll_position(new_pos, cx);
}

pub fn select_up(&mut self, _: &SelectUp, cx: &mut ViewContext<Self>) {
    self.change_selections(Some(Autoscroll::fit()), cx, |s| {
        s.move_heads_with(|map, head, goal| movement::up(map, head, goal, false))

@@ -4217,10 +3790,13 @@ impl Editor {
        return;
    }

    if let Some(context_menu) = self.context_menu.as_mut() {
        if context_menu.select_last(cx) {
            return;
        }
    if self
        .context_menu
        .as_mut()
        .map(|menu| menu.select_last(cx))
        .unwrap_or(false)
    {
        return;
    }

    if matches!(self.mode, EditorMode::SingleLine) {

@@ -4228,9 +3804,10 @@ impl Editor {
        return;
    }

    let row_count = match self.visible_line_count {
        Some(row_count) => row_count as u32 - 1,
        None => return,
    let row_count = if let Some(row_count) = self.visible_line_count() {
        row_count as u32 - 1
    } else {
        return;
    };

    let autoscroll = if action.center_cursor {

@@ -4252,32 +3829,6 @@ impl Editor {
    });
}

pub fn page_down(&mut self, _: &PageDown, cx: &mut ViewContext<Self>) {
    if self.take_rename(true, cx).is_some() {
        return;
    }

    if let Some(context_menu) = self.context_menu.as_mut() {
        if context_menu.select_last(cx) {
            return;
        }
    }

    if matches!(self.mode, EditorMode::SingleLine) {
        cx.propagate_action();
        return;
    }

    let lines = match self.visible_line_count {
        Some(lines) => lines,
        None => return,
    };

    let cur_position = self.scroll_position(cx);
    let new_pos = cur_position + vec2f(0., lines - 1.);
    self.set_scroll_position(new_pos, cx);
}

pub fn select_down(&mut self, _: &SelectDown, cx: &mut ViewContext<Self>) {
    self.change_selections(Some(Autoscroll::fit()), cx, |s| {
        s.move_heads_with(|map, head, goal| movement::down(map, head, goal, false))

@@ -4598,18 +4149,19 @@ impl Editor {

fn push_to_nav_history(
    &self,
    position: Anchor,
    cursor_anchor: Anchor,
    new_position: Option<Point>,
    cx: &mut ViewContext<Self>,
) {
    if let Some(nav_history) = &self.nav_history {
        let buffer = self.buffer.read(cx).read(cx);
        let point = position.to_point(&buffer);
        let scroll_top_row = self.scroll_top_anchor.to_point(&buffer).row;
        let cursor_position = cursor_anchor.to_point(&buffer);
        let scroll_state = self.scroll_manager.anchor();
        let scroll_top_row = scroll_state.top_row(&buffer);
        drop(buffer);

        if let Some(new_position) = new_position {
            let row_delta = (new_position.row as i64 - point.row as i64).abs();
            let row_delta = (new_position.row as i64 - cursor_position.row as i64).abs();
            if row_delta < MIN_NAVIGATION_HISTORY_ROW_DELTA {
                return;
            }

@@ -4617,10 +4169,9 @@ impl Editor {

        nav_history.push(
            Some(NavigationData {
                cursor_anchor: position,
                cursor_position: point,
                scroll_position: self.scroll_position,
                scroll_top_anchor: self.scroll_top_anchor,
                cursor_anchor,
                cursor_position,
                scroll_anchor: scroll_state,
                scroll_top_row,
            }),
            cx,

@@ -5918,16 +5469,6 @@ impl Editor {
    });
}

pub fn request_autoscroll(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext<Self>) {
    self.autoscroll_request = Some((autoscroll, true));
    cx.notify();
}

fn request_autoscroll_remotely(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext<Self>) {
    self.autoscroll_request = Some((autoscroll, false));
    cx.notify();
}

pub fn transact(
    &mut self,
    cx: &mut ViewContext<Self>,

@@ -6336,31 +5877,6 @@ impl Editor {
    self.blink_manager.read(cx).visible() && self.focused
}

pub fn show_scrollbars(&self) -> bool {
    self.show_scrollbars
}

fn make_scrollbar_visible(&mut self, cx: &mut ViewContext<Self>) {
    if !self.show_scrollbars {
        self.show_scrollbars = true;
        cx.notify();
    }

    if cx.default_global::<ScrollbarAutoHide>().0 {
        self.hide_scrollbar_task = Some(cx.spawn_weak(|this, mut cx| async move {
            Timer::after(SCROLLBAR_SHOW_INTERVAL).await;
            if let Some(this) = this.upgrade(&cx) {
                this.update(&mut cx, |this, cx| {
                    this.show_scrollbars = false;
                    cx.notify();
                });
            }
        }));
    } else {
        self.hide_scrollbar_task = None;
    }
}

fn on_buffer_changed(&mut self, _: ModelHandle<MultiBuffer>, cx: &mut ViewContext<Self>) {
    cx.notify();
}

@@ -6568,11 +6084,7 @@ impl EditorSnapshot {
}

pub fn scroll_position(&self) -> Vector2F {
    compute_scroll_position(
        &self.display_snapshot,
        self.scroll_position,
        &self.scroll_top_anchor,
    )
    self.scroll_anchor.scroll_position(&self.display_snapshot)
}
}

@@ -6584,20 +6096,6 @@ impl Deref for EditorSnapshot {
    }
}

fn compute_scroll_position(
    snapshot: &DisplaySnapshot,
    mut scroll_position: Vector2F,
    scroll_top_anchor: &Anchor,
) -> Vector2F {
    if *scroll_top_anchor != Anchor::min() {
        let scroll_top = scroll_top_anchor.to_display_point(snapshot).row() as f32;
        scroll_position.set_y(scroll_top + scroll_position.y());
    } else {
        scroll_position.set_y(0.);
    }
    scroll_position
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ExcerptsAdded {

@@ -6622,7 +6120,6 @@ pub enum Event {
        local: bool,
    },
    Closed,
    IgnoredInput,
}

pub struct EditorFocused(pub ViewHandle<Editor>);

@@ -6808,7 +6305,6 @@ impl View for Editor {
    cx: &mut ViewContext<Self>,
) {
    if !self.input_enabled {
        cx.emit(Event::IgnoredInput);
        return;
    }

@@ -6845,7 +6341,6 @@ impl View for Editor {
    cx: &mut ViewContext<Self>,
) {
    if !self.input_enabled {
        cx.emit(Event::IgnoredInput);
        return;
    }

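The bulk of this editor.rs diff replaces the loose `(scroll_position, scroll_top_anchor)` pair with the `ScrollAnchor` that `ScrollManager` hands out, and the arithmetic is the same one the deleted `compute_scroll_position` performed. A rough sketch of the idea, simplified to bare rows and columns rather than the real anchor and snapshot types (illustrative only, not the real implementation):

```rust
// Illustrative only: scroll state stored relative to an anchor survives edits
// above the viewport, because the anchor row moves with the text it tracks.
struct ScrollAnchor {
    anchor_row: f32, // row the top anchor currently resolves to
    offset_x: f32,   // horizontal scroll, in columns
    offset_y: f32,   // rows scrolled below the anchor
}

impl ScrollAnchor {
    fn scroll_position(&self) -> (f32, f32) {
        // Mirrors the deleted compute_scroll_position: the absolute scroll top
        // is the anchor's current row plus the stored relative offset.
        (self.offset_x, self.anchor_row + self.offset_y)
    }
}
```
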
@@ -12,7 +12,7 @@ use crate::test::{
};
use gpui::{
    executor::Deterministic,
    geometry::rect::RectF,
    geometry::{rect::RectF, vector::vec2f},
    platform::{WindowBounds, WindowOptions},
};
use language::{FakeLspAdapter, LanguageConfig, LanguageRegistry, Point};

@@ -22,7 +22,10 @@ use util::{
    assert_set_eq,
    test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker},
};
use workspace::{FollowableItem, ItemHandle, NavigationEntry, Pane};
use workspace::{
    item::{FollowableItem, ItemHandle},
    NavigationEntry, Pane,
};

#[gpui::test]
fn test_edit_events(cx: &mut MutableAppContext) {

@@ -475,7 +478,7 @@ fn test_clone(cx: &mut gpui::MutableAppContext) {
fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
    cx.set_global(Settings::test(cx));
    cx.set_global(DragAndDrop::<Workspace>::default());
    use workspace::Item;
    use workspace::item::Item;
    let (_, pane) = cx.add_window(Default::default(), |cx| Pane::new(None, cx));
    let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);

@@ -541,31 +544,30 @@ fn test_navigation_history(cx: &mut gpui::MutableAppContext) {

    // Set scroll position to check later
    editor.set_scroll_position(Vector2F::new(5.5, 5.5), cx);
    let original_scroll_position = editor.scroll_position;
    let original_scroll_top_anchor = editor.scroll_top_anchor;
    let original_scroll_position = editor.scroll_manager.anchor();

    // Jump to the end of the document and adjust scroll
    editor.move_to_end(&MoveToEnd, cx);
    editor.set_scroll_position(Vector2F::new(-2.5, -0.5), cx);
    assert_ne!(editor.scroll_position, original_scroll_position);
    assert_ne!(editor.scroll_top_anchor, original_scroll_top_anchor);
    assert_ne!(editor.scroll_manager.anchor(), original_scroll_position);

    let nav_entry = pop_history(&mut editor, cx).unwrap();
    editor.navigate(nav_entry.data.unwrap(), cx);
    assert_eq!(editor.scroll_position, original_scroll_position);
    assert_eq!(editor.scroll_top_anchor, original_scroll_top_anchor);
    assert_eq!(editor.scroll_manager.anchor(), original_scroll_position);

    // Ensure we don't panic when navigation data contains invalid anchors *and* points.
    let mut invalid_anchor = editor.scroll_top_anchor;
    let mut invalid_anchor = editor.scroll_manager.anchor().top_anchor;
    invalid_anchor.text_anchor.buffer_id = Some(999);
    let invalid_point = Point::new(9999, 0);
    editor.navigate(
        Box::new(NavigationData {
            cursor_anchor: invalid_anchor,
            cursor_position: invalid_point,
            scroll_top_anchor: invalid_anchor,
            scroll_anchor: ScrollAnchor {
                top_anchor: invalid_anchor,
                offset: Default::default(),
            },
            scroll_top_row: invalid_point.row,
            scroll_position: Default::default(),
        }),
        cx,
    );

crates/editor/src/element.rs

@@ -1,7 +1,7 @@
 use super::{
     display_map::{BlockContext, ToDisplayPoint},
-    Anchor, DisplayPoint, Editor, EditorMode, EditorSnapshot, Scroll, Select, SelectPhase,
-    SoftWrap, ToPoint, MAX_LINE_LEN,
+    Anchor, DisplayPoint, Editor, EditorMode, EditorSnapshot, Select, SelectPhase, SoftWrap,
+    ToPoint, MAX_LINE_LEN,
 };
 use crate::{
     display_map::{BlockStyle, DisplaySnapshot, TransformBlock},

@@ -13,6 +13,7 @@ use crate::{
         GoToFetchedDefinition, GoToFetchedTypeDefinition, UpdateGoToDefinitionLink,
     },
     mouse_context_menu::DeployMouseContextMenu,
+    scroll::actions::Scroll,
     EditorStyle,
 };
 use clock::ReplicaId;

@@ -955,7 +956,7 @@ impl EditorElement {
             move |_, cx| {
                 if let Some(view) = view.upgrade(cx.deref_mut()) {
                     view.update(cx.deref_mut(), |view, cx| {
-                        view.make_scrollbar_visible(cx);
+                        view.scroll_manager.show_scrollbar(cx);
                     });
                 }
             }

@@ -977,7 +978,7 @@ impl EditorElement {
                     position.set_y(top_row as f32);
                     view.set_scroll_position(position, cx);
                 } else {
-                    view.make_scrollbar_visible(cx);
+                    view.scroll_manager.show_scrollbar(cx);
                 }
             });
         }

@@ -1298,7 +1299,7 @@ impl EditorElement {
         };

         let tooltip_style = cx.global::<Settings>().theme.tooltip.clone();
-        let scroll_x = snapshot.scroll_position.x();
+        let scroll_x = snapshot.scroll_anchor.offset.x();
         let (fixed_blocks, non_fixed_blocks) = snapshot
             .blocks_in_range(rows.clone())
             .partition::<Vec<_>, _>(|(_, block)| match block {

@@ -1670,7 +1671,7 @@ impl Element for EditorElement {
                 ));
             }

-            show_scrollbars = view.show_scrollbars();
+            show_scrollbars = view.scroll_manager.scrollbars_visible();
             include_root = view
                 .project
                 .as_ref()

@@ -1725,7 +1726,7 @@ impl Element for EditorElement {
         );

         self.update_view(cx.app, |view, cx| {
-            let clamped = view.clamp_scroll_left(scroll_max.x());
+            let clamped = view.scroll_manager.clamp_scroll_left(scroll_max.x());

             let autoscrolled = if autoscroll_horizontally {
                 view.autoscroll_horizontally(
crates/editor/src/items.rs

@@ -1,15 +1,16 @@
 use crate::{
     display_map::ToDisplayPoint, link_go_to_definition::hide_link_definition,
-    movement::surrounding_word, Anchor, Autoscroll, Editor, Event, ExcerptId, ExcerptRange,
-    MultiBuffer, MultiBufferSnapshot, NavigationData, ToPoint as _, FORMAT_TIMEOUT,
+    movement::surrounding_word, persistence::DB, scroll::ScrollAnchor, Anchor, Autoscroll, Editor,
+    Event, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, NavigationData, ToPoint as _,
+    FORMAT_TIMEOUT,
 };
-use anyhow::{anyhow, Result};
+use anyhow::{anyhow, Context, Result};
 use collections::HashSet;
 use futures::future::try_join_all;
 use futures::FutureExt;
 use gpui::{
     elements::*, geometry::vector::vec2f, AppContext, Entity, ModelHandle, MutableAppContext,
-    RenderContext, Subscription, Task, View, ViewContext, ViewHandle,
+    RenderContext, Subscription, Task, View, ViewContext, ViewHandle, WeakViewHandle,
 };
 use language::proto::serialize_anchor as serialize_text_anchor;
 use language::{Bias, Buffer, File as _, OffsetRangeExt, Point, SelectionGoal};

@@ -26,11 +27,11 @@ use std::{
     path::{Path, PathBuf},
 };
 use text::Selection;
-use util::TryFutureExt;
+use util::{ResultExt, TryFutureExt};
 use workspace::{
+    item::{FollowableItem, Item, ItemEvent, ItemHandle, ProjectItem},
     searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle},
-    FollowableItem, Item, ItemEvent, ItemHandle, ItemNavHistory, ProjectItem, StatusItemView,
-    ToolbarItemLocation,
+    ItemId, ItemNavHistory, Pane, StatusItemView, ToolbarItemLocation, Workspace, WorkspaceId,
 };

 pub const MAX_TAB_TITLE_LEN: usize = 24;

@@ -135,10 +136,13 @@ impl FollowableItem for Editor {
             if !selections.is_empty() {
                 editor.set_selections_from_remote(selections, cx);
             }

             if let Some(scroll_top_anchor) = scroll_top_anchor {
-                editor.set_scroll_top_anchor(
-                    scroll_top_anchor,
-                    vec2f(state.scroll_x, state.scroll_y),
+                editor.set_scroll_anchor(
+                    ScrollAnchor {
+                        top_anchor: scroll_top_anchor,
+                        offset: vec2f(state.scroll_x, state.scroll_y),
+                    },
                     cx,
                 );
             }

@@ -177,6 +181,7 @@ impl FollowableItem for Editor {

     fn to_state_proto(&self, cx: &AppContext) -> Option<proto::view::Variant> {
         let buffer = self.buffer.read(cx);
+        let scroll_anchor = self.scroll_manager.anchor();
         let excerpts = buffer
             .read(cx)
             .excerpts()

@@ -200,9 +205,9 @@ impl FollowableItem for Editor {
             singleton: buffer.is_singleton(),
             title: (!buffer.is_singleton()).then(|| buffer.title(cx).into()),
             excerpts,
-            scroll_top_anchor: Some(serialize_anchor(&self.scroll_top_anchor)),
-            scroll_x: self.scroll_position.x(),
-            scroll_y: self.scroll_position.y(),
+            scroll_top_anchor: Some(serialize_anchor(&scroll_anchor.top_anchor)),
+            scroll_x: scroll_anchor.offset.x(),
+            scroll_y: scroll_anchor.offset.y(),
             selections: self
                 .selections
                 .disjoint_anchors()

@@ -251,9 +256,10 @@ impl FollowableItem for Editor {
                 true
             }
             Event::ScrollPositionChanged { .. } => {
-                update.scroll_top_anchor = Some(serialize_anchor(&self.scroll_top_anchor));
-                update.scroll_x = self.scroll_position.x();
-                update.scroll_y = self.scroll_position.y();
+                let scroll_anchor = self.scroll_manager.anchor();
+                update.scroll_top_anchor = Some(serialize_anchor(&scroll_anchor.top_anchor));
+                update.scroll_x = scroll_anchor.offset.x();
+                update.scroll_y = scroll_anchor.offset.y();
                 true
             }
             Event::SelectionsChanged { .. } => {

@@ -357,7 +363,7 @@ impl FollowableItem for Editor {
             this.set_selections_from_remote(selections, cx);
             this.request_autoscroll_remotely(Autoscroll::newest(), cx);
         } else if let Some(anchor) = scroll_top_anchor {
-            this.set_scroll_top_anchor(anchor, vec2f(message.scroll_x, message.scroll_y), cx);
+            this.set_scroll_anchor(ScrollAnchor { top_anchor: anchor, offset: vec2f(message.scroll_x, message.scroll_y) }, cx);
         }
     });
     Ok(())

@@ -461,13 +467,12 @@ impl Item for Editor {
             buffer.clip_point(data.cursor_position, Bias::Left)
         };

-        let scroll_top_anchor = if buffer.can_resolve(&data.scroll_top_anchor) {
-            data.scroll_top_anchor
-        } else {
-            buffer.anchor_before(
+        let mut scroll_anchor = data.scroll_anchor;
+        if !buffer.can_resolve(&scroll_anchor.top_anchor) {
+            scroll_anchor.top_anchor = buffer.anchor_before(
                 buffer.clip_point(Point::new(data.scroll_top_row, 0), Bias::Left),
-            )
-        };
+            );
+        }

         drop(buffer);

@@ -475,8 +480,7 @@ impl Item for Editor {
             false
         } else {
             let nav_history = self.nav_history.take();
-            self.scroll_position = data.scroll_position;
-            self.scroll_top_anchor = scroll_top_anchor;
+            self.set_scroll_anchor(scroll_anchor, cx);
             self.change_selections(Some(Autoscroll::fit()), cx, |s| {
                 s.select_ranges([offset..offset])
             });

@@ -550,7 +554,7 @@ impl Item for Editor {
         self.buffer.read(cx).is_singleton()
     }

-    fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
+    fn clone_on_split(&self, _workspace_id: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self>
     where
         Self: Sized,
     {

@@ -673,7 +677,7 @@ impl Item for Editor {
         Task::ready(Ok(()))
     }

-    fn to_item_events(event: &Self::Event) -> Vec<workspace::ItemEvent> {
+    fn to_item_events(event: &Self::Event) -> Vec<ItemEvent> {
         let mut result = Vec::new();
         match event {
             Event::Closed => result.push(ItemEvent::CloseItem),

@@ -735,6 +739,87 @@ impl Item for Editor {
         }));
         Some(breadcrumbs)
     }
+
+    fn added_to_workspace(&mut self, workspace: &mut Workspace, cx: &mut ViewContext<Self>) {
+        let workspace_id = workspace.database_id();
+        let item_id = cx.view_id();
+
+        fn serialize(
+            buffer: ModelHandle<Buffer>,
+            workspace_id: WorkspaceId,
+            item_id: ItemId,
+            cx: &mut MutableAppContext,
+        ) {
+            if let Some(file) = buffer.read(cx).file().and_then(|file| file.as_local()) {
+                let path = file.abs_path(cx);
+
+                cx.background()
+                    .spawn(async move {
+                        DB.save_path(item_id, workspace_id, path.clone())
+                            .await
+                            .log_err()
+                    })
+                    .detach();
+            }
+        }
+
+        if let Some(buffer) = self.buffer().read(cx).as_singleton() {
+            serialize(buffer.clone(), workspace_id, item_id, cx);
+
+            cx.subscribe(&buffer, |this, buffer, event, cx| {
+                if let Some(workspace_id) = this.workspace_id {
+                    if let language::Event::FileHandleChanged = event {
+                        serialize(buffer, workspace_id, cx.view_id(), cx);
+                    }
+                }
+            })
+            .detach();
+        }
+    }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        Some("Editor")
+    }
+
+    fn deserialize(
+        project: ModelHandle<Project>,
+        _workspace: WeakViewHandle<Workspace>,
+        workspace_id: workspace::WorkspaceId,
+        item_id: ItemId,
+        cx: &mut ViewContext<Pane>,
+    ) -> Task<Result<ViewHandle<Self>>> {
+        let project_item: Result<_> = project.update(cx, |project, cx| {
+            // Look up the path with this key associated, create a self with that path
+            let path = DB
+                .get_path(item_id, workspace_id)?
+                .context("No path stored for this editor")?;
+
+            let (worktree, path) = project
+                .find_local_worktree(&path, cx)
+                .with_context(|| format!("No worktree for path: {path:?}"))?;
+            let project_path = ProjectPath {
+                worktree_id: worktree.read(cx).id(),
+                path: path.into(),
+            };
+
+            Ok(project.open_path(project_path, cx))
+        });
+
+        project_item
+            .map(|project_item| {
+                cx.spawn(|pane, mut cx| async move {
+                    let (_, project_item) = project_item.await?;
+                    let buffer = project_item
+                        .downcast::<Buffer>()
+                        .context("Project item at stored path was not a buffer")?;
+
+                    Ok(cx.update(|cx| {
+                        cx.add_view(pane, |cx| Editor::for_buffer(buffer, Some(project), cx))
+                    }))
+                })
+            })
+            .unwrap_or_else(|error| Task::ready(Err(error)))
+    }
 }

 impl ProjectItem for Editor {
crates/editor/src/persistence.rs (new file, 36 lines)

@@ -0,0 +1,36 @@
use std::path::PathBuf;

use db::sqlez_macros::sql;
use db::{define_connection, query};
use workspace::{ItemId, WorkspaceDb, WorkspaceId};

define_connection!(
    pub static ref DB: EditorDb<WorkspaceDb> =
        &[sql! (
            CREATE TABLE editors(
                item_id INTEGER NOT NULL,
                workspace_id INTEGER NOT NULL,
                path BLOB NOT NULL,
                PRIMARY KEY(item_id, workspace_id),
                FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
                ON DELETE CASCADE
                ON UPDATE CASCADE
            ) STRICT;
        )];
);

impl EditorDb {
    query! {
        pub fn get_path(item_id: ItemId, workspace_id: WorkspaceId) -> Result<Option<PathBuf>> {
            SELECT path FROM editors
            WHERE item_id = ? AND workspace_id = ?
        }
    }

    query! {
        pub async fn save_path(item_id: ItemId, workspace_id: WorkspaceId, path: PathBuf) -> Result<()> {
            INSERT OR REPLACE INTO editors(item_id, workspace_id, path)
            VALUES (?, ?, ?)
        }
    }
}
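Taken together, `get_path` and `save_path` give editors a small path store keyed by `(item_id, workspace_id)`, with `INSERT OR REPLACE` keeping at most one row per key and the foreign key cascading deletes when a workspace row goes away. A minimal sketch of the round trip, assuming `item_id`, `workspace_id`, and `path` are in scope the way they are in `added_to_workspace` and `deserialize` above:

    // Persist the buffer's absolute path off the main thread; `log_err`
    // logs failures instead of propagating them.
    cx.background()
        .spawn(async move { DB.save_path(item_id, workspace_id, path).await.log_err() })
        .detach();

    // On workspace restore, the same key recovers the path (None if absent).
    let path = DB.get_path(item_id, workspace_id)?;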
crates/editor/src/scroll.rs (new file, 348 lines)

@@ -0,0 +1,348 @@
pub mod actions;
pub mod autoscroll;
pub mod scroll_amount;

use std::{
    cmp::Ordering,
    time::{Duration, Instant},
};

use gpui::{
    geometry::vector::{vec2f, Vector2F},
    Axis, MutableAppContext, Task, ViewContext,
};
use language::Bias;

use crate::{
    display_map::{DisplaySnapshot, ToDisplayPoint},
    hover_popover::hide_hover,
    Anchor, DisplayPoint, Editor, EditorMode, Event, MultiBufferSnapshot, ToPoint,
};

use self::{
    autoscroll::{Autoscroll, AutoscrollStrategy},
    scroll_amount::ScrollAmount,
};

pub const SCROLL_EVENT_SEPARATION: Duration = Duration::from_millis(28);
const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);

#[derive(Default)]
pub struct ScrollbarAutoHide(pub bool);

#[derive(Clone, Copy, Debug, PartialEq)]
pub struct ScrollAnchor {
    pub offset: Vector2F,
    pub top_anchor: Anchor,
}

impl ScrollAnchor {
    fn new() -> Self {
        Self {
            offset: Vector2F::zero(),
            top_anchor: Anchor::min(),
        }
    }

    pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> Vector2F {
        let mut scroll_position = self.offset;
        if self.top_anchor != Anchor::min() {
            let scroll_top = self.top_anchor.to_display_point(snapshot).row() as f32;
            scroll_position.set_y(scroll_top + scroll_position.y());
        } else {
            scroll_position.set_y(0.);
        }
        scroll_position
    }

    pub fn top_row(&self, buffer: &MultiBufferSnapshot) -> u32 {
        self.top_anchor.to_point(buffer).row
    }
}
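// Illustration (not part of the change): the anchor is re-resolved against
// the snapshot on every call, so `offset.y()` stores only the fractional
// remainder past the anchored row, and `Anchor::min()` doubles as a
// "pinned to the top" sentinel. If `top_anchor` resolves to display row 40
// and `offset == vec2f(0., 0.5)`, `scroll_position` returns
// `vec2f(0., 40.5)`; with `top_anchor == Anchor::min()` the y component is
// forced to 0 regardless of the stored offset, so a freshly-created anchor
// always means "scrolled to the top".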

#[derive(Clone, Copy, Debug)]
pub struct OngoingScroll {
    last_event: Instant,
    axis: Option<Axis>,
}

impl OngoingScroll {
    fn new() -> Self {
        Self {
            last_event: Instant::now() - SCROLL_EVENT_SEPARATION,
            axis: None,
        }
    }

    pub fn filter(&self, delta: &mut Vector2F) -> Option<Axis> {
        const UNLOCK_PERCENT: f32 = 1.9;
        const UNLOCK_LOWER_BOUND: f32 = 6.;
        let mut axis = self.axis;

        let x = delta.x().abs();
        let y = delta.y().abs();
        let duration = Instant::now().duration_since(self.last_event);
        if duration > SCROLL_EVENT_SEPARATION {
            // New ongoing scroll will start; determine axis
            axis = if x <= y {
                Some(Axis::Vertical)
            } else {
                Some(Axis::Horizontal)
            };
        } else if x.max(y) >= UNLOCK_LOWER_BOUND {
            // Check if the current ongoing scroll will need to unlock
            match axis {
                Some(Axis::Vertical) => {
                    if x > y && x >= y * UNLOCK_PERCENT {
                        axis = None;
                    }
                }

                Some(Axis::Horizontal) => {
                    if y > x && y >= x * UNLOCK_PERCENT {
                        axis = None;
                    }
                }

                None => {}
            }
        }

        match axis {
            Some(Axis::Vertical) => *delta = vec2f(0., delta.y()),
            Some(Axis::Horizontal) => *delta = vec2f(delta.x(), 0.),
            None => {}
        }

        axis
    }
}
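// Illustration (not part of the change): `filter` implements trackpad axis
// locking. A new gesture (more than SCROLL_EVENT_SEPARATION since the last
// event) picks the dominant axis; later events are clamped to that axis
// unless the cross-axis delta is at least UNLOCK_LOWER_BOUND and 1.9x the
// locked axis. Worked values, assuming a fresh OngoingScroll:
//
// 1. First event after a pause: delta = (2.0, 10.0). x <= y, so the axis
//    locks to Vertical and the delta is clamped to (0.0, 10.0).
// 2. An event a few ms later: delta = (8.0, 9.0). 8.0 is not > 9.0, so the
//    lock holds and the delta becomes (0.0, 9.0).
// 3. Then delta = (20.0, 6.0): 20.0 > 6.0 and 20.0 >= 6.0 * 1.9, so the
//    lock releases (axis = None) and the delta passes through unchanged.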

pub struct ScrollManager {
    vertical_scroll_margin: f32,
    anchor: ScrollAnchor,
    ongoing: OngoingScroll,
    autoscroll_request: Option<(Autoscroll, bool)>,
    last_autoscroll: Option<(Vector2F, f32, f32, AutoscrollStrategy)>,
    show_scrollbars: bool,
    hide_scrollbar_task: Option<Task<()>>,
    visible_line_count: Option<f32>,
}

impl ScrollManager {
    pub fn new() -> Self {
        ScrollManager {
            vertical_scroll_margin: 3.0,
            anchor: ScrollAnchor::new(),
            ongoing: OngoingScroll::new(),
            autoscroll_request: None,
            show_scrollbars: true,
            hide_scrollbar_task: None,
            last_autoscroll: None,
            visible_line_count: None,
        }
    }

    pub fn clone_state(&mut self, other: &Self) {
        self.anchor = other.anchor;
        self.ongoing = other.ongoing;
    }

    pub fn anchor(&self) -> ScrollAnchor {
        self.anchor
    }

    pub fn ongoing_scroll(&self) -> OngoingScroll {
        self.ongoing
    }

    pub fn update_ongoing_scroll(&mut self, axis: Option<Axis>) {
        self.ongoing.last_event = Instant::now();
        self.ongoing.axis = axis;
    }

    pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> Vector2F {
        self.anchor.scroll_position(snapshot)
    }

    fn set_scroll_position(
        &mut self,
        scroll_position: Vector2F,
        map: &DisplaySnapshot,
        local: bool,
        cx: &mut ViewContext<Editor>,
    ) {
        let new_anchor = if scroll_position.y() <= 0. {
            ScrollAnchor {
                top_anchor: Anchor::min(),
                offset: scroll_position.max(vec2f(0., 0.)),
            }
        } else {
            let scroll_top_buffer_offset =
                DisplayPoint::new(scroll_position.y() as u32, 0).to_offset(&map, Bias::Right);
            let top_anchor = map
                .buffer_snapshot
                .anchor_at(scroll_top_buffer_offset, Bias::Right);

            ScrollAnchor {
                top_anchor,
                offset: vec2f(
                    scroll_position.x(),
                    scroll_position.y() - top_anchor.to_display_point(&map).row() as f32,
                ),
            }
        };

        self.set_anchor(new_anchor, local, cx);
    }

    fn set_anchor(&mut self, anchor: ScrollAnchor, local: bool, cx: &mut ViewContext<Editor>) {
        self.anchor = anchor;
        cx.emit(Event::ScrollPositionChanged { local });
        self.show_scrollbar(cx);
        self.autoscroll_request.take();
        cx.notify();
    }

    pub fn show_scrollbar(&mut self, cx: &mut ViewContext<Editor>) {
        if !self.show_scrollbars {
            self.show_scrollbars = true;
            cx.notify();
        }

        if cx.default_global::<ScrollbarAutoHide>().0 {
            self.hide_scrollbar_task = Some(cx.spawn_weak(|editor, mut cx| async move {
                cx.background().timer(SCROLLBAR_SHOW_INTERVAL).await;
                if let Some(editor) = editor.upgrade(&cx) {
                    editor.update(&mut cx, |editor, cx| {
                        editor.scroll_manager.show_scrollbars = false;
                        cx.notify();
                    });
                }
            }));
        } else {
            self.hide_scrollbar_task = None;
        }
    }

    pub fn scrollbars_visible(&self) -> bool {
        self.show_scrollbars
    }

    pub fn has_autoscroll_request(&self) -> bool {
        self.autoscroll_request.is_some()
    }

    pub fn clamp_scroll_left(&mut self, max: f32) -> bool {
        if max < self.anchor.offset.x() {
            self.anchor.offset.set_x(max);
            true
        } else {
            false
        }
    }
}

impl Editor {
    pub fn vertical_scroll_margin(&mut self) -> usize {
        self.scroll_manager.vertical_scroll_margin as usize
    }

    pub fn set_vertical_scroll_margin(&mut self, margin_rows: usize, cx: &mut ViewContext<Self>) {
        self.scroll_manager.vertical_scroll_margin = margin_rows as f32;
        cx.notify();
    }

    pub fn visible_line_count(&self) -> Option<f32> {
        self.scroll_manager.visible_line_count
    }

    pub(crate) fn set_visible_line_count(&mut self, lines: f32) {
        self.scroll_manager.visible_line_count = Some(lines)
    }

    pub fn set_scroll_position(&mut self, scroll_position: Vector2F, cx: &mut ViewContext<Self>) {
        self.set_scroll_position_internal(scroll_position, true, cx);
    }

    pub(crate) fn set_scroll_position_internal(
        &mut self,
        scroll_position: Vector2F,
        local: bool,
        cx: &mut ViewContext<Self>,
    ) {
        let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));

        hide_hover(self, cx);
        self.scroll_manager
            .set_scroll_position(scroll_position, &map, local, cx);
    }

    pub fn scroll_position(&self, cx: &mut ViewContext<Self>) -> Vector2F {
        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        self.scroll_manager.anchor.scroll_position(&display_map)
    }

    pub fn set_scroll_anchor(&mut self, scroll_anchor: ScrollAnchor, cx: &mut ViewContext<Self>) {
        self.set_scroll_anchor_internal(scroll_anchor, true, cx);
    }

    pub(crate) fn set_scroll_anchor_internal(
        &mut self,
        scroll_anchor: ScrollAnchor,
        local: bool,
        cx: &mut ViewContext<Self>,
    ) {
        hide_hover(self, cx);
        self.scroll_manager.set_anchor(scroll_anchor, local, cx);
    }

    pub fn scroll_screen(&mut self, amount: &ScrollAmount, cx: &mut ViewContext<Self>) {
        if matches!(self.mode, EditorMode::SingleLine) {
            cx.propagate_action();
            return;
        }

        if self.take_rename(true, cx).is_some() {
            return;
        }

        if amount.move_context_menu_selection(self, cx) {
            return;
        }

        let cur_position = self.scroll_position(cx);
        let new_pos = cur_position + vec2f(0., amount.lines(self) - 1.);
        self.set_scroll_position(new_pos, cx);
    }

    /// Returns an ordering. The newest selection is:
    ///     Ordering::Equal => on screen
    ///     Ordering::Less => above the screen
    ///     Ordering::Greater => below the screen
    pub fn newest_selection_on_screen(&self, cx: &mut MutableAppContext) -> Ordering {
        let snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let newest_head = self
            .selections
            .newest_anchor()
            .head()
            .to_display_point(&snapshot);
        let screen_top = self
            .scroll_manager
            .anchor
            .top_anchor
            .to_display_point(&snapshot);

        if screen_top > newest_head {
            return Ordering::Less;
        }

        if let Some(visible_lines) = self.visible_line_count() {
            if newest_head.row() < screen_top.row() + visible_lines as u32 {
                return Ordering::Equal;
            }
        }

        Ordering::Greater
    }
}
crates/editor/src/scroll/actions.rs (new file, 159 lines)

@@ -0,0 +1,159 @@
use gpui::{
    actions, geometry::vector::Vector2F, impl_internal_actions, Axis, MutableAppContext,
    ViewContext,
};
use language::Bias;

use crate::{Editor, EditorMode};

use super::{autoscroll::Autoscroll, scroll_amount::ScrollAmount, ScrollAnchor};

actions!(
    editor,
    [
        LineDown,
        LineUp,
        HalfPageDown,
        HalfPageUp,
        PageDown,
        PageUp,
        NextScreen,
        ScrollCursorTop,
        ScrollCursorCenter,
        ScrollCursorBottom,
    ]
);

#[derive(Clone, PartialEq)]
pub struct Scroll {
    pub scroll_position: Vector2F,
    pub axis: Option<Axis>,
}

impl_internal_actions!(editor, [Scroll]);

pub fn init(cx: &mut MutableAppContext) {
    cx.add_action(Editor::next_screen);
    cx.add_action(Editor::scroll);
    cx.add_action(Editor::scroll_cursor_top);
    cx.add_action(Editor::scroll_cursor_center);
    cx.add_action(Editor::scroll_cursor_bottom);
    cx.add_action(|this: &mut Editor, _: &LineDown, cx| {
        this.scroll_screen(&ScrollAmount::LineDown, cx)
    });
    cx.add_action(|this: &mut Editor, _: &LineUp, cx| {
        this.scroll_screen(&ScrollAmount::LineUp, cx)
    });
    cx.add_action(|this: &mut Editor, _: &HalfPageDown, cx| {
        this.scroll_screen(&ScrollAmount::HalfPageDown, cx)
    });
    cx.add_action(|this: &mut Editor, _: &HalfPageUp, cx| {
        this.scroll_screen(&ScrollAmount::HalfPageUp, cx)
    });
    cx.add_action(|this: &mut Editor, _: &PageDown, cx| {
        this.scroll_screen(&ScrollAmount::PageDown, cx)
    });
    cx.add_action(|this: &mut Editor, _: &PageUp, cx| {
        this.scroll_screen(&ScrollAmount::PageUp, cx)
    });
}

impl Editor {
    pub fn next_screen(&mut self, _: &NextScreen, cx: &mut ViewContext<Editor>) -> Option<()> {
        if self.take_rename(true, cx).is_some() {
            return None;
        }

        self.context_menu.as_mut()?;

        if matches!(self.mode, EditorMode::SingleLine) {
            cx.propagate_action();
            return None;
        }

        self.request_autoscroll(Autoscroll::Next, cx);

        Some(())
    }

    fn scroll(&mut self, action: &Scroll, cx: &mut ViewContext<Self>) {
        self.scroll_manager.update_ongoing_scroll(action.axis);
        self.set_scroll_position(action.scroll_position, cx);
    }

    fn scroll_cursor_top(editor: &mut Editor, _: &ScrollCursorTop, cx: &mut ViewContext<Editor>) {
        let snapshot = editor.snapshot(cx).display_snapshot;
        let scroll_margin_rows = editor.vertical_scroll_margin() as u32;

        let mut new_screen_top = editor.selections.newest_display(cx).head();
        *new_screen_top.row_mut() = new_screen_top.row().saturating_sub(scroll_margin_rows);
        *new_screen_top.column_mut() = 0;
        let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
        let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);

        editor.set_scroll_anchor(
            ScrollAnchor {
                top_anchor: new_anchor,
                offset: Default::default(),
            },
            cx,
        )
    }

    fn scroll_cursor_center(
        editor: &mut Editor,
        _: &ScrollCursorCenter,
        cx: &mut ViewContext<Editor>,
    ) {
        let snapshot = editor.snapshot(cx).display_snapshot;
        let visible_rows = if let Some(visible_rows) = editor.visible_line_count() {
            visible_rows as u32
        } else {
            return;
        };

        let mut new_screen_top = editor.selections.newest_display(cx).head();
        *new_screen_top.row_mut() = new_screen_top.row().saturating_sub(visible_rows / 2);
        *new_screen_top.column_mut() = 0;
        let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
        let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);

        editor.set_scroll_anchor(
            ScrollAnchor {
                top_anchor: new_anchor,
                offset: Default::default(),
            },
            cx,
        )
    }

    fn scroll_cursor_bottom(
        editor: &mut Editor,
        _: &ScrollCursorBottom,
        cx: &mut ViewContext<Editor>,
    ) {
        let snapshot = editor.snapshot(cx).display_snapshot;
        let scroll_margin_rows = editor.vertical_scroll_margin() as u32;
        let visible_rows = if let Some(visible_rows) = editor.visible_line_count() {
            visible_rows as u32
        } else {
            return;
        };

        let mut new_screen_top = editor.selections.newest_display(cx).head();
        *new_screen_top.row_mut() = new_screen_top
            .row()
            .saturating_sub(visible_rows.saturating_sub(scroll_margin_rows));
        *new_screen_top.column_mut() = 0;
        let new_screen_top = new_screen_top.to_offset(&snapshot, Bias::Left);
        let new_anchor = snapshot.buffer_snapshot.anchor_before(new_screen_top);

        editor.set_scroll_anchor(
            ScrollAnchor {
                top_anchor: new_anchor,
                offset: Default::default(),
            },
            cx,
        )
    }
}
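All three `scroll_cursor_*` handlers share one shape: take the newest cursor's display row, back it off by the scroll margin or visible-row count with `saturating_sub`, and store the result as an anchor rather than a raw row, so the target survives edits above it. A sketch of exercising them, assuming a test harness like the ones in the file_finder tests below (`window_id` and `editor` are hypothetical):

    // Route through the action system, as a keymap binding would:
    cx.dispatch_action(window_id, ScrollCursorCenter);

    // Or drive the shared scroll path directly on the editor view:
    editor.update(cx, |editor, cx| {
        editor.scroll_screen(&ScrollAmount::HalfPageDown, cx);
    });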
crates/editor/src/scroll/autoscroll.rs (new file, 246 lines)

@@ -0,0 +1,246 @@
use std::cmp;

use gpui::{text_layout, ViewContext};
use language::Point;

use crate::{display_map::ToDisplayPoint, Editor, EditorMode};

#[derive(PartialEq, Eq)]
pub enum Autoscroll {
    Next,
    Strategy(AutoscrollStrategy),
}

impl Autoscroll {
    pub fn fit() -> Self {
        Self::Strategy(AutoscrollStrategy::Fit)
    }

    pub fn newest() -> Self {
        Self::Strategy(AutoscrollStrategy::Newest)
    }

    pub fn center() -> Self {
        Self::Strategy(AutoscrollStrategy::Center)
    }
}

#[derive(PartialEq, Eq, Default)]
pub enum AutoscrollStrategy {
    Fit,
    Newest,
    #[default]
    Center,
    Top,
    Bottom,
}

impl AutoscrollStrategy {
    fn next(&self) -> Self {
        match self {
            AutoscrollStrategy::Center => AutoscrollStrategy::Top,
            AutoscrollStrategy::Top => AutoscrollStrategy::Bottom,
            _ => AutoscrollStrategy::Center,
        }
    }
}

impl Editor {
    pub fn autoscroll_vertically(
        &mut self,
        viewport_height: f32,
        line_height: f32,
        cx: &mut ViewContext<Editor>,
    ) -> bool {
        let visible_lines = viewport_height / line_height;
        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let mut scroll_position = self.scroll_manager.scroll_position(&display_map);
        let max_scroll_top = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
            (display_map.max_point().row() as f32 - visible_lines + 1.).max(0.)
        } else {
            display_map.max_point().row() as f32
        };
        if scroll_position.y() > max_scroll_top {
            scroll_position.set_y(max_scroll_top);
            self.set_scroll_position(scroll_position, cx);
        }

        let (autoscroll, local) =
            if let Some(autoscroll) = self.scroll_manager.autoscroll_request.take() {
                autoscroll
            } else {
                return false;
            };

        let first_cursor_top;
        let last_cursor_bottom;
        if let Some(highlighted_rows) = &self.highlighted_rows {
            first_cursor_top = highlighted_rows.start as f32;
            last_cursor_bottom = first_cursor_top + 1.;
        } else if autoscroll == Autoscroll::newest() {
            let newest_selection = self.selections.newest::<Point>(cx);
            first_cursor_top = newest_selection.head().to_display_point(&display_map).row() as f32;
            last_cursor_bottom = first_cursor_top + 1.;
        } else {
            let selections = self.selections.all::<Point>(cx);
            first_cursor_top = selections
                .first()
                .unwrap()
                .head()
                .to_display_point(&display_map)
                .row() as f32;
            last_cursor_bottom = selections
                .last()
                .unwrap()
                .head()
                .to_display_point(&display_map)
                .row() as f32
                + 1.0;
        }

        let margin = if matches!(self.mode, EditorMode::AutoHeight { .. }) {
            0.
        } else {
            ((visible_lines - (last_cursor_bottom - first_cursor_top)) / 2.0).floor()
        };
        if margin < 0.0 {
            return false;
        }

        let strategy = match autoscroll {
            Autoscroll::Strategy(strategy) => strategy,
            Autoscroll::Next => {
                let last_autoscroll = &self.scroll_manager.last_autoscroll;
                if let Some(last_autoscroll) = last_autoscroll {
                    if self.scroll_manager.anchor.offset == last_autoscroll.0
                        && first_cursor_top == last_autoscroll.1
                        && last_cursor_bottom == last_autoscroll.2
                    {
                        last_autoscroll.3.next()
                    } else {
                        AutoscrollStrategy::default()
                    }
                } else {
                    AutoscrollStrategy::default()
                }
            }
        };

        match strategy {
            AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
                let margin = margin.min(self.scroll_manager.vertical_scroll_margin);
                let target_top = (first_cursor_top - margin).max(0.0);
                let target_bottom = last_cursor_bottom + margin;
                let start_row = scroll_position.y();
                let end_row = start_row + visible_lines;

                if target_top < start_row {
                    scroll_position.set_y(target_top);
                    self.set_scroll_position_internal(scroll_position, local, cx);
                } else if target_bottom >= end_row {
                    scroll_position.set_y(target_bottom - visible_lines);
                    self.set_scroll_position_internal(scroll_position, local, cx);
                }
            }
            AutoscrollStrategy::Center => {
                scroll_position.set_y((first_cursor_top - margin).max(0.0));
                self.set_scroll_position_internal(scroll_position, local, cx);
            }
            AutoscrollStrategy::Top => {
                scroll_position.set_y((first_cursor_top).max(0.0));
                self.set_scroll_position_internal(scroll_position, local, cx);
            }
            AutoscrollStrategy::Bottom => {
                scroll_position.set_y((last_cursor_bottom - visible_lines).max(0.0));
                self.set_scroll_position_internal(scroll_position, local, cx);
            }
        }

        self.scroll_manager.last_autoscroll = Some((
            self.scroll_manager.anchor.offset,
            first_cursor_top,
            last_cursor_bottom,
            strategy,
        ));

        true
    }

    pub fn autoscroll_horizontally(
        &mut self,
        start_row: u32,
        viewport_width: f32,
        scroll_width: f32,
        max_glyph_width: f32,
        layouts: &[text_layout::Line],
        cx: &mut ViewContext<Self>,
    ) -> bool {
        let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
        let selections = self.selections.all::<Point>(cx);

        let mut target_left;
        let mut target_right;

        if self.highlighted_rows.is_some() {
            target_left = 0.0_f32;
            target_right = 0.0_f32;
        } else {
            target_left = std::f32::INFINITY;
            target_right = 0.0_f32;
            for selection in selections {
                let head = selection.head().to_display_point(&display_map);
                if head.row() >= start_row && head.row() < start_row + layouts.len() as u32 {
                    let start_column = head.column().saturating_sub(3);
                    let end_column = cmp::min(display_map.line_len(head.row()), head.column() + 3);
                    target_left = target_left.min(
                        layouts[(head.row() - start_row) as usize]
                            .x_for_index(start_column as usize),
                    );
                    target_right = target_right.max(
                        layouts[(head.row() - start_row) as usize].x_for_index(end_column as usize)
                            + max_glyph_width,
                    );
                }
            }
        }

        target_right = target_right.min(scroll_width);

        if target_right - target_left > viewport_width {
            return false;
        }

        let scroll_left = self.scroll_manager.anchor.offset.x() * max_glyph_width;
        let scroll_right = scroll_left + viewport_width;

        if target_left < scroll_left {
            self.scroll_manager
                .anchor
                .offset
                .set_x(target_left / max_glyph_width);
            true
        } else if target_right > scroll_right {
            self.scroll_manager
                .anchor
                .offset
                .set_x((target_right - viewport_width) / max_glyph_width);
            true
        } else {
            false
        }
    }

    pub fn request_autoscroll(&mut self, autoscroll: Autoscroll, cx: &mut ViewContext<Self>) {
        self.scroll_manager.autoscroll_request = Some((autoscroll, true));
        cx.notify();
    }

    pub(crate) fn request_autoscroll_remotely(
        &mut self,
        autoscroll: Autoscroll,
        cx: &mut ViewContext<Self>,
    ) {
        self.scroll_manager.autoscroll_request = Some((autoscroll, false));
        cx.notify();
    }
}
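`Autoscroll::Next` plus `last_autoscroll` is what gives the cycling behavior: if a new request arrives while the scroll offset and cursor rows are unchanged since the last autoscroll, the strategy advances Center -> Top -> Bottom; anything else resets to the default (Center). A sketch, assuming a focused `editor` whose cursor stays put and a layout pass (which runs `autoscroll_vertically`) between requests:

    editor.request_autoscroll(Autoscroll::Next, cx); // resolved as Center at next layout
    editor.request_autoscroll(Autoscroll::Next, cx); // same position: Top
    editor.request_autoscroll(Autoscroll::Next, cx); // same position: Bottom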
crates/editor/src/scroll/scroll_amount.rs (new file, 48 lines)

@@ -0,0 +1,48 @@
use gpui::ViewContext;
use serde::Deserialize;
use util::iife;

use crate::Editor;

#[derive(Clone, PartialEq, Deserialize)]
pub enum ScrollAmount {
    LineUp,
    LineDown,
    HalfPageUp,
    HalfPageDown,
    PageUp,
    PageDown,
}

impl ScrollAmount {
    pub fn move_context_menu_selection(
        &self,
        editor: &mut Editor,
        cx: &mut ViewContext<Editor>,
    ) -> bool {
        iife!({
            let context_menu = editor.context_menu.as_mut()?;

            match self {
                Self::LineDown | Self::HalfPageDown => context_menu.select_next(cx),
                Self::LineUp | Self::HalfPageUp => context_menu.select_prev(cx),
                Self::PageDown => context_menu.select_last(cx),
                Self::PageUp => context_menu.select_first(cx),
            }
            .then_some(())
        })
        .is_some()
    }

    pub fn lines(&self, editor: &mut Editor) -> f32 {
        match self {
            Self::LineDown => 1.,
            Self::LineUp => -1.,
            Self::HalfPageDown => editor.visible_line_count().map(|l| l / 2.).unwrap_or(1.),
            Self::HalfPageUp => -editor.visible_line_count().map(|l| l / 2.).unwrap_or(1.),
            // Minus 1. here so that there is a pivot line that stays on the screen
            Self::PageDown => editor.visible_line_count().unwrap_or(1.) - 1.,
            Self::PageUp => -editor.visible_line_count().unwrap_or(1.) - 1.,
        }
    }
}
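Worked values for `lines`, assuming `editor.visible_line_count() == Some(40.0)`:

    ScrollAmount::LineDown.lines(editor)     ==   1.0
    ScrollAmount::HalfPageDown.lines(editor) ==  20.0   // 40 / 2
    ScrollAmount::PageDown.lines(editor)     ==  39.0   // 40 - 1 pivot line
    ScrollAmount::PageUp.lines(editor)       == -41.0   // -40 - 1

Note that `scroll_screen` in scroll.rs then adds `amount.lines(self) - 1.` to the current position, so the pivot adjustment and that extra `- 1.` combine when these amounts are applied.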
crates/editor/src/selections_collection.rs

@@ -61,7 +61,7 @@ impl SelectionsCollection {
         self.buffer.read(cx).read(cx)
     }

-    pub fn set_state(&mut self, other: &SelectionsCollection) {
+    pub fn clone_state(&mut self, other: &SelectionsCollection) {
         self.next_selection_id = other.next_selection_id;
         self.line_mode = other.line_mode;
         self.disjoint = other.disjoint.clone();
crates/editor/src/test/editor_lsp_test_context.rs

@@ -63,8 +63,15 @@ impl<'a> EditorLspTestContext<'a> {
             .insert_tree("/root", json!({ "dir": { file_name: "" }}))
             .await;

-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(
+                Default::default(),
+                0,
+                project.clone(),
+                |_, _| unimplemented!(),
+                cx,
+            )
+        });
         project
             .update(cx, |project, cx| {
                 project.find_or_create_local_worktree("/root", true, cx)
crates/file_finder/src/file_finder.rs

@@ -316,8 +316,9 @@ mod tests {
             .await;

         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (window_id, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (window_id, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         cx.dispatch_action(window_id, Toggle);

         let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());

@@ -371,8 +372,9 @@ mod tests {
             .await;

         let project = Project::test(app_state.fs.clone(), ["/dir".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));

@@ -446,8 +448,9 @@ mod tests {
             cx,
         )
         .await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
         finder

@@ -471,8 +474,9 @@ mod tests {
             cx,
         )
         .await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));

@@ -524,8 +528,9 @@ mod tests {
             cx,
         )
         .await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));

@@ -563,8 +568,9 @@ mod tests {
             .await;

         let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
-        let (_, workspace) =
-            cx.add_window(|cx| Workspace::new(project, |_, _| unimplemented!(), cx));
+        let (_, workspace) = cx.add_window(|cx| {
+            Workspace::new(Default::default(), 0, project, |_, _| unimplemented!(), cx)
+        });
         let (_, finder) =
             cx.add_window(|cx| FileFinder::new(workspace.read(cx).project().clone(), cx));
         finder
crates/go_to_line/src/go_to_line.rs

@@ -1,6 +1,6 @@
 use std::sync::Arc;

-use editor::{display_map::ToDisplayPoint, Autoscroll, DisplayPoint, Editor};
+use editor::{display_map::ToDisplayPoint, scroll::autoscroll::Autoscroll, DisplayPoint, Editor};
 use gpui::{
     actions, elements::*, geometry::vector::Vector2F, AnyViewHandle, Axis, Entity,
     MutableAppContext, RenderContext, View, ViewContext, ViewHandle,
crates/gpui/Cargo.toml

@@ -17,6 +17,7 @@ collections = { path = "../collections" }
 gpui_macros = { path = "../gpui_macros" }
 util = { path = "../util" }
 sum_tree = { path = "../sum_tree" }
+sqlez = { path = "../sqlez" }
 async-task = "4.0.3"
 backtrace = { version = "0.3", optional = true }
 ctor = "0.1"
@@ -1,10 +1,10 @@
-#include "nan.h"
 #include "tree_sitter/parser.h"
 #include <node.h>
+#include "nan.h"

 using namespace v8;

-extern "C" TSLanguage * tree_sitter_context_predicate();
+extern "C" TSLanguage *tree_sitter_context_predicate();

 namespace {

@@ -16,13 +16,15 @@ void Init(Local<Object> exports, Local<Object> module) {
   tpl->InstanceTemplate()->SetInternalFieldCount(1);

   Local<Function> constructor = Nan::GetFunction(tpl).ToLocalChecked();
-  Local<Object> instance = constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
+  Local<Object> instance =
+      constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
   Nan::SetInternalFieldPointer(instance, 0, tree_sitter_context_predicate());

-  Nan::Set(instance, Nan::New("name").ToLocalChecked(), Nan::New("context_predicate").ToLocalChecked());
+  Nan::Set(instance, Nan::New("name").ToLocalChecked(),
+           Nan::New("context_predicate").ToLocalChecked());
   Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
 }

 NODE_MODULE(tree_sitter_context_predicate_binding, Init)

-}  // namespace
+} // namespace
crates/gpui/src/app.rs

@@ -594,6 +594,9 @@ type ReleaseObservationCallback = Box<dyn FnOnce(&dyn Any, &mut MutableAppContext)>;
 type ActionObservationCallback = Box<dyn FnMut(TypeId, &mut MutableAppContext)>;
 type WindowActivationCallback = Box<dyn FnMut(bool, &mut MutableAppContext) -> bool>;
 type WindowFullscreenCallback = Box<dyn FnMut(bool, &mut MutableAppContext) -> bool>;
+type KeystrokeCallback = Box<
+    dyn FnMut(&Keystroke, &MatchResult, Option<&Box<dyn Action>>, &mut MutableAppContext) -> bool,
+>;
 type DeserializeActionCallback = fn(json: &str) -> anyhow::Result<Box<dyn Action>>;
 type WindowShouldCloseSubscriptionCallback = Box<dyn FnMut(&mut MutableAppContext) -> bool>;

@@ -619,6 +622,7 @@ pub struct MutableAppContext {
     observations: CallbackCollection<usize, ObservationCallback>,
     window_activation_observations: CallbackCollection<usize, WindowActivationCallback>,
     window_fullscreen_observations: CallbackCollection<usize, WindowFullscreenCallback>,
+    keystroke_observations: CallbackCollection<usize, KeystrokeCallback>,

     release_observations: Arc<Mutex<HashMap<usize, BTreeMap<usize, ReleaseObservationCallback>>>>,
     action_dispatch_observations: Arc<Mutex<BTreeMap<usize, ActionObservationCallback>>>,

@@ -678,6 +682,7 @@ impl MutableAppContext {
             global_observations: Default::default(),
             window_activation_observations: Default::default(),
             window_fullscreen_observations: Default::default(),
+            keystroke_observations: Default::default(),
             action_dispatch_observations: Default::default(),
             presenters_and_platform_windows: Default::default(),
             foreground,

@@ -763,11 +768,11 @@ impl MutableAppContext {
                 .with_context(|| format!("invalid data for action {}", name))
     }

-    pub fn add_action<A, V, F>(&mut self, handler: F)
+    pub fn add_action<A, V, F, R>(&mut self, handler: F)
     where
         A: Action,
         V: View,
-        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>),
+        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>) -> R,
     {
         self.add_action_internal(handler, false)
     }

@@ -781,11 +786,11 @@ impl MutableAppContext {
         self.add_action_internal(handler, true)
     }

-    fn add_action_internal<A, V, F>(&mut self, mut handler: F, capture: bool)
+    fn add_action_internal<A, V, F, R>(&mut self, mut handler: F, capture: bool)
     where
         A: Action,
         V: View,
-        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>),
+        F: 'static + FnMut(&mut V, &A, &mut ViewContext<V>) -> R,
     {
         let handler = Box::new(
             move |view: &mut dyn AnyView,
@@ -1255,6 +1260,27 @@ impl MutableAppContext {
         }
     }

+    pub fn observe_keystrokes<F>(&mut self, window_id: usize, callback: F) -> Subscription
+    where
+        F: 'static
+            + FnMut(
+                &Keystroke,
+                &MatchResult,
+                Option<&Box<dyn Action>>,
+                &mut MutableAppContext,
+            ) -> bool,
+    {
+        let subscription_id = post_inc(&mut self.next_subscription_id);
+        self.keystroke_observations
+            .add_callback(window_id, subscription_id, Box::new(callback));
+
+        Subscription::KeystrokeObservation {
+            id: subscription_id,
+            window_id,
+            observations: Some(self.keystroke_observations.downgrade()),
+        }
+    }
+
     pub fn defer(&mut self, callback: impl 'static + FnOnce(&mut MutableAppContext)) {
         self.pending_effects.push_back(Effect::Deferred {
             callback: Box::new(callback),
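A minimal usage sketch of the new observer, assuming a `window_id` hosting the focused view (the `log` call is illustrative):

    let _subscription = cx.observe_keystrokes(window_id, |keystroke, _result, handled_by, _cx| {
        // `handled_by` is Some when the keystroke resolved to a dispatched action.
        if let Some(action) = handled_by {
            log::info!("{:?} dispatched {}", keystroke, action.name());
        }
        true // returning true keeps the observer registered
    });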
@@ -1405,8 +1431,8 @@ impl MutableAppContext {
         true
     }

-    // Returns an iterator over all of the view ids from the passed view up to the root of the window
-    // Includes the passed view itself
+    /// Returns an iterator over all of the view ids from the passed view up to the root of the window
+    /// Includes the passed view itself
     fn ancestors(&self, window_id: usize, mut view_id: usize) -> impl Iterator<Item = usize> + '_ {
         std::iter::once(view_id)
             .into_iter()

@@ -1538,27 +1564,39 @@ impl MutableAppContext {
                 })
                 .collect();

-            match self
+            let match_result = self
                 .keystroke_matcher
-                .push_keystroke(keystroke.clone(), dispatch_path)
-            {
+                .push_keystroke(keystroke.clone(), dispatch_path);
+            let mut handled_by = None;
+
+            let keystroke_handled = match &match_result {
                 MatchResult::None => false,
                 MatchResult::Pending => true,
                 MatchResult::Matches(matches) => {
                     for (view_id, action) in matches {
                         if self.handle_dispatch_action_from_effect(
                             window_id,
-                            Some(view_id),
+                            Some(*view_id),
                             action.as_ref(),
                         ) {
                             self.keystroke_matcher.clear_pending();
-                            return true;
+                            handled_by = Some(action.boxed_clone());
+                            break;
                         }
                     }
-                    false
+                    handled_by.is_some()
                 }
-            }
+            };
+
+            self.keystroke(
+                window_id,
+                keystroke.clone(),
+                handled_by,
+                match_result.clone(),
+            );
+            keystroke_handled
         } else {
+            self.keystroke(window_id, keystroke.clone(), None, MatchResult::None);
             false
         }
     }

@@ -2110,6 +2148,12 @@ impl MutableAppContext {
             } => {
                 self.handle_window_should_close_subscription_effect(window_id, callback)
             }
+            Effect::Keystroke {
+                window_id,
+                keystroke,
+                handled_by,
+                result,
+            } => self.handle_keystroke_effect(window_id, keystroke, handled_by, result),
         }
         self.pending_notifications.clear();
         self.remove_dropped_entities();

@@ -2188,6 +2232,21 @@ impl MutableAppContext {
         });
     }

+    fn keystroke(
+        &mut self,
+        window_id: usize,
+        keystroke: Keystroke,
+        handled_by: Option<Box<dyn Action>>,
+        result: MatchResult,
+    ) {
+        self.pending_effects.push_back(Effect::Keystroke {
+            window_id,
+            keystroke,
+            handled_by,
+            result,
+        });
+    }
+
     pub fn refresh_windows(&mut self) {
         self.pending_effects.push_back(Effect::RefreshWindows);
     }

@@ -2299,6 +2358,21 @@ impl MutableAppContext {
         });
     }

+    fn handle_keystroke_effect(
+        &mut self,
+        window_id: usize,
+        keystroke: Keystroke,
+        handled_by: Option<Box<dyn Action>>,
+        result: MatchResult,
+    ) {
+        self.update(|this| {
+            let mut observations = this.keystroke_observations.clone();
+            observations.emit_and_cleanup(window_id, this, {
+                move |callback, this| callback(&keystroke, &result, handled_by.as_ref(), this)
+            });
+        });
+    }
+
     fn handle_window_activation_effect(&mut self, window_id: usize, active: bool) {
         //Short circuit evaluation if we're already g2g
         if self

@@ -2852,6 +2926,12 @@ pub enum Effect {
         subscription_id: usize,
         callback: WindowFullscreenCallback,
     },
+    Keystroke {
+        window_id: usize,
+        keystroke: Keystroke,
+        handled_by: Option<Box<dyn Action>>,
+        result: MatchResult,
+    },
     RefreshWindows,
     DispatchActionFrom {
         window_id: usize,

@@ -2995,6 +3075,21 @@ impl Debug for Effect {
                 .debug_struct("Effect::WindowShouldCloseSubscription")
                 .field("window_id", window_id)
                 .finish(),
+            Effect::Keystroke {
+                window_id,
+                keystroke,
+                handled_by,
+                result,
+            } => f
+                .debug_struct("Effect::Keystroke")
+                .field("window_id", window_id)
+                .field("keystroke", keystroke)
+                .field(
+                    "handled_by",
+                    &handled_by.as_ref().map(|handled_by| handled_by.name()),
+                )
+                .field("result", result)
+                .finish(),
         }
     }
 }

@@ -3600,6 +3695,7 @@ impl<'a, T: View> ViewContext<'a, T> {
             return false;
         }
         self.ancestors(view.window_id, view.view_id)
+            .skip(1) // Skip self id
            .any(|parent| parent == self.view_id)
     }

@@ -3826,6 +3922,33 @@ impl<'a, T: View> ViewContext<'a, T> {
         })
     }

+    pub fn observe_keystroke<F>(&mut self, mut callback: F) -> Subscription
+    where
+        F: 'static
+            + FnMut(
+                &mut T,
+                &Keystroke,
+                Option<&Box<dyn Action>>,
+                &MatchResult,
+                &mut ViewContext<T>,
+            ) -> bool,
+    {
+        let observer = self.weak_handle();
+        self.app.observe_keystrokes(
+            self.window_id(),
+            move |keystroke, result, handled_by, cx| {
+                if let Some(observer) = observer.upgrade(cx) {
+                    observer.update(cx, |observer, cx| {
+                        callback(observer, keystroke, handled_by, result, cx);
+                    });
+                    true
+                } else {
+                    false
+                }
+            },
+        )
+    }
+
     pub fn emit(&mut self, payload: T::Event) {
         self.app.pending_effects.push_back(Effect::Event {
             entity_id: self.view_id,

@@ -5018,6 +5141,11 @@ pub enum Subscription {
         window_id: usize,
         observations: Option<Weak<Mapping<usize, WindowFullscreenCallback>>>,
     },
+    KeystrokeObservation {
+        id: usize,
+        window_id: usize,
+        observations: Option<Weak<Mapping<usize, KeystrokeCallback>>>,
+    },

     ReleaseObservation {
         id: usize,

@@ -5056,6 +5184,9 @@ impl Subscription {
             Subscription::ActionObservation { observations, .. } => {
                 observations.take();
             }
+            Subscription::KeystrokeObservation { observations, .. } => {
+                observations.take();
+            }
             Subscription::WindowActivationObservation { observations, .. } => {
                 observations.take();
             }

@@ -5175,6 +5306,27 @@ impl Drop for Subscription {
                     observations.lock().remove(id);
                 }
             }
+            Subscription::KeystrokeObservation {
+                id,
+                window_id,
+                observations,
+            } => {
+                if let Some(observations) = observations.as_ref().and_then(Weak::upgrade) {
+                    match observations
+                        .lock()
+                        .entry(*window_id)
+                        .or_default()
+                        .entry(*id)
+                    {
+                        btree_map::Entry::Vacant(entry) => {
+                            entry.insert(None);
+                        }
+                        btree_map::Entry::Occupied(entry) => {
+                            entry.remove();
+                        }
+                    }
+                }
+            }
             Subscription::WindowActivationObservation {
                 id,
                 window_id,
crates/gpui/src/elements/tooltip.rs

@@ -115,6 +115,7 @@ impl Tooltip {
                 } else {
                     state.visible.set(false);
+                    state.debounce.take();
                     cx.notify();
                 }
             }
         })
crates/gpui/src/executor.rs

@@ -66,21 +66,32 @@ struct DeterministicState {
     rng: rand::prelude::StdRng,
     seed: u64,
     scheduled_from_foreground: collections::HashMap<usize, Vec<ForegroundRunnable>>,
-    scheduled_from_background: Vec<Runnable>,
+    scheduled_from_background: Vec<BackgroundRunnable>,
     forbid_parking: bool,
     block_on_ticks: std::ops::RangeInclusive<usize>,
     now: std::time::Instant,
     next_timer_id: usize,
     pending_timers: Vec<(usize, std::time::Instant, postage::barrier::Sender)>,
     waiting_backtrace: Option<backtrace::Backtrace>,
+    next_runnable_id: usize,
+    poll_history: Vec<usize>,
+    enable_runnable_backtraces: bool,
+    runnable_backtraces: collections::HashMap<usize, backtrace::Backtrace>,
 }

 #[cfg(any(test, feature = "test-support"))]
 struct ForegroundRunnable {
+    id: usize,
     runnable: Runnable,
     main: bool,
 }

+#[cfg(any(test, feature = "test-support"))]
+struct BackgroundRunnable {
+    id: usize,
+    runnable: Runnable,
+}
+
 #[cfg(any(test, feature = "test-support"))]
 pub struct Deterministic {
     state: Arc<parking_lot::Mutex<DeterministicState>>,

@@ -117,11 +128,29 @@ impl Deterministic {
                 next_timer_id: Default::default(),
                 pending_timers: Default::default(),
                 waiting_backtrace: None,
+                next_runnable_id: 0,
+                poll_history: Default::default(),
+                enable_runnable_backtraces: false,
+                runnable_backtraces: Default::default(),
             })),
             parker: Default::default(),
         })
     }

+    pub fn runnable_history(&self) -> Vec<usize> {
+        self.state.lock().poll_history.clone()
+    }
+
+    pub fn enable_runnable_backtrace(&self) {
+        self.state.lock().enable_runnable_backtraces = true;
+    }
+
+    pub fn runnable_backtrace(&self, runnable_id: usize) -> backtrace::Backtrace {
+        let mut backtrace = self.state.lock().runnable_backtraces[&runnable_id].clone();
+        backtrace.resolve();
+        backtrace
+    }
+
     pub fn build_background(self: &Arc<Self>) -> Arc<Background> {
         Arc::new(Background::Deterministic {
             executor: self.clone(),

@@ -142,6 +171,17 @@ impl Deterministic {
         main: bool,
     ) -> AnyLocalTask {
         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, task) = async_task::spawn_local(future, move |runnable| {
             let mut state = state.lock();

@@ -149,7 +189,7 @@ impl Deterministic {
                 .scheduled_from_foreground
                 .entry(cx_id)
                 .or_default()
-                .push(ForegroundRunnable { runnable, main });
+                .push(ForegroundRunnable { id, runnable, main });
             unparker.unpark();
         });
         runnable.schedule();

@@ -158,10 +198,23 @@ impl Deterministic {

     fn spawn(&self, future: AnyFuture) -> AnyTask {
         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, task) = async_task::spawn(future, move |runnable| {
             let mut state = state.lock();
-            state.scheduled_from_background.push(runnable);
+            state
+                .scheduled_from_background
+                .push(BackgroundRunnable { id, runnable });
             unparker.unpark();
         });
         runnable.schedule();

@@ -178,15 +231,27 @@ impl Deterministic {
         let woken = Arc::new(AtomicBool::new(false));

         let state = self.state.clone();
+        let id;
+        {
+            let mut state = state.lock();
+            id = util::post_inc(&mut state.next_runnable_id);
+            if state.enable_runnable_backtraces {
+                state
+                    .runnable_backtraces
+                    .insert(id, backtrace::Backtrace::new_unresolved());
+            }
+        }
+
         let unparker = self.parker.lock().unparker();
         let (runnable, mut main_task) = unsafe {
             async_task::spawn_unchecked(main_future, move |runnable| {
-                let mut state = state.lock();
+                let state = &mut *state.lock();
                 state
                     .scheduled_from_foreground
                     .entry(cx_id)
                     .or_default()
+                    .push(ForegroundRunnable {
+                        id: util::post_inc(&mut state.next_runnable_id),
+                        runnable,
+                        main: true,
+                    });

@@ -248,9 +313,10 @@ impl Deterministic {
         if !state.scheduled_from_background.is_empty() && state.rng.gen() {
             let background_len = state.scheduled_from_background.len();
             let ix = state.rng.gen_range(0..background_len);
-            let runnable = state.scheduled_from_background.remove(ix);
+            let background_runnable = state.scheduled_from_background.remove(ix);
+            state.poll_history.push(background_runnable.id);
             drop(state);
-            runnable.run();
+            background_runnable.runnable.run();
         } else if !state.scheduled_from_foreground.is_empty() {
             let available_cx_ids = state
                 .scheduled_from_foreground

@@ -266,6 +332,7 @@ impl Deterministic {
             if scheduled_from_cx.is_empty() {
                 state.scheduled_from_foreground.remove(&cx_id_to_run);
             }
+            state.poll_history.push(foreground_runnable.id);

             drop(state);
@ -298,9 +365,10 @@ impl Deterministic {
|
|||
let runnable_count = state.scheduled_from_background.len();
|
||||
let ix = state.rng.gen_range(0..=runnable_count);
|
||||
if ix < state.scheduled_from_background.len() {
|
||||
let runnable = state.scheduled_from_background.remove(ix);
|
||||
let background_runnable = state.scheduled_from_background.remove(ix);
|
||||
state.poll_history.push(background_runnable.id);
|
||||
drop(state);
|
||||
runnable.run();
|
||||
background_runnable.runnable.run();
|
||||
} else {
|
||||
drop(state);
|
||||
if let Poll::Ready(result) = future.poll(&mut cx) {
|
||||
|
|
|
@@ -112,6 +112,21 @@ impl PartialEq for MatchResult {

impl Eq for MatchResult {}

impl Clone for MatchResult {
    fn clone(&self) -> Self {
        match self {
            MatchResult::None => MatchResult::None,
            MatchResult::Pending => MatchResult::Pending,
            MatchResult::Matches(matches) => MatchResult::Matches(
                matches
                    .iter()
                    .map(|(view_id, action)| (*view_id, Action::boxed_clone(action.as_ref())))
                    .collect(),
            ),
        }
    }
}

impl Matcher {
    pub fn new(keymap: Keymap) -> Self {
        Self {

@@ -17,10 +17,15 @@ use crate::{
    SceneBuilder, UpgradeModelHandle, UpgradeViewHandle, View, ViewHandle, WeakModelHandle,
    WeakViewHandle,
};
use anyhow::bail;
use collections::{HashMap, HashSet};
use pathfinder_geometry::vector::{vec2f, Vector2F};
use serde_json::json;
use smallvec::SmallVec;
use sqlez::{
    bindable::{Bind, Column},
    statement::Statement,
};
use std::{
    marker::PhantomData,
    ops::{Deref, DerefMut, Range},
@@ -863,8 +868,9 @@ pub struct DebugContext<'a> {
    pub app: &'a AppContext,
}

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
pub enum Axis {
    #[default]
    Horizontal,
    Vertical,
}
@@ -894,6 +900,31 @@ impl ToJson for Axis {
    }
}

impl Bind for Axis {
    fn bind(&self, statement: &Statement, start_index: i32) -> anyhow::Result<i32> {
        match self {
            Axis::Horizontal => "Horizontal",
            Axis::Vertical => "Vertical",
        }
        .bind(statement, start_index)
    }
}

impl Column for Axis {
    fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
        String::column(statement, start_index).and_then(|(axis_text, next_index)| {
            Ok((
                match axis_text.as_str() {
                    "Horizontal" => Axis::Horizontal,
                    "Vertical" => Axis::Vertical,
                    _ => bail!("Stored serialized item kind is incorrect"),
                },
                next_index,
            ))
        })
    }
}

pub trait Vector2FExt {
    fn along(self, axis: Axis) -> f32;
}

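An aside on the `Bind`/`Column` implementations for `Axis` above: any fieldless enum can be persisted through `sqlez` the same way, by round-tripping through a string column. A minimal sketch under the trait signatures visible in this hunk; the `Orientation` enum is hypothetical and used only to illustrate the pattern:

```rust
use anyhow::bail;
use sqlez::{
    bindable::{Bind, Column},
    statement::Statement,
};

// Hypothetical enum, not part of this change.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Orientation {
    Portrait,
    Landscape,
}

impl Bind for Orientation {
    fn bind(&self, statement: &Statement, start_index: i32) -> anyhow::Result<i32> {
        // Delegate to the existing `Bind` impl for strings, exactly as `Axis` does.
        match self {
            Orientation::Portrait => "Portrait",
            Orientation::Landscape => "Landscape",
        }
        .bind(statement, start_index)
    }
}

impl Column for Orientation {
    fn column(statement: &mut Statement, start_index: i32) -> anyhow::Result<(Self, i32)> {
        String::column(statement, start_index).and_then(|(text, next_index)| {
            Ok((
                match text.as_str() {
                    "Portrait" => Orientation::Portrait,
                    "Landscape" => Orientation::Landscape,
                    _ => bail!("unrecognized serialized Orientation: {text}"),
                },
                next_index,
            ))
        })
    }
}
```
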
@@ -1,11 +1,13 @@
use crate::{
    elements::Empty, executor, platform, Element, ElementBox, Entity, FontCache, Handle,
    LeakDetector, MutableAppContext, Platform, RenderContext, Subscription, TestAppContext, View,
    elements::Empty, executor, platform, util::CwdBacktrace, Element, ElementBox, Entity,
    FontCache, Handle, LeakDetector, MutableAppContext, Platform, RenderContext, Subscription,
    TestAppContext, View,
};
use futures::StreamExt;
use parking_lot::Mutex;
use smol::channel;
use std::{
    fmt::Write,
    panic::{self, RefUnwindSafe},
    rc::Rc,
    sync::{
@@ -29,13 +31,13 @@ pub fn run_test(
    mut num_iterations: u64,
    mut starting_seed: u64,
    max_retries: usize,
    detect_nondeterminism: bool,
    test_fn: &mut (dyn RefUnwindSafe
        + Fn(
            &mut MutableAppContext,
            Rc<platform::test::ForegroundPlatform>,
            Arc<executor::Deterministic>,
            u64,
            bool,
        )),
    fn_name: String,
) {
@@ -60,16 +62,20 @@ pub fn run_test(
        let platform = Arc::new(platform::test::platform());
        let font_system = platform.fonts();
        let font_cache = Arc::new(FontCache::new(font_system));
        let mut prev_runnable_history: Option<Vec<usize>> = None;

        loop {
            let seed = atomic_seed.fetch_add(1, SeqCst);
            let is_last_iteration = seed + 1 >= starting_seed + num_iterations;
        for _ in 0..num_iterations {
            let seed = atomic_seed.load(SeqCst);

            if is_randomized {
                dbg!(seed);
            }

            let deterministic = executor::Deterministic::new(seed);
            if detect_nondeterminism {
                deterministic.enable_runnable_backtrace();
            }

            let leak_detector = Arc::new(Mutex::new(LeakDetector::default()));
            let mut cx = TestAppContext::new(
                foreground_platform.clone(),
@@ -82,13 +88,7 @@ pub fn run_test(
                fn_name.clone(),
            );
            cx.update(|cx| {
                test_fn(
                    cx,
                    foreground_platform.clone(),
                    deterministic.clone(),
                    seed,
                    is_last_iteration,
                );
                test_fn(cx, foreground_platform.clone(), deterministic.clone(), seed);
            });

            cx.update(|cx| cx.remove_all_windows());
@@ -96,8 +96,64 @@ pub fn run_test(
            cx.update(|cx| cx.clear_globals());

            leak_detector.lock().detect();
            if is_last_iteration {
                break;

            if detect_nondeterminism {
                let curr_runnable_history = deterministic.runnable_history();
                if let Some(prev_runnable_history) = prev_runnable_history {
                    let mut prev_entries = prev_runnable_history.iter().fuse();
                    let mut curr_entries = curr_runnable_history.iter().fuse();

                    let mut nondeterministic = false;
                    let mut common_history_prefix = Vec::new();
                    let mut prev_history_suffix = Vec::new();
                    let mut curr_history_suffix = Vec::new();
                    loop {
                        match (prev_entries.next(), curr_entries.next()) {
                            (None, None) => break,
                            (None, Some(curr_id)) => curr_history_suffix.push(*curr_id),
                            (Some(prev_id), None) => prev_history_suffix.push(*prev_id),
                            (Some(prev_id), Some(curr_id)) => {
                                if nondeterministic {
                                    prev_history_suffix.push(*prev_id);
                                    curr_history_suffix.push(*curr_id);
                                } else if prev_id == curr_id {
                                    common_history_prefix.push(*curr_id);
                                } else {
                                    nondeterministic = true;
                                    prev_history_suffix.push(*prev_id);
                                    curr_history_suffix.push(*curr_id);
                                }
                            }
                        }
                    }

                    if nondeterministic {
                        let mut error = String::new();
                        writeln!(&mut error, "Common prefix: {:?}", common_history_prefix)
                            .unwrap();
                        writeln!(&mut error, "Previous suffix: {:?}", prev_history_suffix)
                            .unwrap();
                        writeln!(&mut error, "Current suffix: {:?}", curr_history_suffix)
                            .unwrap();

                        let last_common_backtrace = common_history_prefix
                            .last()
                            .map(|runnable_id| deterministic.runnable_backtrace(*runnable_id));

                        writeln!(
                            &mut error,
                            "Last future that ran on both executions: {:?}",
                            last_common_backtrace.as_ref().map(CwdBacktrace)
                        )
                        .unwrap();
                        panic!("Detected non-determinism.\n{}", error);
                    }
                }
                prev_runnable_history = Some(curr_runnable_history);
            }

            if !detect_nondeterminism {
                atomic_seed.fetch_add(1, SeqCst);
            }
        }
    });
@@ -112,7 +168,7 @@ pub fn run_test(
            println!("retrying: attempt {}", retries);
        } else {
            if is_randomized {
                eprintln!("failing seed: {}", atomic_seed.load(SeqCst) - 1);
                eprintln!("failing seed: {}", atomic_seed.load(SeqCst));
            }
            panic::resume_unwind(error);
        }

@@ -12,3 +12,4 @@ doctest = false
syn = "1.0"
quote = "1.0"
proc-macro2 = "1.0"

@@ -14,6 +14,7 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
    let mut max_retries = 0;
    let mut num_iterations = 1;
    let mut starting_seed = 0;
    let mut detect_nondeterminism = false;

    for arg in args {
        match arg {
@@ -26,6 +27,9 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                let key_name = meta.path.get_ident().map(|i| i.to_string());
                let result = (|| {
                    match key_name.as_deref() {
                        Some("detect_nondeterminism") => {
                            detect_nondeterminism = parse_bool(&meta.lit)?
                        }
                        Some("retries") => max_retries = parse_int(&meta.lit)?,
                        Some("iterations") => num_iterations = parse_int(&meta.lit)?,
                        Some("seed") => starting_seed = parse_int(&meta.lit)?,
@@ -77,10 +81,6 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                        inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),));
                        continue;
                    }
                    Some("bool") => {
                        inner_fn_args.extend(quote!(is_last_iteration,));
                        continue;
                    }
                    Some("Arc") => {
                        if let syn::PathArguments::AngleBracketed(args) =
                            &last_segment.unwrap().arguments
@@ -146,7 +146,8 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                #num_iterations as u64,
                #starting_seed as u64,
                #max_retries,
                &mut |cx, foreground_platform, deterministic, seed, is_last_iteration| {
                #detect_nondeterminism,
                &mut |cx, foreground_platform, deterministic, seed| {
                    #cx_vars
                    cx.foreground().run(#inner_fn_name(#inner_fn_args));
                    #cx_teardowns
@@ -165,9 +166,6 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                    Some("StdRng") => {
                        inner_fn_args.extend(quote!(rand::SeedableRng::seed_from_u64(seed),));
                    }
                    Some("bool") => {
                        inner_fn_args.extend(quote!(is_last_iteration,));
                    }
                    _ => {}
                }
            } else {
@@ -189,7 +187,8 @@ pub fn test(args: TokenStream, function: TokenStream) -> TokenStream {
                #num_iterations as u64,
                #starting_seed as u64,
                #max_retries,
                &mut |cx, _, _, seed, is_last_iteration| #inner_fn_name(#inner_fn_args),
                #detect_nondeterminism,
                &mut |cx, _, _, seed| #inner_fn_name(#inner_fn_args),
                stringify!(#outer_fn_name).to_string(),
            );
        }
@@ -209,3 +208,13 @@ fn parse_int(literal: &Lit) -> Result<usize, TokenStream> {

    result.map_err(|err| TokenStream::from(err.into_compile_error()))
}

fn parse_bool(literal: &Lit) -> Result<bool, TokenStream> {
    let result = if let Lit::Bool(result) = &literal {
        Ok(result.value)
    } else {
        Err(syn::Error::new(literal.span(), "must be a boolean"))
    };

    result.map_err(|err| TokenStream::from(err.into_compile_error()))
}

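For readers following the macro change: the new `detect_nondeterminism` key is parsed with `parse_bool` and threaded through to `run_test`, which then pins the seed and diffs the runnable poll history between iterations. A hypothetical usage sketch — the test body is illustrative, not from this commit:

```rust
use gpui::TestAppContext;

// With `detect_nondeterminism = true`, `run_test` keeps the same seed for
// every iteration and compares `Deterministic::runnable_history()` across
// runs, panicking with the divergent suffixes and the backtrace of the last
// future that both executions polled in the same order.
#[gpui::test(iterations = 25, detect_nondeterminism = true)]
async fn test_scheduling_is_deterministic(cx: &mut TestAppContext) {
    let task = cx.foreground().spawn(async { 2 + 2 });
    assert_eq!(task.await, 4);
}
```
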
@@ -1,5 +1,5 @@
use chrono::{Datelike, Local, NaiveTime, Timelike};
use editor::{Autoscroll, Editor};
use editor::{scroll::autoscroll::Autoscroll, Editor};
use gpui::{actions, MutableAppContext};
use settings::{HourFormat, Settings};
use std::{
@@ -115,7 +115,7 @@ mod tests {

    #[test]
    fn test_heading_entry_defaults_to_hour_12() {
        let naive_time = NaiveTime::from_hms_milli(15, 0, 0, 0);
        let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
        let actual_heading_entry = heading_entry(naive_time, &None);
        let expected_heading_entry = "# 3:00 PM";

@@ -124,7 +124,7 @@ mod tests {

    #[test]
    fn test_heading_entry_is_hour_12() {
        let naive_time = NaiveTime::from_hms_milli(15, 0, 0, 0);
        let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
        let actual_heading_entry = heading_entry(naive_time, &Some(HourFormat::Hour12));
        let expected_heading_entry = "# 3:00 PM";

@@ -133,7 +133,7 @@ mod tests {

    #[test]
    fn test_heading_entry_is_hour_24() {
        let naive_time = NaiveTime::from_hms_milli(15, 0, 0, 0);
        let naive_time = NaiveTime::from_hms_milli_opt(15, 0, 0, 0).unwrap();
        let actual_heading_entry = heading_entry(naive_time, &Some(HourFormat::Hour24));
        let expected_heading_entry = "# 15:00";

@@ -1,6 +1,6 @@
use editor::{
    combine_syntax_and_fuzzy_match_highlights, display_map::ToDisplayPoint, Anchor, AnchorRangeExt,
    Autoscroll, DisplayPoint, Editor, ToPoint,
    combine_syntax_and_fuzzy_match_highlights, display_map::ToDisplayPoint,
    scroll::autoscroll::Autoscroll, Anchor, AnchorRangeExt, DisplayPoint, Editor, ToPoint,
};
use fuzzy::StringMatch;
use gpui::{

@@ -32,6 +32,7 @@ lsp = { path = "../lsp" }
rpc = { path = "../rpc" }
settings = { path = "../settings" }
sum_tree = { path = "../sum_tree" }
terminal = { path = "../terminal" }
util = { path = "../util" }
aho-corasick = "0.7"
anyhow = "1.0.57"

@@ -10,7 +10,11 @@ use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt};
use futures::{
    channel::{mpsc, oneshot},
    future::Shared,
    AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
};

use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
@@ -45,12 +49,10 @@ use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    num::NonZeroU32,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    str,
@@ -60,10 +62,10 @@ use std::{
    },
    time::Instant,
};
use terminal::{Terminal, TerminalBuilder};
use thiserror::Error;
use util::{defer, post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;

@@ -71,10 +73,6 @@ pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

pub struct ProjectStore {
    projects: Vec<WeakModelHandle<Project>>,
}

// Language server state is stored across 3 collections:
//     language_servers =>
//         a mapping from unique server id to LanguageServerState which can either be a task for a
@@ -103,7 +101,6 @@ pub struct Project {
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: Option<ProjectClientState>,
    collaborators: HashMap<PeerId, Collaborator>,
@@ -153,6 +150,8 @@ enum WorktreeHandle {
enum ProjectClientState {
    Local {
        remote_id: u64,
        metadata_changed: mpsc::UnboundedSender<oneshot::Sender<()>>,
        _maintain_metadata: Task<()>,
        _detect_unshare: Task<Option<()>>,
    },
    Remote {
@@ -413,46 +412,39 @@ impl Project {
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                incomplete_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: None,
                opened_buffer: watch::channel(),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
                _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                buffers_being_formatted: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
            }
        cx.add_model(|cx: &mut ModelContext<Self>| Self {
            worktrees: Default::default(),
            collaborators: Default::default(),
            opened_buffers: Default::default(),
            shared_buffers: Default::default(),
            incomplete_buffers: Default::default(),
            loading_buffers: Default::default(),
            loading_local_worktrees: Default::default(),
            buffer_snapshots: Default::default(),
            client_state: None,
            opened_buffer: watch::channel(),
            client_subscriptions: Vec::new(),
            _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
            _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
            active_entry: None,
            languages,
            client,
            user_store,
            fs,
            next_entry_id: Default::default(),
            next_diagnostic_group_id: Default::default(),
            language_servers: Default::default(),
            language_server_ids: Default::default(),
            language_server_statuses: Default::default(),
            last_workspace_edits_by_language_server: Default::default(),
            language_server_settings: Default::default(),
            buffers_being_formatted: Default::default(),
            next_language_server_id: 0,
            nonce: StdRng::from_entropy().gen(),
        })
    }

@@ -460,31 +452,28 @@ impl Project {
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let subscription = client.subscribe_to_entity(remote_id);
        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;
        let this = cx.add_model(|cx| {
            let replica_id = response.replica_id as ReplicaId;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let worktree = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
        }

        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));
            let mut worktrees = Vec::new();
            for worktree in response.worktrees {
                let worktree = cx.update(|cx| {
                    Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
                });
                worktrees.push(worktree);
            }

            let mut this = Self {
                worktrees: Vec::new(),
@@ -498,11 +487,10 @@ impl Project {
                _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client_subscriptions: Default::default(),
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: Some(ProjectClientState::Remote {
@@ -551,10 +539,11 @@ impl Project {
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
                let _ = this.add_worktree(&worktree, cx);
            }
            this
        });
        let subscription = subscription.set_model(&this, &mut cx);

        let user_ids = response
            .collaborators
@@ -572,6 +561,7 @@ impl Project {

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
            this.client_subscriptions.push(subscription);
        });

        Ok(this)
@@ -594,9 +584,7 @@ impl Project {
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new());
        let project =
            cx.update(|cx| Project::local(client, user_store, project_store, languages, fs, cx));
        let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
@@ -677,10 +665,6 @@ impl Project {
        self.user_store.clone()
    }

    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
@@ -752,59 +736,29 @@ impl Project {
        }
    }

    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(ProjectClientState::Local { remote_id, .. }) = &self.client_state {
            let project_id = *remote_id;
            // Broadcast worktrees only if the project is online.
            let worktrees = self
                .worktrees
                .iter()
                .filter_map(|worktree| {
                    worktree
                        .upgrade(cx)
                        .map(|worktree| worktree.read(cx).as_local().unwrap().metadata_proto())
                })
                .collect();
            self.client
                .send(proto::UpdateProject {
                    project_id,
                    worktrees,
                })
                .log_err();

            let worktrees = self.visible_worktrees(cx).collect::<Vec<_>>();
            let scans_complete = futures::future::join_all(
                worktrees
                    .iter()
                    .filter_map(|worktree| Some(worktree.read(cx).as_local()?.scan_complete())),
            );

            let worktrees = worktrees.into_iter().map(|handle| handle.downgrade());

            cx.spawn_weak(move |_, cx| async move {
                scans_complete.await;
                cx.read(|cx| {
                    for worktree in worktrees {
                        if let Some(worktree) = worktree
                            .upgrade(cx)
                            .and_then(|worktree| worktree.read(cx).as_local())
                        {
                            worktree.send_extension_counts(project_id);
                        }
                    }
                })
            })
            .detach();
    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) -> impl Future<Output = ()> {
        let (tx, rx) = oneshot::channel();
        if let Some(ProjectClientState::Local {
            metadata_changed, ..
        }) = &mut self.client_state
        {
            let _ = metadata_changed.unbounded_send(tx);
        }

        self.project_store.update(cx, |_, cx| cx.notify());
        cx.notify();

        async move {
            // If the project is shared, this will resolve when the `_maintain_metadata` task has
            // a chance to update the metadata. Otherwise, it will resolve right away because `tx`
            // will get dropped.
            let _ = rx.await;
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    /// Collect all worktrees, including ones that don't appear in the project panel
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
@@ -814,6 +768,7 @@ impl Project {
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    /// Collect all user-visible worktrees, the ones that appear in the project panel
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
@@ -898,7 +853,7 @@ impl Project {
                .request(proto::CreateProjectEntry {
                    worktree_id: project_path.worktree_id.to_proto(),
                    project_id,
                    path: project_path.path.as_os_str().as_bytes().to_vec(),
                    path: project_path.path.to_string_lossy().into(),
                    is_directory,
                })
                .await?;
@@ -942,7 +897,7 @@ impl Project {
                .request(proto::CopyProjectEntry {
                    project_id,
                    entry_id: entry_id.to_proto(),
                    new_path: new_path.as_os_str().as_bytes().to_vec(),
                    new_path: new_path.to_string_lossy().into(),
                })
                .await?;
            let entry = response
@@ -985,7 +940,7 @@ impl Project {
                .request(proto::RenameProjectEntry {
                    project_id,
                    entry_id: entry_id.to_proto(),
                    new_path: new_path.as_os_str().as_bytes().to_vec(),
                    new_path: new_path.to_string_lossy().into(),
                })
                .await?;
            let entry = response
@@ -1086,15 +1041,51 @@ impl Project {
            });
        }

        self.client_subscriptions
            .push(self.client.add_model_for_remote_entity(project_id, cx));
        self.metadata_changed(cx);
        self.client_subscriptions.push(
            self.client
                .subscribe_to_entity(project_id)
                .set_model(&cx.handle(), &mut cx.to_async()),
        );
        let _ = self.metadata_changed(cx);
        cx.emit(Event::RemoteIdChanged(Some(project_id)));
        cx.notify();

        let mut status = self.client.status();
        let (metadata_changed_tx, mut metadata_changed_rx) = mpsc::unbounded();
        self.client_state = Some(ProjectClientState::Local {
            remote_id: project_id,
            metadata_changed: metadata_changed_tx,
            _maintain_metadata: cx.spawn_weak(move |this, cx| async move {
                while let Some(tx) = metadata_changed_rx.next().await {
                    let mut txs = vec![tx];
                    while let Ok(Some(next_tx)) = metadata_changed_rx.try_next() {
                        txs.push(next_tx);
                    }

                    let Some(this) = this.upgrade(&cx) else { break };
                    this.read_with(&cx, |this, cx| {
                        let worktrees = this
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect();
                        this.client.request(proto::UpdateProject {
                            project_id,
                            worktrees,
                        })
                    })
                    .await
                    .log_err();

                    for tx in txs {
                        let _ = tx.send(());
                    }
                }
            }),
            _detect_unshare: cx.spawn_weak(move |this, mut cx| {
                async move {
                    let is_connected = status.next().await.map_or(false, |s| s.is_connected());
@@ -1144,7 +1135,7 @@ impl Project {
            }
        }

        self.metadata_changed(cx);
        let _ = self.metadata_changed(cx);
        cx.notify();
        self.client.send(proto::UnshareProject {
            project_id: remote_id,
@@ -1203,6 +1194,34 @@ impl Project {
        !self.is_local()
    }

    pub fn create_terminal(
        &mut self,
        working_directory: Option<PathBuf>,
        window_id: usize,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Terminal>> {
        if self.is_remote() {
            return Err(anyhow!(
                "creating terminals as a guest is not supported yet"
            ));
        } else {
            let settings = cx.global::<Settings>();
            let shell = settings.terminal_shell();
            let envs = settings.terminal_env();
            let scroll = settings.terminal_scroll();

            TerminalBuilder::new(
                working_directory.clone(),
                shell,
                envs,
                settings.terminal_overrides.blinking.clone(),
                scroll,
                window_id,
            )
            .map(|builder| cx.add_model(|cx| builder.subscribe(cx)))
        }
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
@@ -1633,10 +1652,6 @@ impl Project {
                    operations: vec![language::proto::serialize_operation(operation)],
                });
                cx.background().spawn(request).detach_and_log_err(cx);
            } else if let Some(project_id) = self.remote_id() {
                let _ = self
                    .client
                    .send(proto::RegisterProjectActivity { project_id });
            }
        }
        BufferEvent::Edited { .. } => {
@@ -3428,19 +3443,29 @@ impl Project {
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: serialize_version(&source_buffer.version()),
            };
            cx.spawn_weak(|_, mut cx| async move {
            cx.spawn_weak(|this, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;
                if this
                    .upgrade(&cx)
                    .ok_or_else(|| anyhow!("project was dropped"))?
                    .read_with(&cx, |this, _| this.is_read_only())
                {
                    return Err(anyhow!(
                        "failed to get completions: project was disconnected"
                    ));
                } else {
                    source_buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_version(deserialize_version(response.version))
                        })
                        .await;

                let completions = response.completions.into_iter().map(|completion| {
                    language::proto::deserialize_completion(completion, language.clone())
                });
                futures::future::try_join_all(completions).await
                    let completions = response.completions.into_iter().map(|completion| {
                        language::proto::deserialize_completion(completion, language.clone())
                    });
                    futures::future::try_join_all(completions).await
                }
            })
        } else {
            Task::ready(Ok(Default::default()))
@@ -3617,7 +3642,7 @@ impl Project {
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let version = buffer.version();
            cx.spawn_weak(|_, mut cx| async move {
            cx.spawn_weak(|this, mut cx| async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
@@ -3628,17 +3653,27 @@ impl Project {
                    })
                    .await?;

                buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(deserialize_version(response.version))
                    })
                    .await;
                if this
                    .upgrade(&cx)
                    .ok_or_else(|| anyhow!("project was dropped"))?
                    .read_with(&cx, |this, _| this.is_read_only())
                {
                    return Err(anyhow!(
                        "failed to get code actions: project was disconnected"
                    ));
                } else {
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_version(deserialize_version(response.version))
                        })
                        .await;

                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
                    response
                        .actions
                        .into_iter()
                        .map(language::proto::deserialize_code_action)
                        .collect()
                }
            })
        } else {
            Task::ready(Ok(Default::default()))
@@ -4147,9 +4182,13 @@ impl Project {
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
                if this.read_with(&cx, |this, _| this.is_read_only()) {
                    Err(anyhow!("disconnected before completing request"))
                } else {
                    request
                        .response_from_proto(response, this, buffer_handle, cx)
                        .await
                }
            });
        }
        Task::ready(Ok(Default::default()))
@@ -4227,12 +4266,13 @@ impl Project {
            });
            let worktree = worktree?;

            let project_id = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                project.remote_id()
            });
            project
                .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))
                .await;

            if let Some(project_id) = project_id {
            if let Some(project_id) =
                project.read_with(&cx, |project, _| project.remote_id())
            {
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().share(project_id, cx)
@@ -4256,7 +4296,11 @@ impl Project {
        })
    }

    pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
    pub fn remove_worktree(
        &mut self,
        id_to_remove: WorktreeId,
        cx: &mut ModelContext<Self>,
    ) -> impl Future<Output = ()> {
        self.worktrees.retain(|worktree| {
            if let Some(worktree) = worktree.upgrade(cx) {
                let id = worktree.read(cx).id();
@@ -4270,11 +4314,14 @@ impl Project {
                false
            }
        });
        self.metadata_changed(cx);
        cx.notify();
        self.metadata_changed(cx)
    }

    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
    fn add_worktree(
        &mut self,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> impl Future<Output = ()> {
        cx.observe(worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            cx.subscribe(worktree, |this, worktree, event, cx| match event {
@@ -4298,15 +4345,13 @@ impl Project {
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }

        self.metadata_changed(cx);
        cx.observe_release(worktree, |this, worktree, cx| {
            this.remove_worktree(worktree.id(), cx);
            cx.notify();
            let _ = this.remove_worktree(worktree.id(), cx);
        })
        .detach();

        cx.emit(Event::WorktreeAdded);
        cx.notify();
        self.metadata_changed(cx)
    }

    fn update_local_worktree_buffers(
@@ -4623,11 +4668,11 @@ impl Project {
                    } else {
                        let worktree =
                            Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                        this.add_worktree(&worktree, cx);
                        let _ = this.add_worktree(&worktree, cx);
                    }
                }

                this.metadata_changed(cx);
                let _ = this.metadata_changed(cx);
                for (id, _) in old_worktrees_by_id {
                    cx.emit(Event::WorktreeRemoved(id));
                }
@@ -4669,7 +4714,7 @@ impl Project {
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
                let path = PathBuf::from(envelope.payload.path);
                worktree.create_entry(path, envelope.payload.is_directory, cx)
            })
            .await?;
@@ -4693,7 +4738,7 @@ impl Project {
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                let new_path = PathBuf::from(envelope.payload.new_path);
                worktree
                    .as_local_mut()
                    .unwrap()
@@ -4721,7 +4766,7 @@ impl Project {
        let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
                let new_path = PathBuf::from(envelope.payload.new_path);
                worktree
                    .as_local_mut()
                    .unwrap()
@@ -5863,48 +5908,6 @@ impl Project {
    }
}

impl ProjectStore {
    pub fn new() -> Self {
        Self {
            projects: Default::default(),
        }
    }

    pub fn projects<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
        self.projects
            .iter()
            .filter_map(|project| project.upgrade(cx))
    }

    fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
        if let Err(ix) = self
            .projects
            .binary_search_by_key(&project.id(), WeakModelHandle::id)
        {
            self.projects.insert(ix, project);
        }
        cx.notify();
    }

    fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
        let mut did_change = false;
        self.projects.retain(|project| {
            if project.is_upgradable(cx) {
                true
            } else {
                did_change = true;
                false
            }
        });
        if did_change {
            cx.notify();
        }
    }
}

impl WorktreeHandle {
    pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
        match self {
@@ -5983,16 +5986,10 @@ impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
    }
}

impl Entity for ProjectStore {
    type Event = ();
}

impl Entity for Project {
    type Event = Event;

    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        self.project_store.update(cx, ProjectStore::prune_projects);

    fn release(&mut self, _: &mut gpui::MutableAppContext) {
        match &self.client_state {
            Some(ProjectClientState::Local { remote_id, .. }) => {
                self.client

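The `metadata_changed` rewrite above replaces eager per-call broadcasting with a single `_maintain_metadata` task that coalesces bursts of changes into one `UpdateProject` request and then acks every waiter. The same pattern in isolation, using only the `futures` channel APIs this diff already relies on (names here are illustrative, not from the Zed codebase):

```rust
use futures::{
    channel::{mpsc, oneshot},
    StreamExt,
};

/// Worker half of the coalescing pattern: callers enqueue a oneshot ack
/// sender; the worker drains everything that queued up while it slept,
/// performs one update, then acks all pending callers at once.
async fn run_coalescing_worker(
    mut requests: mpsc::UnboundedReceiver<oneshot::Sender<()>>,
    mut do_update: impl FnMut(),
) {
    while let Some(first) = requests.next().await {
        let mut pending = vec![first];
        // Burst coalescing: N rapid notifications become a single update.
        while let Ok(Some(next)) = requests.try_next() {
            pending.push(next);
        }
        do_update();
        for ack in pending {
            // The caller may have stopped waiting; ignoring the error
            // mirrors the `let _ = tx.send(())` in the diff.
            let _ = ack.send(());
        }
    }
}
```

On the caller side, `metadata_changed` returns a future that resolves once its oneshot receiver fires, so callers can await the broadcast without blocking one another.
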
@@ -2166,7 +2166,11 @@ async fn test_rescan_and_remote_updates(
        proto::WorktreeMetadata {
            id: initial_snapshot.id().to_proto(),
            root_name: initial_snapshot.root_name().into(),
            abs_path: initial_snapshot.abs_path().as_os_str().as_bytes().to_vec(),
            abs_path: initial_snapshot
                .abs_path()
                .as_os_str()
                .to_string_lossy()
                .into(),
            visible: true,
        },
        rpc.clone(),

@@ -41,7 +41,6 @@ use std::{
    future::Future,
    mem,
    ops::{Deref, DerefMut},
    os::unix::prelude::{OsStrExt, OsStringExt},
    path::{Path, PathBuf},
    sync::{atomic::AtomicUsize, Arc},
    task::Poll,
@@ -83,6 +82,7 @@ pub struct RemoteWorktree {
    replica_id: ReplicaId,
    diagnostic_summaries: TreeMap<PathKey, DiagnosticSummary>,
    visible: bool,
    disconnected: bool,
}

#[derive(Clone)]
@@ -168,7 +168,7 @@ enum ScanState {
struct ShareState {
    project_id: u64,
    snapshots_tx: watch::Sender<LocalSnapshot>,
    _maintain_remote_snapshot: Option<Task<Option<()>>>,
    _maintain_remote_snapshot: Task<Option<()>>,
}

pub enum Event {
@@ -222,7 +222,7 @@ impl Worktree {
        let root_name = worktree.root_name.clone();
        let visible = worktree.visible;

        let abs_path = PathBuf::from(OsString::from_vec(worktree.abs_path));
        let abs_path = PathBuf::from(worktree.abs_path);
        let snapshot = Snapshot {
            id: WorktreeId(remote_id as usize),
            abs_path: Arc::from(abs_path.deref()),
@@ -248,6 +248,7 @@ impl Worktree {
                client: client.clone(),
                diagnostic_summaries: Default::default(),
                visible,
                disconnected: false,
            })
        });

@@ -660,7 +661,7 @@ impl LocalWorktree {
            id: self.id().to_proto(),
            root_name: self.root_name().to_string(),
            visible: self.visible,
            abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
            abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
        }
    }

@@ -972,11 +973,10 @@ impl LocalWorktree {
            let _ = share_tx.send(Ok(()));
        } else {
            let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
            let rpc = self.client.clone();
            let worktree_id = cx.model_id() as u64;

            for (path, summary) in self.diagnostic_summaries.iter() {
                if let Err(e) = rpc.send(proto::UpdateDiagnosticSummary {
                if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
                    project_id,
                    worktree_id,
                    summary: Some(summary.to_proto(&path.0)),
@@ -986,15 +986,14 @@ impl LocalWorktree {
            }

            let maintain_remote_snapshot = cx.background().spawn({
                let rpc = rpc;

                let rpc = self.client.clone();
                async move {
                    let mut prev_snapshot = match snapshots_rx.recv().await {
                        Some(snapshot) => {
                            let update = proto::UpdateWorktree {
                                project_id,
                                worktree_id,
                                abs_path: snapshot.abs_path().as_os_str().as_bytes().to_vec(),
                                abs_path: snapshot.abs_path().to_string_lossy().into(),
                                root_name: snapshot.root_name().to_string(),
                                updated_entries: snapshot
                                    .entries_by_path
@@ -1034,10 +1033,11 @@ impl LocalWorktree {
                }
                .log_err()
            });

            self.share = Some(ShareState {
                project_id,
                snapshots_tx,
                _maintain_remote_snapshot: Some(maintain_remote_snapshot),
                _maintain_remote_snapshot: maintain_remote_snapshot,
            });
        }

@@ -1055,25 +1055,6 @@ impl LocalWorktree {
    pub fn is_shared(&self) -> bool {
        self.share.is_some()
    }

    pub fn send_extension_counts(&self, project_id: u64) {
        let mut extensions = Vec::new();
        let mut counts = Vec::new();

        for (extension, count) in self.extension_counts() {
            extensions.push(extension.to_string_lossy().to_string());
            counts.push(*count as u32);
        }

        self.client
            .send(proto::UpdateWorktreeExtensions {
                project_id,
                worktree_id: self.id().to_proto(),
                extensions,
                counts,
            })
            .log_err();
    }
}

impl RemoteWorktree {
@@ -1090,6 +1071,7 @@ impl RemoteWorktree {
    pub fn disconnected_from_host(&mut self) {
        self.updates_tx.take();
        self.snapshot_subscriptions.clear();
        self.disconnected = true;
    }

    pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
@@ -1104,10 +1086,12 @@ impl RemoteWorktree {
        self.scan_id > scan_id || (self.scan_id == scan_id && self.is_complete)
    }

    fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = ()> {
    fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
        let (tx, rx) = oneshot::channel();
        if self.observed_snapshot(scan_id) {
            let _ = tx.send(());
        } else if self.disconnected {
            drop(tx);
        } else {
            match self
                .snapshot_subscriptions
@@ -1118,7 +1102,8 @@ impl RemoteWorktree {
        }

        async move {
            let _ = rx.await;
            rx.await?;
            Ok(())
        }
    }

@@ -1147,7 +1132,7 @@ impl RemoteWorktree {
    ) -> Task<Result<Entry>> {
        let wait_for_snapshot = self.wait_for_snapshot(scan_id);
        cx.spawn(|this, mut cx| async move {
            wait_for_snapshot.await;
            wait_for_snapshot.await?;
            this.update(&mut cx, |worktree, _| {
                let worktree = worktree.as_remote_mut().unwrap();
                let mut snapshot = worktree.background_snapshot.lock();
@@ -1166,7 +1151,7 @@ impl RemoteWorktree {
    ) -> Task<Result<()>> {
        let wait_for_snapshot = self.wait_for_snapshot(scan_id);
        cx.spawn(|this, mut cx| async move {
            wait_for_snapshot.await;
            wait_for_snapshot.await?;
            this.update(&mut cx, |worktree, _| {
                let worktree = worktree.as_remote_mut().unwrap();
                let mut snapshot = worktree.background_snapshot.lock();
@@ -1404,7 +1389,7 @@ impl LocalSnapshot {
        proto::UpdateWorktree {
            project_id,
            worktree_id: self.id().to_proto(),
            abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
            abs_path: self.abs_path().to_string_lossy().into(),
            root_name,
            updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
            removed_entries: Default::default(),
@@ -1472,7 +1457,7 @@ impl LocalSnapshot {
        proto::UpdateWorktree {
            project_id,
            worktree_id,
            abs_path: self.abs_path().as_os_str().as_bytes().to_vec(),
            abs_path: self.abs_path().to_string_lossy().into(),
            root_name: self.root_name().to_string(),
            updated_entries,
            removed_entries,
@@ -2951,7 +2936,7 @@ impl<'a> From<&'a Entry> for proto::Entry {
        Self {
            id: entry.id.to_proto(),
            is_dir: entry.is_dir(),
            path: entry.path.as_os_str().as_bytes().to_vec(),
            path: entry.path.to_string_lossy().into(),
            inode: entry.inode,
            mtime: Some(entry.mtime.into()),
            is_symlink: entry.is_symlink,
@@ -2969,14 +2954,10 @@ impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
            EntryKind::Dir
        } else {
            let mut char_bag = *root_char_bag;
            char_bag.extend(
                String::from_utf8_lossy(&entry.path)
                    .chars()
                    .map(|c| c.to_ascii_lowercase()),
            );
            char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
            EntryKind::File(char_bag)
        };
        let path: Arc<Path> = PathBuf::from(OsString::from_vec(entry.path)).into();
        let path: Arc<Path> = PathBuf::from(entry.path).into();
        Ok(Entry {
            id: ProjectEntryId::from_proto(entry.id),
            kind,

@@ -1393,8 +1393,15 @@ mod tests {
            .await;

        let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
        let (_, workspace) =
            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
        let (_, workspace) = cx.add_window(|cx| {
            Workspace::new(
                Default::default(),
                0,
                project.clone(),
                |_, _| unimplemented!(),
                cx,
            )
        });
        let panel = workspace.update(cx, |_, cx| ProjectPanel::new(project, cx));
        assert_eq!(
            visible_entries_as_strings(&panel, 0..50, cx),
@@ -1486,8 +1493,15 @@ mod tests {
            .await;

        let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
        let (_, workspace) =
            cx.add_window(|cx| Workspace::new(project.clone(), |_, _| unimplemented!(), cx));
        let (_, workspace) = cx.add_window(|cx| {
            Workspace::new(
                Default::default(),
                0,
                project.clone(),
                |_, _| unimplemented!(),
                cx,
            )
        });
        let panel = workspace.update(cx, |_, cx| ProjectPanel::new(project, cx));

        select_path(&panel, "root1", cx);

@@ -1,5 +1,6 @@
use editor::{
    combine_syntax_and_fuzzy_match_highlights, styled_runs_for_code_label, Autoscroll, Bias, Editor,
    combine_syntax_and_fuzzy_match_highlights, scroll::autoscroll::Autoscroll,
    styled_runs_for_code_label, Bias, Editor,
};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{

@@ -12,7 +12,7 @@ smallvec = { version = "1.6", features = ["union"] }
sum_tree = { path = "../sum_tree" }
arrayvec = "0.7.1"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }

util = { path = "../util" }

[dev-dependencies]
rand = "0.8.3"

@@ -12,6 +12,7 @@ use std::{
    str,
};
use sum_tree::{Bias, Dimension, SumTree};
use util::debug_panic;

pub use offset_utf16::OffsetUtf16;
pub use point::Point;
@@ -679,28 +680,33 @@ impl Chunk {
    fn point_to_offset(&self, target: Point) -> usize {
        let mut offset = 0;
        let mut point = Point::new(0, 0);

        for ch in self.0.chars() {
            if point >= target {
                if point > target {
                    panic!("point {:?} is inside of character {:?}", target, ch);
                    debug_panic!("point {target:?} is inside of character {ch:?}");
                }
                break;
            }

            if ch == '\n' {
                point.row += 1;
                if point.row > target.row {
                    panic!(
                        "point {:?} is beyond the end of a line with length {}",
                        target, point.column
                    );
                }
                point.column = 0;

                if point.row > target.row {
                    debug_panic!(
                        "point {target:?} is beyond the end of a line with length {}",
                        point.column
                    );
                    break;
                }
            } else {
                point.column += ch.len_utf8() as u32;
            }

            offset += ch.len_utf8();
        }

        offset
    }

@@ -737,26 +743,27 @@ impl Chunk {
            if ch == '\n' {
                point.row += 1;
                point.column = 0;

                if point.row > target.row {
                    if clip {
                        // Return the offset of the newline
                        return offset;
                    if !clip {
                        debug_panic!(
                            "point {target:?} is beyond the end of a line with length {}",
                            point.column
                        );
                    }
                    panic!(
                        "point {:?} is beyond the end of a line with length {}",
                        target, point.column
                    );
                    // Return the offset of the newline
                    return offset;
                }
            } else {
                point.column += ch.len_utf16() as u32;
            }

            if point > target {
                if clip {
                    // Return the offset of the codepoint which we have landed within, bias left
                    return offset;
                if !clip {
                    debug_panic!("point {target:?} is inside of codepoint {ch:?}");
                }
                panic!("point {:?} is inside of codepoint {:?}", target, ch);
                // Return the offset of the codepoint which we have landed within, bias left
                return offset;
            }

            offset += ch.len_utf8();

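The rope hunks above downgrade hard `panic!`s to `debug_panic!` plus a recoverable fallback (a `break` or a clipped offset). The macro itself lives in the `util` crate and is not shown in this diff; the following sketch of the usual shape of such a helper is offered as an assumption, not the actual definition:

```rust
// Assumed definition: panic in debug builds, log-and-continue in release.
#[macro_export]
macro_rules! debug_panic {
    ($($fmt_arg:tt)*) => {
        if cfg!(debug_assertions) {
            panic!($($fmt_arg)*);
        } else {
            log::error!($($fmt_arg)*);
        }
    };
}
```
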
@@ -48,9 +48,7 @@ message Envelope {
         OpenBufferForSymbolResponse open_buffer_for_symbol_response = 40;

         UpdateProject update_project = 41;
-        RegisterProjectActivity register_project_activity = 42;
         UpdateWorktree update_worktree = 43;
-        UpdateWorktreeExtensions update_worktree_extensions = 44;

         CreateProjectEntry create_project_entry = 45;
         RenameProjectEntry rename_project_entry = 46;
@@ -158,14 +156,12 @@ message JoinRoomResponse {
     optional LiveKitConnectionInfo live_kit_connection_info = 2;
 }

-message LeaveRoom {
-    uint64 id = 1;
-}
+message LeaveRoom {}

 message Room {
     uint64 id = 1;
     repeated Participant participants = 2;
-    repeated uint64 pending_participant_user_ids = 3;
+    repeated PendingParticipant pending_participants = 3;
     string live_kit_room = 4;
 }

@@ -176,6 +172,12 @@ message Participant {
     ParticipantLocation location = 4;
 }

+message PendingParticipant {
+    uint64 user_id = 1;
+    uint64 calling_user_id = 2;
+    optional uint64 initial_project_id = 3;
+}
+
 message ParticipantProject {
     uint64 id = 1;
     repeated string worktree_root_names = 2;
@@ -199,13 +201,13 @@ message ParticipantLocation {

 message Call {
     uint64 room_id = 1;
-    uint64 recipient_user_id = 2;
+    uint64 called_user_id = 2;
     optional uint64 initial_project_id = 3;
 }

 message IncomingCall {
     uint64 room_id = 1;
-    uint64 caller_user_id = 2;
+    uint64 calling_user_id = 2;
     repeated uint64 participant_user_ids = 3;
     optional ParticipantProject initial_project = 4;
 }
@@ -214,7 +216,7 @@ message CallCanceled {}

 message CancelCall {
     uint64 room_id = 1;
-    uint64 recipient_user_id = 2;
+    uint64 called_user_id = 2;
 }

 message DeclineCall {
@@ -253,10 +255,6 @@ message UpdateProject {
     repeated WorktreeMetadata worktrees = 2;
 }

-message RegisterProjectActivity {
-    uint64 project_id = 1;
-}
-
 message JoinProject {
     uint64 project_id = 1;
 }
@@ -280,33 +278,26 @@ message UpdateWorktree {
     repeated uint64 removed_entries = 5;
     uint64 scan_id = 6;
     bool is_last_update = 7;
-    bytes abs_path = 8;
-}
-
-message UpdateWorktreeExtensions {
-    uint64 project_id = 1;
-    uint64 worktree_id = 2;
-    repeated string extensions = 3;
-    repeated uint32 counts = 4;
+    string abs_path = 8;
 }

 message CreateProjectEntry {
     uint64 project_id = 1;
     uint64 worktree_id = 2;
-    bytes path = 3;
+    string path = 3;
     bool is_directory = 4;
 }

 message RenameProjectEntry {
     uint64 project_id = 1;
     uint64 entry_id = 2;
-    bytes new_path = 3;
+    string new_path = 3;
 }

 message CopyProjectEntry {
     uint64 project_id = 1;
     uint64 entry_id = 2;
-    bytes new_path = 3;
+    string new_path = 3;
 }

 message DeleteProjectEntry {
@@ -898,7 +889,7 @@ message File {
 message Entry {
     uint64 id = 1;
     bool is_dir = 2;
-    bytes path = 3;
+    string path = 3;
     uint64 inode = 4;
     Timestamp mtime = 5;
     bool is_symlink = 6;
@@ -1093,7 +1084,7 @@ message WorktreeMetadata {
     uint64 id = 1;
     string root_name = 2;
     bool visible = 3;
-    bytes abs_path = 4;
+    string abs_path = 4;
 }

 message UpdateDiffBase {

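A pattern running through these proto changes is `bytes` → `string` for paths (`abs_path`, `path`, `new_path`): paths now cross the wire as UTF-8 text rather than raw byte blobs. A sketch of the conversion trade-off this implies at the serialization boundary — the helper names are illustrative, not Zed's actual code, and the assumption is that rare non-UTF-8 paths are converted lossily:

```rust
use std::path::{Path, PathBuf};

// Encoding a path as a proto `string` requires valid UTF-8. A lossy
// conversion keeps the protocol simple at the cost of mangling rare
// non-UTF-8 paths, which `bytes` would have round-tripped exactly.
fn path_to_proto(path: &Path) -> String {
    path.to_string_lossy().into_owned()
}

fn path_from_proto(path: String) -> PathBuf {
    PathBuf::from(path)
}

fn main() {
    let original = PathBuf::from("crates/rpc/proto/zed.proto");
    let wire = path_to_proto(&original);
    assert_eq!(path_from_proto(wire), original);
}
```
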
@@ -24,7 +24,7 @@ use std::{
 };
 use tracing::instrument;

-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
+#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize)]
 pub struct ConnectionId(pub u32);

 impl fmt::Display for ConnectionId {

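The only change here is adding `Default` to the derive list, so a `ConnectionId` can be zero-initialized. That matters because `#[derive(Default)]` on a containing struct requires every field to implement `Default` too; a small sketch of the pattern this unlocks, with a hypothetical `PeerState` struct that is not from the Zed codebase:

```rust
#[derive(Clone, Copy, Default, PartialEq, Eq, Hash, Debug)]
pub struct ConnectionId(pub u32);

// Without `Default` on ConnectionId, this derive would fail to compile,
// since every field must itself implement Default.
#[derive(Default, Debug)]
struct PeerState {
    connection: ConnectionId,
    reconnect_attempts: u32,
}

fn main() {
    let state = PeerState::default();
    assert_eq!(state.connection, ConnectionId(0));
}
```
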
@@ -140,12 +140,11 @@ messages!(
     (OpenBufferResponse, Background),
     (PerformRename, Background),
     (PerformRenameResponse, Background),
+    (Ping, Foreground),
     (PrepareRename, Background),
     (PrepareRenameResponse, Background),
     (ProjectEntryResponse, Foreground),
     (RemoveContact, Foreground),
-    (Ping, Foreground),
-    (RegisterProjectActivity, Foreground),
     (ReloadBuffers, Foreground),
     (ReloadBuffersResponse, Foreground),
     (RemoveProjectCollaborator, Foreground),
@@ -175,7 +174,6 @@ messages!(
     (UpdateParticipantLocation, Foreground),
     (UpdateProject, Foreground),
     (UpdateWorktree, Foreground),
-    (UpdateWorktreeExtensions, Background),
     (UpdateDiffBase, Background),
     (GetPrivateUserInfo, Foreground),
     (GetPrivateUserInfoResponse, Foreground),
@@ -231,6 +229,7 @@ request_messages!(
     (Test, Test),
     (UpdateBuffer, Ack),
+    (UpdateParticipantLocation, Ack),
     (UpdateProject, Ack),
     (UpdateWorktree, Ack),
 );
@@ -262,7 +261,6 @@ entity_messages!(
     OpenBufferForSymbol,
     PerformRename,
     PrepareRename,
-    RegisterProjectActivity,
     ReloadBuffers,
     RemoveProjectCollaborator,
     RenameProjectEntry,
@@ -278,7 +276,6 @@ entity_messages!(
     UpdateLanguageServer,
     UpdateProject,
     UpdateWorktree,
-    UpdateWorktreeExtensions,
     UpdateDiffBase
 );

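In these macro invocations each message type is tagged `Foreground` or `Background`. The macro itself isn't shown in this diff, so the following is only a guess at the intent — latency-sensitive messages drained ahead of bulk ones — with hypothetical type names throughout, not the actual macro expansion:

```rust
use std::collections::VecDeque;

// Hypothetical sketch of priority routing like the tags in messages!():
// foreground messages are always drained before background ones.
#[derive(Clone, Copy, Debug)]
enum MessagePriority {
    Foreground, // latency-sensitive: user-visible updates
    Background, // bulk work: buffer syncs, search results
}

#[derive(Default)]
struct Inbox {
    foreground: VecDeque<String>,
    background: VecDeque<String>,
}

impl Inbox {
    fn push(&mut self, name: &str, priority: MessagePriority) {
        match priority {
            MessagePriority::Foreground => self.foreground.push_back(name.into()),
            MessagePriority::Background => self.background.push_back(name.into()),
        }
    }

    fn next(&mut self) -> Option<String> {
        self.foreground
            .pop_front()
            .or_else(|| self.background.pop_front())
    }
}

fn main() {
    let mut inbox = Inbox::default();
    inbox.push("UpdateWorktree", MessagePriority::Background);
    inbox.push("Ping", MessagePriority::Foreground);
    assert_eq!(inbox.next().as_deref(), Some("Ping"));
}
```
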
@@ -6,4 +6,4 @@ pub use conn::Connection;
 pub use peer::*;
 mod macros;

-pub const PROTOCOL_VERSION: u32 = 39;
+pub const PROTOCOL_VERSION: u32 = 40;

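Bumping `PROTOCOL_VERSION` from 39 to 40 marks the message changes above as a breaking wire change, so mismatched peers can be rejected up front rather than failing mid-session. A minimal sketch of the kind of handshake guard such a constant enables; the `check_protocol` function is illustrative, not taken from this codebase:

```rust
pub const PROTOCOL_VERSION: u32 = 40;

// Hypothetical handshake guard: refuse clients built against a
// different protocol revision instead of decoding garbage later.
fn check_protocol(client_version: u32) -> Result<(), String> {
    if client_version == PROTOCOL_VERSION {
        Ok(())
    } else {
        Err(format!(
            "client speaks protocol {client_version}, server expects {PROTOCOL_VERSION}"
        ))
    }
}

fn main() {
    assert!(check_protocol(40).is_ok());
    assert!(check_protocol(39).is_err());
}
```
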
@@ -14,8 +14,9 @@ use serde::Deserialize;
 use settings::Settings;
 use std::{any::Any, sync::Arc};
 use workspace::{
+    item::ItemHandle,
     searchable::{Direction, SearchEvent, SearchableItemHandle, WeakSearchableItemHandle},
-    ItemHandle, Pane, ToolbarItemLocation, ToolbarItemView,
+    Pane, ToolbarItemLocation, ToolbarItemView,
 };

 #[derive(Clone, Deserialize, PartialEq)]

@@ -4,8 +4,8 @@ use crate::{
 };
 use collections::HashMap;
 use editor::{
-    items::active_match_index, Anchor, Autoscroll, Editor, MultiBuffer, SelectAll,
-    MAX_TAB_TITLE_LEN,
+    items::active_match_index, scroll::autoscroll::Autoscroll, Anchor, Editor, MultiBuffer,
+    SelectAll, MAX_TAB_TITLE_LEN,
 };
 use gpui::{
     actions, elements::*, platform::CursorStyle, Action, AnyViewHandle, AppContext, ElementBox,
@@ -24,9 +24,9 @@ use std::{
 };
 use util::ResultExt as _;
 use workspace::{
+    item::{Item, ItemEvent, ItemHandle},
     searchable::{Direction, SearchableItem, SearchableItemHandle},
-    Item, ItemEvent, ItemHandle, ItemNavHistory, Pane, ToolbarItemLocation, ToolbarItemView,
-    Workspace,
+    ItemNavHistory, Pane, ToolbarItemLocation, ToolbarItemView, Workspace, WorkspaceId,
 };

 actions!(project_search, [SearchInNew, ToggleFocus]);

@@ -315,7 +315,7 @@ impl Item for ProjectSearchView {
         .update(cx, |editor, cx| editor.reload(project, cx))
     }

-    fn clone_on_split(&self, cx: &mut ViewContext<Self>) -> Option<Self>
+    fn clone_on_split(&self, _workspace_id: WorkspaceId, cx: &mut ViewContext<Self>) -> Option<Self>
     where
         Self: Sized,
     {

@@ -353,6 +353,20 @@ impl Item for ProjectSearchView {
     fn breadcrumbs(&self, theme: &theme::Theme, cx: &AppContext) -> Option<Vec<ElementBox>> {
         self.results_editor.breadcrumbs(theme, cx)
     }
+
+    fn serialized_item_kind() -> Option<&'static str> {
+        None
+    }
+
+    fn deserialize(
+        _project: ModelHandle<Project>,
+        _workspace: WeakViewHandle<Workspace>,
+        _workspace_id: workspace::WorkspaceId,
+        _item_id: workspace::ItemId,
+        _cx: &mut ViewContext<Pane>,
+    ) -> Task<anyhow::Result<ViewHandle<Self>>> {
+        unimplemented!()
+    }
 }

 impl ProjectSearchView {

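Together with the new `WorkspaceId` parameter on `clone_on_split`, these two methods hook `ProjectSearchView` into workspace serialization: returning `None` from `serialized_item_kind` opts the view out of persistence, which is why `deserialize` can safely be `unimplemented!()`. As a rough model of that contract — all names below are hypothetical stand-ins, not the workspace crate's API:

```rust
use std::collections::HashMap;

// Hypothetical sketch of the opt-in/opt-out contract: items that return a
// kind string get a deserializer registered; items that return None (like
// ProjectSearchView above) are simply skipped when a workspace is restored.
type Deserializer = fn(item_id: u64) -> String;

fn restore(kind: Option<&'static str>, registry: &HashMap<&str, Deserializer>) -> Option<String> {
    let kind = kind?; // None => this item kind is never persisted
    registry.get(kind).map(|deserialize| deserialize(1))
}

fn main() {
    let mut registry: HashMap<&str, Deserializer> = HashMap::new();
    registry.insert("Editor", |item_id| format!("Editor #{item_id} restored"));

    // An editor-like item round-trips; a project search view does not.
    assert_eq!(
        restore(Some("Editor"), &registry).as_deref(),
        Some("Editor #1 restored")
    );
    assert_eq!(restore(None, &registry), None);
}
```
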
@@ -893,7 +907,7 @@ impl View for ProjectSearchBar {
 impl ToolbarItemView for ProjectSearchBar {
     fn set_active_pane_item(
         &mut self,
-        active_pane_item: Option<&dyn workspace::ItemHandle>,
+        active_pane_item: Option<&dyn ItemHandle>,
         cx: &mut ViewContext<Self>,
     ) -> ToolbarItemLocation {
         cx.notify();

@@ -14,6 +14,7 @@ test-support = []
 assets = { path = "../assets" }
 collections = { path = "../collections" }
 gpui = { path = "../gpui" }
+sqlez = { path = "../sqlez" }
 fs = { path = "../fs" }
 anyhow = "1.0.38"
 futures = "0.3"

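This manifest gains a dependency on the new in-repo `sqlez` crate, an SQLite wrapper. Since sqlez's own API isn't shown in this diff, the sketch below uses the well-known `rusqlite` crate instead, purely to illustrate the kind of small local-state persistence such a dependency enables:

```rust
// Illustrative only: rusqlite stands in here because sqlez's real API
// isn't visible in this diff. The shape of the task is the same: open a
// database, ensure a schema, and read/write small bits of state.
use rusqlite::{Connection, Result};

fn main() -> Result<()> {
    let db = Connection::open_in_memory()?;
    db.execute_batch(
        "CREATE TABLE IF NOT EXISTS kv (key TEXT PRIMARY KEY, value TEXT NOT NULL);",
    )?;
    db.execute(
        "INSERT OR REPLACE INTO kv (key, value) VALUES (?1, ?2)",
        ("last_workspace", "/projects/zed"),
    )?;
    let value: String = db.query_row(
        "SELECT value FROM kv WHERE key = ?1",
        ("last_workspace",),
        |row| row.get(0),
    )?;
    assert_eq!(value, "/projects/zed");
    Ok(())
}
```
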