Mirror of https://github.com/zed-industries/zed.git (synced 2024-11-24 06:19:37 +00:00)
chore: Fix several style lints (#17488)
Some checks are pending
CI / Check formatting and spelling (push) Waiting to run
CI / (macOS) Run Clippy and tests (push) Waiting to run
CI / (Linux) Run Clippy and tests (push) Waiting to run
CI / (Windows) Run Clippy and tests (push) Waiting to run
CI / Create a macOS bundle (push) Blocked by required conditions
CI / Create a Linux bundle (push) Blocked by required conditions
CI / Create arm64 Linux bundle (push) Blocked by required conditions
Deploy Docs / Deploy Docs (push) Waiting to run
Docs / Check formatting (push) Waiting to run
It's not comprehensive enough to start linting on the `style` group as a whole, but hey, it's a start.

Release Notes:

- N/A
This commit is contained in: parent 93249fc82b, commit e6c1c51b37

361 changed files with 3530 additions and 3587 deletions
@ -573,6 +573,11 @@ single_range_in_vec_init = "allow"
# There are a bunch of rules currently failing in the `style` group, so
# allow all of those, for now.
style = { level = "allow", priority = -1 }
# We often return trait objects from `new` functions.
new_ret_no_self = { level = "allow" }
# We have a few `next` functions that differ in lifetimes
# compared to Iterator::next. Yet, clippy complains about those.
should_implement_trait = { level = "allow" }

# Individual rules that have violations in the codebase:
type_complexity = "allow"
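A note on the config above: in Cargo `[lints]` tables, the `priority = -1` on the `style` group entry makes the group-level `allow` apply before the individually named rules, so the specific entries that follow still take effect. The `new_ret_no_self` allowance covers constructors that intentionally return something other than `Self`; a minimal sketch of that pattern, with illustrative names rather than Zed code:

```rust
// Sketch of the kind of constructor behind the `new_ret_no_self` allowance.
// `AudioBackend`, `RodioBackend`, and `SoundPlayer` are illustrative names.
trait AudioBackend {
    fn play(&self);
}

struct RodioBackend;

impl AudioBackend for RodioBackend {
    fn play(&self) {}
}

struct SoundPlayer;

impl SoundPlayer {
    // Clippy's `new_ret_no_self` flags a `new` that does not return `Self`;
    // returning a boxed trait object here is intentional, hence the allow.
    fn new() -> Box<dyn AudioBackend> {
        Box::new(RodioBackend)
    }
}

fn main() {
    SoundPlayer::new().play();
}
```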
@ -16,7 +16,7 @@ use util::ResultExt as _;
pub use supported_countries::*;

- pub const ANTHROPIC_API_URL: &'static str = "https://api.anthropic.com";
+ pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
@ -339,14 +339,12 @@ pub async fn extract_tool_args_from_events(
    while let Some(event) = events.next().await {
        if let Event::ContentBlockStart {
            index,
-           content_block,
+           content_block: ResponseContent::ToolUse { name, .. },
        } = event?
        {
-           if let ResponseContent::ToolUse { name, .. } = content_block {
-               if name == tool_name {
-                   tool_use_index = Some(index);
-                   break;
-               }
-           }
+           if name == tool_name {
+               tool_use_index = Some(index);
+               break;
+           }
        }
    }
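The hunk above folds a nested `if let` into the outer pattern by matching the enum variant directly in the field position. A self-contained sketch of the same move, using made-up `Event`/`Content` types rather than the real Anthropic ones:

```rust
// Hypothetical types standing in for the real event/content enums.
enum Content {
    ToolUse { name: String },
    Text(String),
}

enum Event {
    ContentBlockStart { index: usize, content_block: Content },
    Other,
}

fn find_tool(events: &[Event], tool_name: &str) -> Option<usize> {
    for event in events {
        // Matching `Content::ToolUse` inside the outer pattern removes the
        // nested `if let` that the pre-lint version of the code needed.
        if let Event::ContentBlockStart {
            index,
            content_block: Content::ToolUse { name },
        } = event
        {
            if name == tool_name {
                return Some(*index);
            }
        }
    }
    None
}

fn main() {
    let events = [Event::ContentBlockStart {
        index: 0,
        content_block: Content::ToolUse { name: "search".into() },
    }];
    assert_eq!(find_tool(&events, "search"), Some(0));
}
```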
@ -610,9 +608,6 @@ impl ApiError {
    }

    pub fn is_rate_limit_error(&self) -> bool {
-       match self.error_type.as_str() {
-           "rate_limit_error" => true,
-           _ => false,
-       }
+       matches!(self.error_type.as_str(), "rate_limit_error")
    }
}
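`matches!` is the usual replacement for a `match` whose arms only return `true` or `false` (clippy's `match_like_matches_macro`). A small sketch with an invented error type:

```rust
// Illustrative error type; not the real anthropic::ApiError.
struct ApiErrorLike {
    error_type: String,
}

impl ApiErrorLike {
    fn is_rate_limit_error(&self) -> bool {
        // Equivalent to a two-arm match that returns true/false.
        matches!(self.error_type.as_str(), "rate_limit_error")
    }
}

fn main() {
    let err = ApiErrorLike { error_type: "rate_limit_error".into() };
    assert!(err.is_rate_limit_error());
}
```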
@ -44,7 +44,7 @@ impl SoundRegistry {
        let bytes = self
            .assets
            .load(&path)?
-           .map(|asset| Ok(asset))
+           .map(Ok)
            .unwrap_or_else(|| Err(anyhow::anyhow!("No such asset available")))?
            .into_owned();
        let cursor = Cursor::new(bytes);
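The `.map(|asset| Ok(asset))` to `.map(Ok)` change is clippy's `redundant_closure`: a closure that only forwards its argument can be replaced by the function or constructor path itself. A generic sketch:

```rust
fn main() {
    let values = vec![1, 2, 3];

    // Flagged form: the closure does nothing but call `Some`.
    let _wrapped: Vec<Option<i32>> = values.iter().copied().map(|v| Some(v)).collect();

    // Preferred form: pass the constructor directly.
    let wrapped: Vec<Option<i32>> = values.iter().copied().map(Some).collect();
    assert_eq!(wrapped, vec![Some(1), Some(2), Some(3)]);
}
```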
@ -33,6 +33,7 @@ impl Sound {
    }
}

+ #[derive(Default)]
pub struct Audio {
    _output_stream: Option<OutputStream>,
    output_handle: Option<OutputStreamHandle>,
@ -45,10 +46,7 @@ impl Global for GlobalAudio {}

impl Audio {
    pub fn new() -> Self {
-       Self {
-           _output_stream: None,
-           output_handle: None,
-       }
+       Self::default()
    }

    fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {
@ -95,7 +95,7 @@ struct MacOsUnmounter {
impl Drop for MacOsUnmounter {
    fn drop(&mut self) {
        let unmount_output = std::process::Command::new("hdiutil")
-           .args(&["detach", "-force"])
+           .args(["detach", "-force"])
            .arg(&self.mount_path)
            .output();

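Dropping the `&` in `.args(&[...])` is the needless-borrow family of lints: `Command::args` accepts any `IntoIterator` whose items are `AsRef<OsStr>`, so the array can be passed by value. A sketch, assuming a Unix-like system where `ls` is available:

```rust
use std::process::Command;

fn main() -> std::io::Result<()> {
    // Flagged form (needless borrow): `.args(&["-l", "-a"])`.
    // Arrays already implement IntoIterator by value, so the `&` adds nothing.
    let output = Command::new("ls").args(["-l", "-a"]).output()?;
    println!("status: {}", output.status);
    Ok(())
}
```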
@ -211,7 +211,7 @@ pub fn check(_: &Check, cx: &mut WindowContext) {
        return;
    }

-   if let Some(message) = env::var("ZED_UPDATE_EXPLANATION").ok() {
+   if let Ok(message) = env::var("ZED_UPDATE_EXPLANATION") {
        drop(cx.prompt(
            gpui::PromptLevel::Info,
            "Zed was installed via a package manager.",
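Matching `Ok(..)` on the `Result` directly, instead of converting to an `Option` with `.ok()` first, says the same thing with one less step; this appears to be clippy's `match_result_ok` style check. Sketch with an arbitrary variable name:

```rust
use std::env;

fn main() {
    // Flagged form:
    // if let Some(value) = env::var("EXAMPLE_VAR").ok() { ... }

    // Preferred form: match the Result directly.
    // EXAMPLE_VAR is an arbitrary name chosen for this sketch.
    if let Ok(value) = env::var("EXAMPLE_VAR") {
        println!("EXAMPLE_VAR = {value}");
    }
}
```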
@ -254,7 +254,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut AppContext) -> Option<(
        let url = &auto_updater
            .http_client
            .build_url(&format!("/releases/{release_channel}/{current_version}"));
-       cx.open_url(&url);
+       cx.open_url(url);
    }

    None
@ -722,7 +722,7 @@ async fn install_release_linux(
    }

    let output = Command::new("rsync")
-       .args(&["-av", "--delete"])
+       .args(["-av", "--delete"])
        .arg(&from)
        .arg(&to)
        .output()
@ -754,10 +754,10 @@ async fn install_release_macos(

    mounted_app_path.push("/");
    let output = Command::new("hdiutil")
-       .args(&["attach", "-nobrowse"])
+       .args(["attach", "-nobrowse"])
        .arg(&downloaded_dmg)
        .arg("-mountroot")
-       .arg(&temp_dir.path())
+       .arg(temp_dir.path())
        .output()
        .await?;

@ -773,7 +773,7 @@ async fn install_release_macos(
    };

    let output = Command::new("rsync")
-       .args(&["-av", "--delete"])
+       .args(["-av", "--delete"])
        .arg(&mounted_app_path)
        .arg(&running_app_path)
        .output()
@ -18,6 +18,12 @@ pub struct Breadcrumbs {
    subscription: Option<Subscription>,
}

+ impl Default for Breadcrumbs {
+     fn default() -> Self {
+         Self::new()
+     }
+ }
+
impl Breadcrumbs {
    pub fn new() -> Self {
        Self {
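Adding a `Default` impl that delegates to `new()` is the standard response to clippy's `new_without_default`: a public, argument-free `new()` should come with `Default`. Sketch with an invented type:

```rust
// Illustrative panel type; the pattern mirrors the Breadcrumbs change above.
struct Panel {
    items: Vec<String>,
}

impl Default for Panel {
    fn default() -> Self {
        Self::new()
    }
}

impl Panel {
    pub fn new() -> Self {
        Self { items: Vec::new() }
    }
}

fn main() {
    let panel = Panel::default();
    assert!(panel.items.is_empty());
}
```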
@ -259,13 +259,13 @@ impl Room {
            None
        };

-       match room
+       let did_join = room
            .update(&mut cx, |room, cx| {
                room.leave_when_empty = true;
                room.call(called_user_id, initial_project_id, cx)
            })?
-           .await
-       {
+           .await;
+       match did_join {
            Ok(()) => Ok(room),
            Err(error) => Err(error.context("room creation failed")),
        }
@ -493,7 +493,7 @@ impl Room {
        // we leave the room and return an error.
        if let Some(this) = this.upgrade() {
            log::info!("reconnection failed, leaving room");
-           let _ = this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
+           this.update(&mut cx, |this, cx| this.leave(cx))?.await?;
        }
        Err(anyhow!(
            "can't reconnect to room: client failed to re-establish connection"
@ -933,7 +933,7 @@ impl Room {
        let list = this
            .follows_by_leader_id_project_id
            .entry((leader, project_id))
-           .or_insert(Vec::new());
+           .or_default();
        if !list.contains(&follower) {
            list.push(follower);
        }
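`Entry::or_default()` is the idiomatic spelling of `.or_insert(Vec::new())` or `.or_insert_with(|| Default::default())` whenever the value type implements `Default`. A runnable sketch of the pattern in the hunk above, with placeholder ID types:

```rust
use std::collections::HashMap;

fn main() {
    // (leader, project_id) -> followers; u32 stands in for the real ID types.
    let mut follows: HashMap<(u32, u32), Vec<u32>> = HashMap::new();
    let (leader, project_id, follower) = (1_u32, 7_u32, 2_u32);

    // Equivalent to .or_insert(Vec::new()), but shorter and what the
    // style lints steer toward.
    let list = follows.entry((leader, project_id)).or_default();
    if !list.contains(&follower) {
        list.push(follower);
    }
    assert_eq!(follows[&(1, 7)], vec![2]);
}
```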
@ -942,7 +942,7 @@ impl Room {
        this.pending_room_update.take();
        if this.should_leave() {
            log::info!("room is empty, leaving");
-           let _ = this.leave(cx).detach();
+           this.leave(cx).detach();
        }

        this.user_store.update(cx, |user_store, cx| {
@ -1017,19 +1017,11 @@ impl Room {
            .collect::<Vec<u64>>();
        speaker_ids.sort_unstable();
        for (sid, participant) in &mut self.remote_participants {
-           if let Ok(_) = speaker_ids.binary_search(sid) {
-               participant.speaking = true;
-           } else {
-               participant.speaking = false;
-           }
+           participant.speaking = speaker_ids.binary_search(sid).is_ok();
        }
        if let Some(id) = self.client.user_id() {
            if let Some(room) = &mut self.live_kit {
-               if let Ok(_) = speaker_ids.binary_search(&id) {
-                   room.speaking = true;
-               } else {
-                   room.speaking = false;
-               }
+               room.speaking = speaker_ids.binary_search(&id).is_ok();
            }
        }
    }
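Assigning the boolean directly from `.is_ok()` replaces both the `if let Ok(_) = ...` pattern match (clippy's `redundant_pattern_matching`) and the explicit `true`/`false` branches. Sketch:

```rust
fn main() {
    // Sorted list, as binary_search requires.
    let speaker_ids = vec![3_u64, 5, 9];
    let sid = 5_u64;

    // Flagged form:
    // let speaking;
    // if let Ok(_) = speaker_ids.binary_search(&sid) {
    //     speaking = true;
    // } else {
    //     speaking = false;
    // }

    // After the fix: the search result is only used as a boolean.
    let speaking = speaker_ids.binary_search(&sid).is_ok();
    assert!(speaking);
}
```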
@ -177,13 +177,10 @@ impl ChannelBuffer {
        match event {
            language::Event::Operation(operation) => {
                if *ZED_ALWAYS_ACTIVE {
-                   match operation {
-                       language::Operation::UpdateSelections { selections, .. } => {
-                           if selections.is_empty() {
-                               return;
-                           }
+                   if let language::Operation::UpdateSelections { selections, .. } = operation {
+                       if selections.is_empty() {
+                           return;
+                       }
-                       _ => {}
-                   }
                    }
                }
                let operation = language::proto::serialize_operation(operation);
@ -61,9 +61,9 @@ pub enum ChannelMessageId {
    Pending(usize),
}

- impl Into<Option<u64>> for ChannelMessageId {
-     fn into(self) -> Option<u64> {
-         match self {
+ impl From<ChannelMessageId> for Option<u64> {
+     fn from(val: ChannelMessageId) -> Self {
+         match val {
            ChannelMessageId::Saved(id) => Some(id),
            ChannelMessageId::Pending(_) => None,
        }
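Implementing `From` instead of `Into` (clippy's `from_over_into`) gives the matching `Into` impl for free through the standard library's blanket impl. Sketch with a stand-in enum rather than the real `ChannelMessageId`:

```rust
// Stand-in for ChannelMessageId; the real type lives in the channel crate.
#[derive(Clone, Copy)]
enum MessageId {
    Saved(u64),
    Pending(usize),
}

// Prefer From: `MessageId: Into<Option<u64>>` then comes from the
// blanket `impl<T, U: From<T>> Into<U> for T` in std.
impl From<MessageId> for Option<u64> {
    fn from(val: MessageId) -> Self {
        match val {
            MessageId::Saved(id) => Some(id),
            MessageId::Pending(_) => None,
        }
    }
}

fn main() {
    let saved: Option<u64> = MessageId::Saved(42).into();
    assert_eq!(saved, Some(42));
    let pending: Option<u64> = MessageId::Pending(0).into();
    assert_eq!(pending, None);
}
```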
@ -249,15 +249,14 @@ impl ChannelStore {
    }

    pub fn initialize(&mut self) {
-       if !self.did_subscribe {
-           if self
+       if !self.did_subscribe
+           && self
                .client
                .send(proto::SubscribeToChannels {})
                .log_err()
                .is_some()
-           {
-               self.did_subscribe = true;
-           }
+       {
+           self.did_subscribe = true;
+       }
    }

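Folding the nested `if` into the outer condition with `&&` is clippy's `collapsible_if`; short-circuit evaluation keeps the behavior identical, since the right-hand side only runs when the left-hand side is true. Sketch:

```rust
fn main() {
    let mut did_subscribe = false;
    let send_ok = true; // stand-in for client.send(...).log_err().is_some()

    // Flagged form:
    // if !did_subscribe {
    //     if send_ok {
    //         did_subscribe = true;
    //     }
    // }

    // Collapsed form; && short-circuits, so the send check is skipped
    // whenever did_subscribe is already true.
    if !did_subscribe && send_ok {
        did_subscribe = true;
    }
    assert!(did_subscribe);
}
```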
@ -423,7 +422,7 @@ impl ChannelStore {
    ) {
        self.channel_states
            .entry(channel_id)
-           .or_insert_with(|| Default::default())
+           .or_default()
            .acknowledge_message_id(message_id);
        cx.notify();
    }
@ -436,7 +435,7 @@ impl ChannelStore {
    ) {
        self.channel_states
            .entry(channel_id)
-           .or_insert_with(|| Default::default())
+           .or_default()
            .update_latest_message_id(message_id);
        cx.notify();
    }
@ -450,7 +449,7 @@ impl ChannelStore {
    ) {
        self.channel_states
            .entry(channel_id)
-           .or_insert_with(|| Default::default())
+           .or_default()
            .acknowledge_notes_version(epoch, version);
        cx.notify()
    }
@ -464,7 +463,7 @@ impl ChannelStore {
    ) {
        self.channel_states
            .entry(channel_id)
-           .or_insert_with(|| Default::default())
+           .or_default()
            .update_latest_notes_version(epoch, version);
        cx.notify()
    }
@ -924,7 +923,7 @@ impl ChannelStore {
        if let Some(role) = ChannelRole::from_i32(membership.role) {
            this.channel_states
                .entry(ChannelId(membership.channel_id))
-               .or_insert_with(|| ChannelState::default())
+               .or_default()
                .set_role(role)
        }
    }
@ -1094,11 +1093,7 @@ impl ChannelStore {
                    id: ChannelId(channel.id),
                    visibility: channel.visibility(),
                    name: channel.name.into(),
-                   parent_path: channel
-                       .parent_path
-                       .into_iter()
-                       .map(|cid| ChannelId(cid))
-                       .collect(),
+                   parent_path: channel.parent_path.into_iter().map(ChannelId).collect(),
                }),
            ),
        }
@ -1113,14 +1108,11 @@ impl ChannelStore {

        if channels_changed {
            if !payload.delete_channels.is_empty() {
-               let delete_channels: Vec<ChannelId> = payload
-                   .delete_channels
-                   .into_iter()
-                   .map(|cid| ChannelId(cid))
-                   .collect();
+               let delete_channels: Vec<ChannelId> =
+                   payload.delete_channels.into_iter().map(ChannelId).collect();
                self.channel_index.delete_channels(&delete_channels);
                self.channel_participants
-                   .retain(|channel_id, _| !delete_channels.contains(&channel_id));
+                   .retain(|channel_id, _| !delete_channels.contains(channel_id));

                for channel_id in &delete_channels {
                    let channel_id = *channel_id;
@ -117,7 +117,7 @@ impl Settings for ClientSettings {
|
|||
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
|
||||
let mut result = sources.json_merge::<Self>()?;
|
||||
if let Some(server_url) = &*ZED_SERVER_URL {
|
||||
result.server_url.clone_from(&server_url)
|
||||
result.server_url.clone_from(server_url)
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
|
@ -1141,7 +1141,7 @@ impl Client {
|
|||
request_headers.insert("x-zed-app-version", HeaderValue::from_str(&app_version)?);
|
||||
request_headers.insert(
|
||||
"x-zed-release-channel",
|
||||
HeaderValue::from_str(&release_channel.map(|r| r.dev_name()).unwrap_or("unknown"))?,
|
||||
HeaderValue::from_str(release_channel.map(|r| r.dev_name()).unwrap_or("unknown"))?,
|
||||
);
|
||||
|
||||
match url_scheme {
|
||||
|
@ -1344,16 +1344,14 @@ impl Client {
|
|||
);
|
||||
}
|
||||
|
||||
let user = serde_json::from_slice::<GithubUser>(body.as_slice()).map_err(|err| {
|
||||
serde_json::from_slice::<GithubUser>(body.as_slice()).map_err(|err| {
|
||||
log::error!("Error deserializing: {:?}", err);
|
||||
log::error!(
|
||||
"GitHub API response text: {:?}",
|
||||
String::from_utf8_lossy(body.as_slice())
|
||||
);
|
||||
anyhow!("error deserializing GitHub user")
|
||||
})?;
|
||||
|
||||
user
|
||||
})?
|
||||
};
|
||||
|
||||
let query_params = [
|
||||
|
@ -1408,7 +1406,7 @@ impl Client {
|
|||
|
||||
pub async fn sign_out(self: &Arc<Self>, cx: &AsyncAppContext) {
|
||||
self.state.write().credentials = None;
|
||||
self.disconnect(&cx);
|
||||
self.disconnect(cx);
|
||||
|
||||
if self.has_credentials(cx).await {
|
||||
self.credentials_provider
|
||||
|
|
|
@ -35,12 +35,8 @@ pub(crate) async fn connect_socks_proxy_stream(
|
|||
}
|
||||
|
||||
fn parse_socks_proxy(proxy: Option<&Uri>) -> Option<((String, u16), SocksVersion)> {
|
||||
let Some(proxy_uri) = proxy else {
|
||||
return None;
|
||||
};
|
||||
let Some(scheme) = proxy_uri.scheme_str() else {
|
||||
return None;
|
||||
};
|
||||
let proxy_uri = proxy?;
|
||||
let scheme = proxy_uri.scheme_str()?;
|
||||
let socks_version = if scheme.starts_with("socks4") {
|
||||
// socks4
|
||||
SocksVersion::V4
|
||||
|
|
|
@ -670,6 +670,24 @@ impl Telemetry {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn calculate_json_checksum(json: &impl AsRef<[u8]>) -> Option<String> {
|
||||
let Some(checksum_seed) = &*ZED_CLIENT_CHECKSUM_SEED else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let mut summer = Sha256::new();
|
||||
summer.update(checksum_seed);
|
||||
summer.update(json);
|
||||
summer.update(checksum_seed);
|
||||
let mut checksum = String::new();
|
||||
for byte in summer.finalize().as_slice() {
|
||||
use std::fmt::Write;
|
||||
write!(&mut checksum, "{:02x}", byte).unwrap();
|
||||
}
|
||||
|
||||
Some(checksum)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -828,21 +846,3 @@ mod tests {
|
|||
&& telemetry.state.lock().first_event_date_time.is_none()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn calculate_json_checksum(json: &impl AsRef<[u8]>) -> Option<String> {
|
||||
let Some(checksum_seed) = &*ZED_CLIENT_CHECKSUM_SEED else {
|
||||
return None;
|
||||
};
|
||||
|
||||
let mut summer = Sha256::new();
|
||||
summer.update(checksum_seed);
|
||||
summer.update(&json);
|
||||
summer.update(checksum_seed);
|
||||
let mut checksum = String::new();
|
||||
for byte in summer.finalize().as_slice() {
|
||||
use std::fmt::Write;
|
||||
write!(&mut checksum, "{:02x}", byte).unwrap();
|
||||
}
|
||||
|
||||
Some(checksum)
|
||||
}
|
||||
|
|
|
@ -200,7 +200,7 @@ impl UserStore {
|
|||
cx.update(|cx| {
|
||||
if let Some(info) = info {
|
||||
let disable_staff = std::env::var("ZED_DISABLE_STAFF")
|
||||
.map_or(false, |v| v != "" && v != "0");
|
||||
.map_or(false, |v| !v.is_empty() && v != "0");
|
||||
let staff = info.staff && !disable_staff;
|
||||
cx.update_flags(staff, info.flags);
|
||||
client.telemetry.set_authenticated_user_info(
|
||||
|
|
|
@ -137,7 +137,7 @@ async fn get_authenticated_user(
|
|||
)
|
||||
.await?;
|
||||
let metrics_id = app.db.get_user_metrics_id(user.id).await?;
|
||||
return Ok(Json(AuthenticatedUserResponse { user, metrics_id }));
|
||||
Ok(Json(AuthenticatedUserResponse { user, metrics_id }))
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
|
|
|
@ -600,7 +600,7 @@ async fn find_or_create_billing_customer(
|
|||
// there's nothing more we need to do.
|
||||
if let Some(billing_customer) = app
|
||||
.db
|
||||
.get_billing_customer_by_stripe_customer_id(&customer_id)
|
||||
.get_billing_customer_by_stripe_customer_id(customer_id)
|
||||
.await?
|
||||
{
|
||||
return Ok(Some(billing_customer));
|
||||
|
@ -609,7 +609,7 @@ async fn find_or_create_billing_customer(
|
|||
// If all we have is a customer ID, resolve it to a full customer record by
|
||||
// hitting the Stripe API.
|
||||
let customer = match customer_or_id {
|
||||
Expandable::Id(id) => Customer::retrieve(&stripe_client, &id, &[]).await?,
|
||||
Expandable::Id(id) => Customer::retrieve(stripe_client, &id, &[]).await?,
|
||||
Expandable::Object(customer) => *customer,
|
||||
};
|
||||
|
||||
|
|
|
@ -397,7 +397,7 @@ pub async fn post_events(
|
|||
match &wrapper.event {
|
||||
Event::Editor(event) => to_upload.editor_events.push(EditorEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
country_code.clone(),
|
||||
|
@ -410,7 +410,7 @@ pub async fn post_events(
|
|||
.inline_completion_events
|
||||
.push(InlineCompletionEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
country_code.clone(),
|
||||
|
@ -419,7 +419,7 @@ pub async fn post_events(
|
|||
}
|
||||
Event::Call(event) => to_upload.call_events.push(CallEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
|
@ -429,7 +429,7 @@ pub async fn post_events(
|
|||
.assistant_events
|
||||
.push(AssistantEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
|
@ -437,42 +437,42 @@ pub async fn post_events(
|
|||
}
|
||||
Event::Cpu(event) => to_upload.cpu_events.push(CpuEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
)),
|
||||
Event::Memory(event) => to_upload.memory_events.push(MemoryEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
)),
|
||||
Event::App(event) => to_upload.app_events.push(AppEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
)),
|
||||
Event::Setting(event) => to_upload.setting_events.push(SettingEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
)),
|
||||
Event::Edit(event) => to_upload.edit_events.push(EditEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
)),
|
||||
Event::Action(event) => to_upload.action_events.push(ActionEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
|
@ -486,7 +486,7 @@ pub async fn post_events(
|
|||
.extension_events
|
||||
.push(ExtensionEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
metadata,
|
||||
first_event_at,
|
||||
|
@ -495,7 +495,7 @@ pub async fn post_events(
|
|||
}
|
||||
Event::Repl(event) => to_upload.repl_events.push(ReplEventRow::from_event(
|
||||
event.clone(),
|
||||
&wrapper,
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
|
@ -1332,7 +1332,7 @@ pub fn calculate_json_checksum(app: Arc<AppState>, json: &impl AsRef<[u8]>) -> O
|
|||
|
||||
let mut summer = Sha256::new();
|
||||
summer.update(checksum_seed);
|
||||
summer.update(&json);
|
||||
summer.update(json);
|
||||
summer.update(checksum_seed);
|
||||
Some(summer.finalize().into_iter().collect())
|
||||
}
|
||||
|
|
|
@ -319,14 +319,14 @@ async fn fetch_extensions_from_blob_store(
|
|||
if let Some(extension) = fetch_extension_manifest(
|
||||
blob_store_client,
|
||||
blob_store_bucket,
|
||||
&extension_id,
|
||||
&published_version,
|
||||
extension_id,
|
||||
published_version,
|
||||
)
|
||||
.await
|
||||
.log_err()
|
||||
{
|
||||
new_versions
|
||||
.entry(&extension_id)
|
||||
.entry(extension_id)
|
||||
.or_default()
|
||||
.push(extension);
|
||||
}
|
||||
|
|
|
@ -85,7 +85,7 @@ pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl Into
|
|||
impersonator_id: None,
|
||||
})
|
||||
} else {
|
||||
verify_access_token(&access_token, user_id, &state.db).await
|
||||
verify_access_token(access_token, user_id, &state.db).await
|
||||
};
|
||||
|
||||
if let Ok(validate_result) = validate_result {
|
||||
|
@ -202,7 +202,7 @@ pub async fn verify_access_token(
|
|||
.unwrap()
|
||||
});
|
||||
|
||||
let token: AccessTokenJson = serde_json::from_str(&token)?;
|
||||
let token: AccessTokenJson = serde_json::from_str(token)?;
|
||||
|
||||
let db_token = db.get_access_token(token.id).await?;
|
||||
let token_user_id = db_token.impersonated_user_id.unwrap_or(db_token.user_id);
|
||||
|
@ -249,7 +249,7 @@ pub async fn verify_dev_server_token(
|
|||
db: &Arc<Database>,
|
||||
) -> anyhow::Result<dev_server::Model> {
|
||||
let (id, token) = split_dev_server_token(dev_server_token)?;
|
||||
let token_hash = hash_access_token(&token);
|
||||
let token_hash = hash_access_token(token);
|
||||
let server = db.get_dev_server(id).await?;
|
||||
|
||||
if server
|
||||
|
@ -301,18 +301,16 @@ mod test {
|
|||
.await
|
||||
.unwrap();
|
||||
|
||||
let token = create_access_token(&db, user.user_id, None).await.unwrap();
|
||||
let token = create_access_token(db, user.user_id, None).await.unwrap();
|
||||
assert!(matches!(
|
||||
verify_access_token(&token, user.user_id, &db)
|
||||
.await
|
||||
.unwrap(),
|
||||
verify_access_token(&token, user.user_id, db).await.unwrap(),
|
||||
VerifyAccessTokenResult {
|
||||
is_valid: true,
|
||||
impersonator_id: None,
|
||||
}
|
||||
));
|
||||
|
||||
let old_token = create_previous_access_token(user.user_id, None, &db)
|
||||
let old_token = create_previous_access_token(user.user_id, None, db)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
@ -333,7 +331,7 @@ mod test {
|
|||
assert!(hash.starts_with("$scrypt$"));
|
||||
|
||||
assert!(matches!(
|
||||
verify_access_token(&old_token, user.user_id, &db)
|
||||
verify_access_token(&old_token, user.user_id, db)
|
||||
.await
|
||||
.unwrap(),
|
||||
VerifyAccessTokenResult {
|
||||
|
@ -355,7 +353,7 @@ mod test {
|
|||
assert!(hash.starts_with("$sha256$"));
|
||||
|
||||
assert!(matches!(
|
||||
verify_access_token(&old_token, user.user_id, &db)
|
||||
verify_access_token(&old_token, user.user_id, db)
|
||||
.await
|
||||
.unwrap(),
|
||||
VerifyAccessTokenResult {
|
||||
|
@ -365,9 +363,7 @@ mod test {
|
|||
));
|
||||
|
||||
assert!(matches!(
|
||||
verify_access_token(&token, user.user_id, &db)
|
||||
.await
|
||||
.unwrap(),
|
||||
verify_access_token(&token, user.user_id, db).await.unwrap(),
|
||||
VerifyAccessTokenResult {
|
||||
is_valid: true,
|
||||
impersonator_id: None,
|
||||
|
|
|
@ -139,14 +139,12 @@ impl Database {
|
|||
let (tx, result) = self.with_weak_transaction(&f).await?;
|
||||
match result {
|
||||
Ok(result) => match tx.commit().await.map_err(Into::into) {
|
||||
Ok(()) => return Ok(result),
|
||||
Err(error) => {
|
||||
return Err(error);
|
||||
}
|
||||
Ok(()) => Ok(result),
|
||||
Err(error) => Err(error),
|
||||
},
|
||||
Err(error) => {
|
||||
tx.rollback().await?;
|
||||
return Err(error);
|
||||
Err(error)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -217,7 +215,7 @@ impl Database {
|
|||
F: Send + Fn(TransactionHandle) -> Fut,
|
||||
Fut: Send + Future<Output = Result<T>>,
|
||||
{
|
||||
let room_id = Database::room_id_for_project(&self, project_id).await?;
|
||||
let room_id = Database::room_id_for_project(self, project_id).await?;
|
||||
let body = async {
|
||||
let mut i = 0;
|
||||
loop {
|
||||
|
|
|
@ -218,9 +218,9 @@ impl From<proto::ChannelRole> for ChannelRole {
|
|||
}
|
||||
}
|
||||
|
||||
impl Into<proto::ChannelRole> for ChannelRole {
|
||||
fn into(self) -> proto::ChannelRole {
|
||||
match self {
|
||||
impl From<ChannelRole> for proto::ChannelRole {
|
||||
fn from(val: ChannelRole) -> Self {
|
||||
match val {
|
||||
ChannelRole::Admin => proto::ChannelRole::Admin,
|
||||
ChannelRole::Member => proto::ChannelRole::Member,
|
||||
ChannelRole::Talker => proto::ChannelRole::Talker,
|
||||
|
@ -230,9 +230,9 @@ impl Into<proto::ChannelRole> for ChannelRole {
|
|||
}
|
||||
}
|
||||
|
||||
impl Into<i32> for ChannelRole {
|
||||
fn into(self) -> i32 {
|
||||
let proto: proto::ChannelRole = self.into();
|
||||
impl From<ChannelRole> for i32 {
|
||||
fn from(val: ChannelRole) -> Self {
|
||||
let proto: proto::ChannelRole = val.into();
|
||||
proto.into()
|
||||
}
|
||||
}
|
||||
|
@ -259,18 +259,18 @@ impl From<proto::ChannelVisibility> for ChannelVisibility {
|
|||
}
|
||||
}
|
||||
|
||||
impl Into<proto::ChannelVisibility> for ChannelVisibility {
|
||||
fn into(self) -> proto::ChannelVisibility {
|
||||
match self {
|
||||
impl From<ChannelVisibility> for proto::ChannelVisibility {
|
||||
fn from(val: ChannelVisibility) -> Self {
|
||||
match val {
|
||||
ChannelVisibility::Public => proto::ChannelVisibility::Public,
|
||||
ChannelVisibility::Members => proto::ChannelVisibility::Members,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<i32> for ChannelVisibility {
|
||||
fn into(self) -> i32 {
|
||||
let proto: proto::ChannelVisibility = self.into();
|
||||
impl From<ChannelVisibility> for i32 {
|
||||
fn from(val: ChannelVisibility) -> Self {
|
||||
let proto: proto::ChannelVisibility = val.into();
|
||||
proto.into()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -391,7 +391,7 @@ impl Database {
|
|||
drop(rows);
|
||||
|
||||
if collaborators.is_empty() {
|
||||
self.snapshot_channel_buffer(channel_id, &tx).await?;
|
||||
self.snapshot_channel_buffer(channel_id, tx).await?;
|
||||
}
|
||||
|
||||
Ok(LeftChannelBuffer {
|
||||
|
|
|
@ -188,17 +188,16 @@ impl Database {
|
|||
.anyhow())?;
|
||||
}
|
||||
}
|
||||
} else if visibility == ChannelVisibility::Members {
|
||||
if self
|
||||
} else if visibility == ChannelVisibility::Members
|
||||
&& self
|
||||
.get_channel_descendants_excluding_self([&channel], &tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.any(|channel| channel.visibility == ChannelVisibility::Public)
|
||||
{
|
||||
Err(ErrorCode::BadPublicNesting
|
||||
.with_tag("direction", "children")
|
||||
.anyhow())?;
|
||||
}
|
||||
{
|
||||
Err(ErrorCode::BadPublicNesting
|
||||
.with_tag("direction", "children")
|
||||
.anyhow())?;
|
||||
}
|
||||
|
||||
let mut model = channel.into_active_model();
|
||||
|
@ -308,7 +307,7 @@ impl Database {
|
|||
|
||||
fn sanitize_channel_name(name: &str) -> Result<&str> {
|
||||
let new_name = name.trim().trim_start_matches('#');
|
||||
if new_name == "" {
|
||||
if new_name.is_empty() {
|
||||
Err(anyhow!("channel name can't be blank"))?;
|
||||
}
|
||||
Ok(new_name)
|
||||
|
@ -985,7 +984,7 @@ impl Database {
|
|||
.all(&*tx)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|c| Channel::from_model(c))
|
||||
.map(Channel::from_model)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok((root_id, channels))
|
||||
|
|
|
@ -86,14 +86,13 @@ impl Database {
|
|||
avoid_duplicates: bool,
|
||||
tx: &DatabaseTransaction,
|
||||
) -> Result<Option<(UserId, proto::Notification)>> {
|
||||
if avoid_duplicates {
|
||||
if self
|
||||
if avoid_duplicates
|
||||
&& self
|
||||
.find_notification(recipient_id, ¬ification, tx)
|
||||
.await?
|
||||
.is_some()
|
||||
{
|
||||
return Ok(None);
|
||||
}
|
||||
{
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let proto = notification.to_proto();
|
||||
|
|
|
@ -459,7 +459,7 @@ impl Database {
|
|||
.await?;
|
||||
}
|
||||
|
||||
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
|
||||
let (channel, room) = self.get_channel_room(room_id, tx).await?;
|
||||
let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;
|
||||
Ok(JoinRoom {
|
||||
room,
|
||||
|
@ -766,13 +766,13 @@ impl Database {
|
|||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
return Ok(Some(RejoinedProject {
|
||||
Ok(Some(RejoinedProject {
|
||||
id: project_id,
|
||||
old_connection_id,
|
||||
collaborators,
|
||||
worktrees,
|
||||
language_servers,
|
||||
}));
|
||||
}))
|
||||
}
|
||||
|
||||
pub async fn leave_room(
|
||||
|
@ -1108,15 +1108,14 @@ impl Database {
|
|||
.count(tx)
|
||||
.await?
|
||||
> 0;
|
||||
if requires_zed_cla {
|
||||
if contributor::Entity::find()
|
||||
if requires_zed_cla
|
||||
&& contributor::Entity::find()
|
||||
.filter(contributor::Column::UserId.eq(user_id))
|
||||
.one(tx)
|
||||
.await?
|
||||
.is_none()
|
||||
{
|
||||
Err(anyhow!("user has not signed the Zed CLA"))?;
|
||||
}
|
||||
{
|
||||
Err(anyhow!("user has not signed the Zed CLA"))?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
|
|
@ -48,7 +48,7 @@ impl Model {
|
|||
id: self.id.to_proto(),
|
||||
project_id: project.map(|p| p.id.to_proto()),
|
||||
dev_server_id: self.dev_server_id.to_proto(),
|
||||
path: self.paths().get(0).cloned().unwrap_or_default(),
|
||||
path: self.paths().first().cloned().unwrap_or_default(),
|
||||
paths: self.paths().clone(),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,19 +22,17 @@ async fn test_already_processed_stripe_event(db: &Arc<Database>) {
|
|||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
assert!(
|
||||
db.already_processed_stripe_event(&processed_event_id)
|
||||
.await
|
||||
.unwrap(),
|
||||
true,
|
||||
"Expected {processed_event_id} to already be processed"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
db.already_processed_stripe_event(&unprocessed_event_id)
|
||||
assert!(
|
||||
!db.already_processed_stripe_event(&unprocessed_event_id)
|
||||
.await
|
||||
.unwrap(),
|
||||
false,
|
||||
"Expected {unprocessed_event_id} to be unprocessed"
|
||||
);
|
||||
}
|
||||
|
|
|
@ -304,10 +304,7 @@ impl AppState {
|
|||
db: db.clone(),
|
||||
live_kit_client,
|
||||
blob_store_client: build_blob_store_client(&config).await.log_err(),
|
||||
stripe_client: build_stripe_client(&config)
|
||||
.await
|
||||
.map(|client| Arc::new(client))
|
||||
.log_err(),
|
||||
stripe_client: build_stripe_client(&config).await.map(Arc::new).log_err(),
|
||||
rate_limiter: Arc::new(RateLimiter::new(db)),
|
||||
executor,
|
||||
clickhouse_client: config
|
||||
|
|
|
@ -141,7 +141,7 @@ async fn validate_api_token<B>(mut req: Request<B>, next: Next<B>) -> impl IntoR
|
|||
})?;
|
||||
|
||||
let state = req.extensions().get::<Arc<LlmState>>().unwrap();
|
||||
match LlmTokenClaims::validate(&token, &state.config) {
|
||||
match LlmTokenClaims::validate(token, &state.config) {
|
||||
Ok(claims) => {
|
||||
if state.db.is_access_token_revoked(&claims.jti).await? {
|
||||
return Err(Error::http(
|
||||
|
@ -154,7 +154,7 @@ async fn validate_api_token<B>(mut req: Request<B>, next: Next<B>) -> impl IntoR
|
|||
.record("user_id", claims.user_id)
|
||||
.record("login", claims.github_user_login.clone())
|
||||
.record("authn.jti", &claims.jti)
|
||||
.record("is_staff", &claims.is_staff);
|
||||
.record("is_staff", claims.is_staff);
|
||||
|
||||
req.extensions_mut().insert(claims);
|
||||
Ok::<_, Error>(next.run(req).await.into_response())
|
||||
|
@ -247,7 +247,7 @@ async fn perform_completion(
|
|||
};
|
||||
|
||||
let mut request: anthropic::Request =
|
||||
serde_json::from_str(¶ms.provider_request.get())?;
|
||||
serde_json::from_str(params.provider_request.get())?;
|
||||
|
||||
// Override the model on the request with the latest version of the model that is
|
||||
// known to the server.
|
||||
|
@ -348,7 +348,7 @@ async fn perform_completion(
|
|||
&state.http_client,
|
||||
open_ai::OPEN_AI_API_URL,
|
||||
api_key,
|
||||
serde_json::from_str(¶ms.provider_request.get())?,
|
||||
serde_json::from_str(params.provider_request.get())?,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
@ -379,7 +379,7 @@ async fn perform_completion(
|
|||
&state.http_client,
|
||||
google_ai::API_URL,
|
||||
api_key,
|
||||
serde_json::from_str(¶ms.provider_request.get())?,
|
||||
serde_json::from_str(params.provider_request.get())?,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
@ -412,9 +412,9 @@ async fn perform_completion(
|
|||
.context("no Qwen2-7B URL configured on the server")?;
|
||||
let chunks = open_ai::stream_completion(
|
||||
&state.http_client,
|
||||
&api_url,
|
||||
api_url,
|
||||
api_key,
|
||||
serde_json::from_str(¶ms.provider_request.get())?,
|
||||
serde_json::from_str(params.provider_request.get())?,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
|
|
@ -26,19 +26,16 @@ fn authorize_access_to_model(
|
|||
return Ok(());
|
||||
}
|
||||
|
||||
match provider {
|
||||
LanguageModelProvider::Anthropic => {
|
||||
if model == "claude-3-5-sonnet" {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if claims.has_llm_closed_beta_feature_flag
|
||||
&& Some(model) == config.llm_closed_beta_model_name.as_deref()
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
if provider == LanguageModelProvider::Anthropic {
|
||||
if model == "claude-3-5-sonnet" {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if claims.has_llm_closed_beta_feature_flag
|
||||
&& Some(model) == config.llm_closed_beta_model_name.as_deref()
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Err(Error::http(
|
||||
|
|
|
@ -110,14 +110,12 @@ impl LlmDatabase {
|
|||
let (tx, result) = self.with_transaction(&f).await?;
|
||||
match result {
|
||||
Ok(result) => match tx.commit().await.map_err(Into::into) {
|
||||
Ok(()) => return Ok(result),
|
||||
Err(error) => {
|
||||
return Err(error);
|
||||
}
|
||||
Ok(()) => Ok(result),
|
||||
Err(error) => Err(error),
|
||||
},
|
||||
Err(error) => {
|
||||
tx.rollback().await?;
|
||||
return Err(error);
|
||||
Err(error)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -82,7 +82,7 @@ impl LlmDatabase {
|
|||
pub async fn insert_models(&mut self, models: &[ModelParams]) -> Result<()> {
|
||||
let all_provider_ids = &self.provider_ids;
|
||||
self.transaction(|tx| async move {
|
||||
model::Entity::insert_many(models.into_iter().map(|model_params| {
|
||||
model::Entity::insert_many(models.iter().map(|model_params| {
|
||||
let provider_id = all_provider_ids[&model_params.provider];
|
||||
model::ActiveModel {
|
||||
provider_id: ActiveValue::set(provider_id),
|
||||
|
|
|
@ -257,7 +257,7 @@ async fn setup_app_database(config: &Config) -> Result<()> {
|
|||
db.initialize_notification_kinds().await?;
|
||||
|
||||
if config.seed_path.is_some() {
|
||||
collab::seed::seed(&config, &db, false).await?;
|
||||
collab::seed::seed(config, &db, false).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
|
|
@ -100,7 +100,7 @@ impl RateLimiter {
|
|||
pub async fn save(&self) -> Result<()> {
|
||||
let mut buckets = Vec::new();
|
||||
self.dirty_buckets.retain(|key| {
|
||||
if let Some(bucket) = self.buckets.get(&key) {
|
||||
if let Some(bucket) = self.buckets.get(key) {
|
||||
buckets.push(crate::db::rate_buckets::Model {
|
||||
user_id: key.0,
|
||||
rate_limit_name: key.1.clone(),
|
||||
|
|
|
@ -115,16 +115,16 @@ impl Principal {
|
|||
fn update_span(&self, span: &tracing::Span) {
|
||||
match &self {
|
||||
Principal::User(user) => {
|
||||
span.record("user_id", &user.id.0);
|
||||
span.record("user_id", user.id.0);
|
||||
span.record("login", &user.github_login);
|
||||
}
|
||||
Principal::Impersonated { user, admin } => {
|
||||
span.record("user_id", &user.id.0);
|
||||
span.record("user_id", user.id.0);
|
||||
span.record("login", &user.github_login);
|
||||
span.record("impersonator", &admin.github_login);
|
||||
}
|
||||
Principal::DevServer(dev_server) => {
|
||||
span.record("dev_server_id", &dev_server.id.0);
|
||||
span.record("dev_server_id", dev_server.id.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -964,14 +964,10 @@ impl Server {
|
|||
}
|
||||
};
|
||||
|
||||
let supermaven_client = if let Some(supermaven_admin_api_key) = this.app_state.config.supermaven_admin_api_key.clone() {
|
||||
Some(Arc::new(SupermavenAdminApi::new(
|
||||
let supermaven_client = this.app_state.config.supermaven_admin_api_key.clone().map(|supermaven_admin_api_key| Arc::new(SupermavenAdminApi::new(
|
||||
supermaven_admin_api_key.to_string(),
|
||||
http_client.clone(),
|
||||
)))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
)));
|
||||
|
||||
let session = Session {
|
||||
principal: principal.clone(),
|
||||
|
@ -1126,7 +1122,7 @@ impl Server {
|
|||
self.peer.send(connection_id, incoming_call)?;
|
||||
}
|
||||
|
||||
update_user_contacts(user.id, &session).await?;
|
||||
update_user_contacts(user.id, session).await?;
|
||||
}
|
||||
Principal::DevServer(dev_server) => {
|
||||
{
|
||||
|
@ -1159,7 +1155,7 @@ impl Server {
|
|||
.db
|
||||
.dev_server_projects_update(dev_server.user_id)
|
||||
.await?;
|
||||
send_dev_server_projects_update(dev_server.user_id, status, &session).await;
|
||||
send_dev_server_projects_update(dev_server.user_id, status, session).await;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1564,21 +1560,17 @@ async fn join_room(
|
|||
|
||||
let live_kit_connection_info =
|
||||
if let Some(live_kit) = session.app_state.live_kit_client.as_ref() {
|
||||
if let Some(token) = live_kit
|
||||
live_kit
|
||||
.room_token(
|
||||
&joined_room.room.live_kit_room,
|
||||
&session.user_id().to_string(),
|
||||
)
|
||||
.trace_err()
|
||||
{
|
||||
Some(proto::LiveKitConnectionInfo {
|
||||
.map(|token| proto::LiveKitConnectionInfo {
|
||||
server_url: live_kit.url().into(),
|
||||
token,
|
||||
can_publish: true,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
@ -1863,7 +1855,7 @@ async fn call(
|
|||
initial_project_id,
|
||||
)
|
||||
.await?;
|
||||
room_updated(&room, &session.peer);
|
||||
room_updated(room, &session.peer);
|
||||
mem::take(incoming_call)
|
||||
};
|
||||
update_user_contacts(called_user_id, &session).await?;
|
||||
|
@ -2006,13 +1998,13 @@ async fn share_project(
|
|||
&request.worktrees,
|
||||
request
|
||||
.dev_server_project_id
|
||||
.map(|id| DevServerProjectId::from_proto(id)),
|
||||
.map(DevServerProjectId::from_proto),
|
||||
)
|
||||
.await?;
|
||||
response.send(proto::ShareProjectResponse {
|
||||
project_id: project_id.to_proto(),
|
||||
})?;
|
||||
room_updated(&room, &session.peer);
|
||||
room_updated(room, &session.peer);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -2269,9 +2261,9 @@ async fn leave_project(request: proto::LeaveProject, session: UserSession) -> Re
|
|||
"leave project"
|
||||
);
|
||||
|
||||
project_left(&project, &session);
|
||||
project_left(project, &session);
|
||||
if let Some(room) = room {
|
||||
room_updated(&room, &session.peer);
|
||||
room_updated(room, &session.peer);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -2753,7 +2745,7 @@ async fn shutdown_dev_server_internal(
|
|||
.await
|
||||
.dev_server_projects_update(dev_server.user_id)
|
||||
.await?;
|
||||
send_dev_server_projects_update(dev_server.user_id, status, &session).await;
|
||||
send_dev_server_projects_update(dev_server.user_id, status, session).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -2795,7 +2787,7 @@ async fn update_project(
|
|||
},
|
||||
);
|
||||
if let Some(room) = room {
|
||||
room_updated(&room, &session.peer);
|
||||
room_updated(room, &session.peer);
|
||||
}
|
||||
response.send(proto::Ack {})?;
|
||||
|
||||
|
@ -3562,7 +3554,7 @@ async fn create_channel(
|
|||
) -> Result<()> {
|
||||
let db = session.db().await;
|
||||
|
||||
let parent_id = request.parent_id.map(|id| ChannelId::from_proto(id));
|
||||
let parent_id = request.parent_id.map(ChannelId::from_proto);
|
||||
let (channel, membership) = db
|
||||
.create_channel(&request.name, parent_id, session.user_id())
|
||||
.await?;
|
||||
|
@ -4284,10 +4276,7 @@ async fn send_channel_message(
|
|||
&request.mentions,
|
||||
timestamp,
|
||||
nonce.clone().into(),
|
||||
match request.reply_to_message_id {
|
||||
Some(reply_to_message_id) => Some(MessageId::from_proto(reply_to_message_id)),
|
||||
None => None,
|
||||
},
|
||||
request.reply_to_message_id.map(MessageId::from_proto),
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
@ -4848,9 +4837,7 @@ async fn get_notifications(
|
|||
.get_notifications(
|
||||
session.user_id(),
|
||||
NOTIFICATION_COUNT_PER_PAGE,
|
||||
request
|
||||
.before_id
|
||||
.map(|id| db::NotificationId::from_proto(id)),
|
||||
request.before_id.map(db::NotificationId::from_proto),
|
||||
)
|
||||
.await?;
|
||||
response.send(proto::GetNotificationsResponse {
|
||||
|
@ -5104,7 +5091,7 @@ fn build_initial_contacts_update(
|
|||
for contact in contacts {
|
||||
match contact {
|
||||
db::Contact::Accepted { user_id, busy } => {
|
||||
update.contacts.push(contact_for_user(user_id, busy, &pool));
|
||||
update.contacts.push(contact_for_user(user_id, busy, pool));
|
||||
}
|
||||
db::Contact::Outgoing { user_id } => update.outgoing_requests.push(user_id.to_proto()),
|
||||
db::Contact::Incoming { user_id } => {
|
||||
|
@ -5161,7 +5148,8 @@ fn channel_updated(
|
|||
None,
|
||||
pool.channel_connection_ids(channel.root_id())
|
||||
.filter_map(|(channel_id, role)| {
|
||||
role.can_see_channel(channel.visibility).then(|| channel_id)
|
||||
role.can_see_channel(channel.visibility)
|
||||
.then_some(channel_id)
|
||||
}),
|
||||
|peer_id| {
|
||||
peer.send(
|
||||
|
@ -5239,7 +5227,7 @@ async fn lost_dev_server_connection(session: &DevServerSession) -> Result<()> {
|
|||
|
||||
for project_id in project_ids {
|
||||
// not unshare re-checks the connection ids match, so we get away with no transaction
|
||||
unshare_project_internal(project_id, session.connection_id, None, &session).await?;
|
||||
unshare_project_internal(project_id, session.connection_id, None, session).await?;
|
||||
}
|
||||
|
||||
let user_id = session.dev_server().user_id;
|
||||
|
@ -5311,7 +5299,7 @@ async fn leave_room_for_session(session: &UserSession, connection_id: Connection
|
|||
}
|
||||
|
||||
for contact_user_id in contacts_to_update {
|
||||
update_user_contacts(contact_user_id, &session).await?;
|
||||
update_user_contacts(contact_user_id, session).await?;
|
||||
}
|
||||
|
||||
if let Some(live_kit) = session.app_state.live_kit_client.as_ref() {
|
||||
|
|
|
@ -236,7 +236,7 @@ impl ConnectionPool {
|
|||
}
|
||||
PrincipalId::DevServerId(dev_server_id) => {
|
||||
assert_eq!(
|
||||
self.connected_dev_servers.get(&dev_server_id).unwrap(),
|
||||
self.connected_dev_servers.get(dev_server_id).unwrap(),
|
||||
connection_id
|
||||
);
|
||||
}
|
||||
|
@ -300,9 +300,9 @@ impl ChannelPool {
|
|||
}
|
||||
|
||||
pub fn remove_user(&mut self, user_id: &UserId) {
|
||||
if let Some(channels) = self.by_user.remove(&user_id) {
|
||||
if let Some(channels) = self.by_user.remove(user_id) {
|
||||
for channel_id in channels.keys() {
|
||||
self.unsubscribe(user_id, &channel_id)
|
||||
self.unsubscribe(user_id, channel_id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -85,7 +85,7 @@ async fn test_core_channel_buffers(
|
|||
|
||||
// Client B sees that client A is gone from the channel buffer.
|
||||
channel_buffer_b.read_with(cx_b, |buffer, _| {
|
||||
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
|
||||
assert_collaborators(buffer.collaborators(), &[client_b.user_id()]);
|
||||
});
|
||||
|
||||
// Client A rejoins the channel buffer
|
||||
|
@ -99,7 +99,7 @@ async fn test_core_channel_buffers(
|
|||
// Sanity test, make sure we saw A rejoining
|
||||
channel_buffer_b.read_with(cx_b, |buffer, _| {
|
||||
assert_collaborators(
|
||||
&buffer.collaborators(),
|
||||
buffer.collaborators(),
|
||||
&[client_a.user_id(), client_b.user_id()],
|
||||
);
|
||||
});
|
||||
|
@ -111,7 +111,7 @@ async fn test_core_channel_buffers(
|
|||
|
||||
// Client B observes A disconnect
|
||||
channel_buffer_b.read_with(cx_b, |buffer, _| {
|
||||
assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
|
||||
assert_collaborators(buffer.collaborators(), &[client_b.user_id()]);
|
||||
});
|
||||
|
||||
// TODO:
|
||||
|
@ -687,7 +687,7 @@ fn assert_collaborators(collaborators: &HashMap<PeerId, Collaborator>, ids: &[Op
|
|||
user_ids.sort();
|
||||
assert_eq!(
|
||||
user_ids,
|
||||
ids.into_iter().map(|id| id.unwrap()).collect::<Vec<_>>()
|
||||
ids.iter().map(|id| id.unwrap()).collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -356,7 +356,7 @@ async fn test_channel_message_changes(
|
|||
let project_b = client_b.build_empty_local_project(cx_b);
|
||||
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
|
||||
|
||||
let chat_panel_b = workspace_b.update(cx_b, |workspace, cx| ChatPanel::new(workspace, cx));
|
||||
let chat_panel_b = workspace_b.update(cx_b, ChatPanel::new);
|
||||
chat_panel_b
|
||||
.update(cx_b, |chat_panel, cx| {
|
||||
chat_panel.set_active(true, cx);
|
||||
|
|
|
@ -96,7 +96,7 @@ async fn test_dev_server(cx: &mut gpui::TestAppContext, cx2: &mut gpui::TestAppC
|
|||
|
||||
let content = dev_server
|
||||
.fs()
|
||||
.load(&Path::new("/remote/1.txt"))
|
||||
.load(Path::new("/remote/1.txt"))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(content, "wow!remote\nremote\nremote\n");
|
||||
|
@ -599,7 +599,7 @@ async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::Tes
|
|||
let title = remote_workspace
|
||||
.update(&mut cx, |ws, cx| {
|
||||
let active_item = ws.active_item(cx).unwrap();
|
||||
active_item.tab_description(0, &cx).unwrap()
|
||||
active_item.tab_description(0, cx).unwrap()
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
|
@ -607,7 +607,7 @@ async fn test_save_as_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::Tes
|
|||
|
||||
let path = Path::new("/remote/2.txt");
|
||||
assert_eq!(
|
||||
dev_server.fs().load(&path).await.unwrap(),
|
||||
dev_server.fs().load(path).await.unwrap(),
|
||||
"remote\nremote\nremote"
|
||||
);
|
||||
}
|
||||
|
@ -632,12 +632,12 @@ async fn test_new_file_remote(cx1: &mut gpui::TestAppContext, cx2: &mut gpui::Te
|
|||
|
||||
let title = remote_workspace
|
||||
.update(&mut cx, |ws, cx| {
|
||||
ws.active_item(cx).unwrap().tab_description(0, &cx).unwrap()
|
||||
ws.active_item(cx).unwrap().tab_description(0, cx).unwrap()
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(title, "2.txt");
|
||||
|
||||
let path = Path::new("/remote/2.txt");
|
||||
assert_eq!(dev_server.fs().load(&path).await.unwrap(), "new!");
|
||||
assert_eq!(dev_server.fs().load(path).await.unwrap(), "new!");
|
||||
}
|
||||
|
|
|
@ -2109,7 +2109,7 @@ struct Row10;"#};
|
|||
editor_cx_a.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
|
@ -2146,7 +2146,7 @@ struct Row10;"#};
|
|||
editor_cx_b.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(1)..=DisplayRow(2), DisplayRow(8)..=DisplayRow(8)],
|
||||
|
@ -2194,7 +2194,7 @@ struct Row10;"#};
|
|||
editor_cx_a.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
|
@ -2209,7 +2209,7 @@ struct Row10;"#};
|
|||
editor_cx_b.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(5)..=DisplayRow(5)]
|
||||
|
|
|
@ -506,7 +506,7 @@ async fn test_basic_following(
|
|||
|
||||
// Client B activates an item that doesn't implement following,
|
||||
// so the previously-opened screen-sharing item gets activated.
|
||||
let unfollowable_item = cx_b.new_view(|cx| TestItem::new(cx));
|
||||
let unfollowable_item = cx_b.new_view(TestItem::new);
|
||||
workspace_b.update(cx_b, |workspace, cx| {
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
|
||||
|
|
|
@ -538,10 +538,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
|
|||
|
||||
// Leave the room
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| {
|
||||
let hang_up = call.hang_up(cx);
|
||||
hang_up
|
||||
})
|
||||
.update(cx_a, |call, cx| call.hang_up(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
@ -574,10 +571,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
|
|||
|
||||
// Leave the room
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| {
|
||||
let hang_up = call.hang_up(cx);
|
||||
hang_up
|
||||
})
|
||||
.update(cx_a, |call, cx| call.hang_up(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
@ -2578,7 +2572,7 @@ async fn test_git_diff_base_change(
|
|||
);
|
||||
git::diff::assert_hunks(
|
||||
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
|
||||
&buffer,
|
||||
buffer,
|
||||
&diff_base,
|
||||
&[(1..2, "", "two\n")],
|
||||
);
|
||||
|
@ -2602,7 +2596,7 @@ async fn test_git_diff_base_change(
|
|||
);
|
||||
git::diff::assert_hunks(
|
||||
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
|
||||
&buffer,
|
||||
buffer,
|
||||
&diff_base,
|
||||
&[(1..2, "", "two\n")],
|
||||
);
|
||||
|
@ -2626,7 +2620,7 @@ async fn test_git_diff_base_change(
|
|||
|
||||
git::diff::assert_hunks(
|
||||
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
|
||||
&buffer,
|
||||
buffer,
|
||||
&diff_base,
|
||||
&[(2..3, "", "three\n")],
|
||||
);
|
||||
|
@ -2641,7 +2635,7 @@ async fn test_git_diff_base_change(
|
|||
);
|
||||
git::diff::assert_hunks(
|
||||
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
|
||||
&buffer,
|
||||
buffer,
|
||||
&diff_base,
|
||||
&[(2..3, "", "three\n")],
|
||||
);
|
||||
|
@ -2684,7 +2678,7 @@ async fn test_git_diff_base_change(
|
|||
);
|
||||
git::diff::assert_hunks(
|
||||
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
|
||||
&buffer,
|
||||
buffer,
|
||||
&diff_base,
|
||||
&[(1..2, "", "two\n")],
|
||||
);
|
||||
|
@ -2708,7 +2702,7 @@ async fn test_git_diff_base_change(
|
|||
);
|
||||
git::diff::assert_hunks(
|
||||
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
|
||||
&buffer,
|
||||
buffer,
|
||||
&diff_base,
|
||||
&[(1..2, "", "two\n")],
|
||||
);
|
||||
|
@ -2741,7 +2735,7 @@ async fn test_git_diff_base_change(
|
|||
|
||||
git::diff::assert_hunks(
|
||||
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
|
||||
&buffer,
|
||||
buffer,
|
||||
&diff_base,
|
||||
&[(2..3, "", "three\n")],
|
||||
);
|
||||
|
@ -2756,7 +2750,7 @@ async fn test_git_diff_base_change(
|
|||
);
|
||||
git::diff::assert_hunks(
|
||||
buffer.snapshot().git_diff_hunks_in_row_range(0..4),
|
||||
&buffer,
|
||||
buffer,
|
||||
&diff_base,
|
||||
&[(2..3, "", "three\n")],
|
||||
);
|
||||
|
@ -2884,8 +2878,8 @@ async fn test_git_status_sync(
|
|||
client_a.fs().set_status_for_repo_via_git_operation(
|
||||
Path::new("/dir/.git"),
|
||||
&[
|
||||
(&Path::new(A_TXT), GitFileStatus::Added),
|
||||
(&Path::new(B_TXT), GitFileStatus::Added),
|
||||
(Path::new(A_TXT), GitFileStatus::Added),
|
||||
(Path::new(B_TXT), GitFileStatus::Added),
|
||||
],
|
||||
);
|
||||
|
||||
|
@ -2932,8 +2926,8 @@ async fn test_git_status_sync(
|
|||
client_a.fs().set_status_for_repo_via_working_copy_change(
|
||||
Path::new("/dir/.git"),
|
||||
&[
|
||||
(&Path::new(A_TXT), GitFileStatus::Modified),
|
||||
(&Path::new(B_TXT), GitFileStatus::Modified),
|
||||
(Path::new(A_TXT), GitFileStatus::Modified),
|
||||
(Path::new(B_TXT), GitFileStatus::Modified),
|
||||
],
|
||||
);
|
||||
|
||||
|
@ -6336,7 +6330,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
|
|||
|
||||
// Close permanent tab
|
||||
pane.update(cx, |pane, cx| {
|
||||
let id = pane.items().nth(0).unwrap().item_id();
|
||||
let id = pane.items().next().unwrap().item_id();
|
||||
pane.close_item_by_id(id, workspace::SaveIntent::Skip, cx)
|
||||
})
|
||||
.await
|
||||
|
@ -6347,7 +6341,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
|
|||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(0).unwrap().item_id())
|
||||
Some(pane.items().next().unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
|
@ -6366,7 +6360,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
|
|||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(0).unwrap().item_id())
|
||||
Some(pane.items().next().unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
|
@ -6395,7 +6389,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
|
|||
assert_eq!(get_path(pane, 0, cx), path_1.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(0).unwrap().item_id())
|
||||
Some(pane.items().next().unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
|
@ -6433,7 +6427,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
|
|||
assert_eq!(get_path(pane, 0, cx), path_2.clone());
|
||||
assert_eq!(
|
||||
pane.preview_item_id(),
|
||||
Some(pane.items().nth(0).unwrap().item_id())
|
||||
Some(pane.items().next().unwrap().item_id())
|
||||
);
|
||||
|
||||
assert!(pane.can_navigate_backward());
|
||||
|
|
|
@ -282,7 +282,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
|||
let mut paths = client.fs().paths(false);
|
||||
paths.remove(0);
|
||||
let new_root_path = if paths.is_empty() || rng.gen() {
|
||||
Path::new("/").join(&plan.next_root_dir_name())
|
||||
Path::new("/").join(plan.next_root_dir_name())
|
||||
} else {
|
||||
paths.choose(rng).unwrap().clone()
|
||||
};
|
||||
|
|
|
@ -263,8 +263,7 @@ impl TestServer {
|
|||
})
|
||||
});
|
||||
|
||||
let git_hosting_provider_registry =
|
||||
cx.update(|cx| GitHostingProviderRegistry::default_global(cx));
|
||||
let git_hosting_provider_registry = cx.update(GitHostingProviderRegistry::default_global);
|
||||
git_hosting_provider_registry
|
||||
.register_hosting_provider(Arc::new(git_hosting_providers::Github));
|
||||
|
||||
|
|
|
@ -227,7 +227,7 @@ impl ChannelView {
|
|||
{
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(Some(Autoscroll::focused()), cx, |s| {
|
||||
s.replace_cursors_with(|map| vec![item.range.start.to_display_point(&map)])
|
||||
s.replace_cursors_with(|map| vec![item.range.start.to_display_point(map)])
|
||||
})
|
||||
});
|
||||
return;
|
||||
|
@ -460,8 +460,7 @@ impl Item for ChannelView {
|
|||
}
|
||||
|
||||
fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| Item::deactivated(editor, cx))
|
||||
self.editor.update(cx, Item::deactivated)
|
||||
}
|
||||
|
||||
fn set_nav_history(&mut self, history: ItemNavHistory, cx: &mut ViewContext<Self>) {
|
||||
|
|
|
@ -560,7 +560,7 @@ impl ChatPanel {
},
)
.child(
self.render_popover_buttons(&cx, message_id, can_delete_message, can_edit_message)
self.render_popover_buttons(cx, message_id, can_delete_message, can_edit_message)
.mt_neg_2p5(),
)
}

@ -705,7 +705,7 @@ impl ChatPanel {
menu.entry(
"Copy message text",
None,
cx.handler_for(&this, move |this, cx| {
cx.handler_for(this, move |this, cx| {
if let Some(message) = this.active_chat().and_then(|active_chat| {
active_chat.read(cx).find_loaded_message(message_id)
}) {

@ -718,7 +718,7 @@ impl ChatPanel {
menu.entry(
"Delete message",
None,
cx.handler_for(&this, move |this, cx| this.remove_message(message_id, cx)),
cx.handler_for(this, move |this, cx| this.remove_message(message_id, cx)),
)
})
})

@ -802,13 +802,11 @@ impl ChatPanel {
{
task.detach();
}
} else {
if let Some(task) = chat
.update(cx, |chat, cx| chat.send_message(message, cx))
.log_err()
{
task.detach();
}
} else if let Some(task) = chat
.update(cx, |chat, cx| chat.send_message(message, cx))
.log_err()
{
task.detach();
}
}
}

@ -854,7 +852,7 @@ impl ChatPanel {
let scroll_to_message_id = this.update(&mut cx, |this, cx| {
this.set_active_chat(chat.clone(), cx);

scroll_to_message_id.or_else(|| this.last_acknowledged_message_id)
scroll_to_message_id.or(this.last_acknowledged_message_id)
})?;

if let Some(message_id) = scroll_to_message_id {
@ -293,8 +293,8 @@ impl MessageEditor {
completion_fn: impl Fn(&StringMatch) -> (String, CodeLabel),
) -> Vec<Completion> {
let matches = fuzzy::match_strings(
&candidates,
&query,
candidates,
query,
true,
10,
&Default::default(),
@ -219,7 +219,7 @@ impl CollabPanel {
})
.detach();

let channel_name_editor = cx.new_view(|cx| Editor::single_line(cx));
let channel_name_editor = cx.new_view(Editor::single_line);

cx.subscribe(&channel_name_editor, |this: &mut Self, _, event, cx| {
if let editor::EditorEvent::Blurred = event {

@ -328,7 +328,7 @@ impl CollabPanel {
panel.width = serialized_panel.width.map(|w| w.round());
panel.collapsed_channels = serialized_panel
.collapsed_channels
.unwrap_or_else(|| Vec::new())
.unwrap_or_else(Vec::new)
.iter()
.map(|cid| ChannelId(*cid))
.collect();

@ -955,7 +955,7 @@ impl CollabPanel {
}

fn take_editing_state(&mut self, cx: &mut ViewContext<Self>) -> bool {
if let Some(_) = self.channel_editing_state.take() {
if self.channel_editing_state.take().is_some() {
self.channel_name_editor.update(cx, |editor, cx| {
editor.set_text("", cx);
});

@ -1850,8 +1850,7 @@ impl CollabPanel {
if let Some(contact) = self.selected_contact() {
self.deploy_contact_context_menu(bounds.center(), contact, cx);
cx.stop_propagation();
return;
};
}
}

fn selected_channel(&self) -> Option<&Arc<Channel>> {

@ -2142,7 +2141,7 @@ impl CollabPanel {
} => self
.render_participant_project(
*project_id,
&worktree_root_names,
worktree_root_names,
*host_user_id,
*is_last,
is_selected,

@ -2401,7 +2400,7 @@ impl CollabPanel {
) -> impl IntoElement {
let github_login = SharedString::from(user.github_login.clone());
let user_id = user.id;
let is_response_pending = self.user_store.read(cx).is_contact_request_pending(&user);
let is_response_pending = self.user_store.read(cx).is_contact_request_pending(user);
let color = if is_response_pending {
Color::Muted
} else {

@ -2457,7 +2456,7 @@ impl CollabPanel {
let response_is_pending = self
.channel_store
.read(cx)
.has_pending_channel_invite_response(&channel);
.has_pending_channel_invite_response(channel);
let color = if response_is_pending {
Color::Muted
} else {

@ -2832,7 +2831,7 @@ impl Panel for CollabPanel {
fn icon(&self, cx: &gpui::WindowContext) -> Option<ui::IconName> {
CollaborationPanelSettings::get_global(cx)
.button
.then(|| ui::IconName::Collab)
.then_some(ui::IconName::Collab)
}

fn icon_tooltip(&self, _cx: &WindowContext) -> Option<&'static str> {
@ -309,7 +309,7 @@ impl PickerDelegate for ChannelModalDelegate {
let members = search_members.await?;
picker.update(&mut cx, |picker, cx| {
picker.delegate.has_all_members =
query == "" && members.len() < 100;
query.is_empty() && members.len() < 100;
picker.delegate.matching_member_indices =
(0..members.len()).collect();
picker.delegate.members = members;

@ -154,7 +154,7 @@ impl PickerDelegate for ContactFinderDelegate {
.selected(selected)
.start_slot(Avatar::new(user.avatar_uri.clone()))
.child(Label::new(user.github_login.clone()))
.end_slot::<Icon>(icon_path.map(|icon_path| Icon::from_path(icon_path))),
.end_slot::<Icon>(icon_path.map(Icon::from_path)),
)
}
}
@ -31,7 +31,7 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
chat_panel::init(cx);
collab_panel::init(cx);
notification_panel::init(cx);
notifications::init(&app_state, cx);
notifications::init(app_state, cx);
title_bar::init(cx);
vcs_menu::init(cx);
}

@ -92,7 +92,7 @@ impl NotificationPanel {
cx.new_view(|cx: &mut ViewContext<Self>| {
let mut status = client.status();
cx.spawn(|this, mut cx| async move {
while let Some(_) = status.next().await {
while (status.next().await).is_some() {
if this
.update(&mut cx, |_, cx| {
cx.notify();
@ -52,7 +52,7 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
room::Event::RemoteProjectUnshared { project_id }
| room::Event::RemoteProjectJoined { project_id }
| room::Event::RemoteProjectInvitationDiscarded { project_id } => {
if let Some(windows) = notification_windows.remove(&project_id) {
if let Some(windows) = notification_windows.remove(project_id) {
for window in windows {
window
.update(cx, |_, cx| {

@ -1,4 +1,3 @@
use anyhow;
use gpui::Pixels;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
@ -273,7 +273,7 @@ impl PickerDelegate for CommandPaletteDelegate {
let mut commands = self.all_commands.clone();
let hit_counts = cx.global::<HitCounts>().clone();
let executor = cx.background_executor().clone();
let query = trim_consecutive_whitespaces(&query.as_str());
let query = trim_consecutive_whitespaces(query.as_str());
async move {
commands.sort_by_key(|action| {
(

@ -303,7 +303,7 @@ impl PickerDelegate for CommandPaletteDelegate {
})
.collect()
} else {
let ret = fuzzy::match_strings(
fuzzy::match_strings(
&candidates,
&query,
true,

@ -311,8 +311,7 @@ impl PickerDelegate for CommandPaletteDelegate {
&Default::default(),
executor,
)
.await;
ret
.await
};

tx.send((commands, matches)).await.log_err();

@ -118,9 +118,7 @@ impl CommandPaletteInterceptor {

/// Intercepts the given query from the command palette.
pub fn intercept(&self, query: &str, cx: &AppContext) -> Option<CommandInterceptResult> {
let Some(handler) = self.0.as_ref() else {
return None;
};
let handler = self.0.as_ref()?;

(handler)(query, cx)
}
@ -235,13 +235,13 @@ impl Client {
let content = buffer.trim();

if !content.is_empty() {
if let Ok(response) = serde_json::from_str::<AnyResponse>(&content) {
if let Ok(response) = serde_json::from_str::<AnyResponse>(content) {
if let Some(handlers) = response_handlers.lock().as_mut() {
if let Some(handler) = handlers.remove(&response.id) {
handler(Ok(content.to_string()));
}
}
} else if let Ok(notification) = serde_json::from_str::<AnyNotification>(&content) {
} else if let Ok(notification) = serde_json::from_str::<AnyNotification>(content) {
let mut notification_handlers = notification_handlers.lock();
if let Some(handler) =
notification_handlers.get_mut(notification.method.as_str())
@ -1,5 +1,4 @@
use gpui::{actions, AppContext, Context, ViewContext};
use log;
use manager::ContextServerManager;
use workspace::Workspace;

@ -27,7 +26,7 @@ pub fn init(cx: &mut AppContext) {
}

fn restart_servers(_workspace: &mut Workspace, _action: &Restart, cx: &mut ViewContext<Workspace>) {
let model = ContextServerManager::global(&cx);
let model = ContextServerManager::global(cx);
cx.update_model(&model, |manager, cx| {
for server in manager.servers() {
manager

@ -125,6 +125,12 @@ pub enum Event {
impl Global for ContextServerManager {}
impl EventEmitter<Event> for ContextServerManager {}

impl Default for ContextServerManager {
fn default() -> Self {
Self::new()
}
}

impl ContextServerManager {
pub fn new() -> Self {
Self {
@ -422,90 +422,86 @@ impl Copilot {
(this, fake_server)
}

fn start_language_server(
async fn start_language_server(
new_server_id: LanguageServerId,
http: Arc<dyn HttpClient>,
node_runtime: Arc<dyn NodeRuntime>,
this: WeakModel<Self>,
mut cx: AsyncAppContext,
) -> impl Future<Output = ()> {
async move {
let start_language_server = async {
let server_path = get_copilot_lsp(http).await?;
let node_path = node_runtime.binary_path().await?;
let arguments: Vec<OsString> = vec![server_path.into(), "--stdio".into()];
let binary = LanguageServerBinary {
path: node_path,
arguments,
// TODO: We could set HTTP_PROXY etc here and fix the copilot issue.
env: None,
};

let root_path = if cfg!(target_os = "windows") {
Path::new("C:/")
} else {
Path::new("/")
};

let server = LanguageServer::new(
Arc::new(Mutex::new(None)),
new_server_id,
binary,
root_path,
None,
cx.clone(),
)?;

server
.on_notification::<StatusNotification, _>(
|_, _| { /* Silence the notification */ },
)
.detach();
let server = cx.update(|cx| server.initialize(None, cx))?.await?;

let status = server
.request::<request::CheckStatus>(request::CheckStatusParams {
local_checks_only: false,
})
.await?;

server
.request::<request::SetEditorInfo>(request::SetEditorInfoParams {
editor_info: request::EditorInfo {
name: "zed".into(),
version: env!("CARGO_PKG_VERSION").into(),
},
editor_plugin_info: request::EditorPluginInfo {
name: "zed-copilot".into(),
version: "0.0.1".into(),
},
})
.await?;

anyhow::Ok((server, status))
) {
let start_language_server = async {
let server_path = get_copilot_lsp(http).await?;
let node_path = node_runtime.binary_path().await?;
let arguments: Vec<OsString> = vec![server_path.into(), "--stdio".into()];
let binary = LanguageServerBinary {
path: node_path,
arguments,
// TODO: We could set HTTP_PROXY etc here and fix the copilot issue.
env: None,
};

let server = start_language_server.await;
this.update(&mut cx, |this, cx| {
cx.notify();
match server {
Ok((server, status)) => {
this.server = CopilotServer::Running(RunningCopilotServer {
lsp: server,
sign_in_status: SignInStatus::SignedOut,
registered_buffers: Default::default(),
});
cx.emit(Event::CopilotLanguageServerStarted);
this.update_sign_in_status(status, cx);
}
Err(error) => {
this.server = CopilotServer::Error(error.to_string().into());
cx.notify()
}
let root_path = if cfg!(target_os = "windows") {
Path::new("C:/")
} else {
Path::new("/")
};

let server = LanguageServer::new(
Arc::new(Mutex::new(None)),
new_server_id,
binary,
root_path,
None,
cx.clone(),
)?;

server
.on_notification::<StatusNotification, _>(|_, _| { /* Silence the notification */ })
.detach();
let server = cx.update(|cx| server.initialize(None, cx))?.await?;

let status = server
.request::<request::CheckStatus>(request::CheckStatusParams {
local_checks_only: false,
})
.await?;

server
.request::<request::SetEditorInfo>(request::SetEditorInfoParams {
editor_info: request::EditorInfo {
name: "zed".into(),
version: env!("CARGO_PKG_VERSION").into(),
},
editor_plugin_info: request::EditorPluginInfo {
name: "zed-copilot".into(),
version: "0.0.1".into(),
},
})
.await?;

anyhow::Ok((server, status))
};

let server = start_language_server.await;
this.update(&mut cx, |this, cx| {
cx.notify();
match server {
Ok((server, status)) => {
this.server = CopilotServer::Running(RunningCopilotServer {
lsp: server,
sign_in_status: SignInStatus::SignedOut,
registered_buffers: Default::default(),
});
cx.emit(Event::CopilotLanguageServerStarted);
this.update_sign_in_status(status, cx);
}
})
.ok();
}
Err(error) => {
this.server = CopilotServer::Error(error.to_string().into());
cx.notify()
}
}
})
.ok();
}

pub fn sign_in(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {

@ -1005,7 +1001,7 @@ async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {

let url = &release
.assets
.get(0)
.first()
.context("Github release for copilot contained no assets")?
.browser_download_url;
@ -15,9 +15,8 @@ use settings::watch_config_file;
use strum::EnumIter;
use ui::Context;

pub const COPILOT_CHAT_COMPLETION_URL: &'static str =
"https://api.githubcopilot.com/chat/completions";
pub const COPILOT_CHAT_AUTH_URL: &'static str = "https://api.github.com/copilot_internal/v2/token";
pub const COPILOT_CHAT_COMPLETION_URL: &str = "https://api.githubcopilot.com/chat/completions";
pub const COPILOT_CHAT_AUTH_URL: &str = "https://api.github.com/copilot_internal/v2/token";

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
@ -145,7 +145,7 @@ impl InlineCompletionProvider for CopilotCompletionProvider {
};
}
Direction::Next => {
if self.completions.len() == 0 {
if self.completions.is_empty() {
self.active_completion_index = 0
} else {
self.active_completion_index =

@ -221,15 +221,13 @@ impl InlineCompletionProvider for CopilotCompletionProvider {
})
.detach_and_log_err(cx);

if should_report_inline_completion_event {
if self.active_completion().is_some() {
if let Some(telemetry) = self.telemetry.as_ref() {
telemetry.report_inline_completion_event(
Self::name().to_string(),
false,
self.file_extension.clone(),
);
}
if should_report_inline_completion_event && self.active_completion().is_some() {
if let Some(telemetry) = self.telemetry.as_ref() {
telemetry.report_inline_completion_event(
Self::name().to_string(),
false,
self.file_extension.clone(),
);
}
}
}

@ -1148,7 +1146,7 @@ mod tests {
}

fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) {
_ = cx.update(|cx| {
cx.update(|cx| {
let store = SettingsStore::test(cx);
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
@ -167,7 +167,7 @@ impl Render for CopilotCodeVerification {
let prompt = match &self.status {
Status::SigningIn {
prompt: Some(prompt),
} => Self::render_prompting_modal(self.connect_clicked, &prompt, cx).into_any_element(),
} => Self::render_prompting_modal(self.connect_clicked, prompt, cx).into_any_element(),
Status::Unauthorized => {
self.connect_clicked = false;
Self::render_unauthorized_modal(cx).into_any_element()
@ -18,7 +18,7 @@ use sqlez::thread_safe_connection::ThreadSafeConnection;
use sqlez_macros::sql;
use std::env;
use std::future::Future;
use std::path::{Path, PathBuf};
use std::path::Path;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::LazyLock;
use util::{maybe, ResultExt};

@ -79,7 +79,7 @@ pub async fn open_db<M: Migrator + 'static>(
open_fallback_db().await
}

async fn open_main_db<M: Migrator>(db_path: &PathBuf) -> Option<ThreadSafeConnection<M>> {
async fn open_main_db<M: Migrator>(db_path: &Path) -> Option<ThreadSafeConnection<M>> {
log::info!("Opening main db");
ThreadSafeConnection::<M>::builder(db_path.to_string_lossy().as_ref(), true)
.with_db_initialization_query(DB_INITIALIZE_QUERY)
@ -28,7 +28,7 @@ impl From<proto::DevServerProject> for DevServerProject {
fn from(project: proto::DevServerProject) -> Self {
Self {
id: DevServerProjectId(project.id),
project_id: project.project_id.map(|id| ProjectId(id)),
project_id: project.project_id.map(ProjectId),
paths: project.paths.into_iter().map(|path| path.into()).collect(),
dev_server_id: DevServerId(project.dev_server_id),
}
@ -432,7 +432,7 @@ impl ProjectDiagnosticsEditor {
.unwrap();

prev_excerpt_id = excerpt_id;
first_excerpt_id.get_or_insert_with(|| prev_excerpt_id);
first_excerpt_id.get_or_insert(prev_excerpt_id);
group_state.excerpts.push(excerpt_id);
let header_position = (excerpt_id, language::Anchor::MIN);

@ -491,7 +491,7 @@ impl ProjectDiagnosticsEditor {
blocks_to_remove.extend(group_state.blocks.iter().copied());
} else if let Some((_, group_state)) = to_keep {
prev_excerpt_id = *group_state.excerpts.last().unwrap();
first_excerpt_id.get_or_insert_with(|| prev_excerpt_id);
first_excerpt_id.get_or_insert(prev_excerpt_id);
path_state.diagnostic_groups.push(group_state);
}
}

@ -776,7 +776,7 @@ impl Item for ProjectDiagnosticsEditor {
}
}

const DIAGNOSTIC_HEADER: &'static str = "diagnostic header";
const DIAGNOSTIC_HEADER: &str = "diagnostic header";

fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
let (message, code_ranges) = highlight_diagnostic_message(&diagnostic, None);

@ -773,9 +773,7 @@ async fn test_random_diagnostics(cx: &mut TestAppContext, mut rng: StdRng) {
(
path.clone(),
server_id,
current_diagnostics
.entry((path, server_id))
.or_insert(vec![]),
current_diagnostics.entry((path, server_id)).or_default(),
)
}
};

@ -853,8 +851,8 @@ fn get_diagnostics_excerpts(
result.push(ExcerptInfo {
path: buffer.file().unwrap().path().to_path_buf(),
range: ExcerptRange {
context: range.context.to_point(&buffer),
primary: range.primary.map(|range| range.to_point(&buffer)),
context: range.context.to_point(buffer),
primary: range.primary.map(|range| range.to_point(buffer)),
},
group_id: usize::MAX,
primary: false,

@ -962,9 +960,9 @@ fn random_diagnostic(
}
}

const FILE_HEADER: &'static str = "file header";
const EXCERPT_HEADER: &'static str = "excerpt header";
const EXCERPT_FOOTER: &'static str = "excerpt footer";
const FILE_HEADER: &str = "file header";
const EXCERPT_HEADER: &str = "excerpt header";
const EXCERPT_FOOTER: &str = "excerpt footer";

fn editor_blocks(
editor: &View<Editor>,
@ -18,7 +18,7 @@ impl Render for ToolbarControls {
let editor = editor.read(cx);
include_warnings = editor.include_warnings;
has_stale_excerpts = !editor.paths_to_update.is_empty();
is_updating = editor.update_paths_tx.len() > 0
is_updating = !editor.update_paths_tx.is_empty()
|| editor
.project
.read(cx)

@ -84,6 +84,12 @@ impl ToolbarItemView for ToolbarControls {
}
}

impl Default for ToolbarControls {
fn default() -> Self {
Self::new()
}
}

impl ToolbarControls {
pub fn new() -> Self {
ToolbarControls { editor: None }
@ -28,7 +28,7 @@ pub fn switch_source_header(
};

let Some((_, _, server_to_query, buffer)) =
find_specific_language_server_in_selection(&editor, cx, &is_c_language, CLANGD_SERVER_NAME)
find_specific_language_server_in_selection(editor, cx, is_c_language, CLANGD_SERVER_NAME)
else {
return;
};

@ -85,7 +85,7 @@ pub fn switch_source_header(

pub fn apply_related_actions(editor: &View<Editor>, cx: &mut WindowContext) {
if editor.update(cx, |e, cx| {
find_specific_language_server_in_selection(e, cx, &is_c_language, CLANGD_SERVER_NAME)
find_specific_language_server_in_selection(e, cx, is_c_language, CLANGD_SERVER_NAME)
.is_some()
}) {
register_action(editor, cx, switch_source_header);
@ -12,7 +12,7 @@
//! - [`WrapMap`] that handles soft wrapping.
//! - [`BlockMap`] that tracks custom blocks such as diagnostics that should be displayed within buffer.
//! - [`DisplayMap`] that adds background highlights to the regions of text.
//! Each one of those builds on top of preceding map.
//! Each one of those builds on top of preceding map.
//!
//! [Editor]: crate::Editor
//! [EditorElement]: crate::element::EditorElement

@ -588,7 +588,7 @@ impl DisplaySnapshot {

pub fn display_point_to_anchor(&self, point: DisplayPoint, bias: Bias) -> Anchor {
self.buffer_snapshot
.anchor_at(point.to_offset(&self, bias), bias)
.anchor_at(point.to_offset(self, bias), bias)
}

fn display_point_to_inlay_point(&self, point: DisplayPoint, bias: Bias) -> InlayPoint {

@ -735,7 +735,7 @@ impl DisplaySnapshot {
let mut line = String::new();

let range = display_row..display_row.next_row();
for chunk in self.highlighted_chunks(range, false, &editor_style) {
for chunk in self.highlighted_chunks(range, false, editor_style) {
line.push_str(chunk.text);

let text_style = if let Some(style) = chunk.style {

@ -1286,7 +1286,7 @@ pub mod tests {
height,
disposition,
render: Box::new(|_| div().into_any()),
priority: priority,
priority,
}
})
.collect::<Vec<_>>();
@ -60,9 +60,9 @@ pub struct BlockSnapshot {
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CustomBlockId(usize);

impl Into<ElementId> for CustomBlockId {
fn into(self) -> ElementId {
ElementId::Integer(self.0)
impl From<CustomBlockId> for ElementId {
fn from(val: CustomBlockId) -> Self {
ElementId::Integer(val.0)
}
}

@ -657,7 +657,7 @@ impl BlockMap {
.flatten()
}

pub(crate) fn sort_blocks<B: BlockLike>(blocks: &mut Vec<(u32, B)>) {
pub(crate) fn sort_blocks<B: BlockLike>(blocks: &mut [(u32, B)]) {
// Place excerpt headers and footers above custom blocks on the same row
blocks.sort_unstable_by(|(row_a, block_a), (row_b, block_b)| {
row_a.cmp(row_b).then_with(|| {

@ -1478,7 +1478,7 @@ mod tests {

#[gpui::test]
fn test_basic_blocks(cx: &mut gpui::TestAppContext) {
cx.update(|cx| init_test(cx));
cx.update(init_test);

let text = "aaa\nbbb\nccc\nddd";

@ -1734,7 +1734,7 @@ mod tests {

#[gpui::test]
fn test_replace_with_heights(cx: &mut gpui::TestAppContext) {
let _update = cx.update(|cx| init_test(cx));
cx.update(init_test);

let text = "aaa\nbbb\nccc\nddd";

@ -1838,7 +1838,7 @@ mod tests {
#[cfg(target_os = "macos")]
#[gpui::test]
fn test_blocks_on_wrapped_lines(cx: &mut gpui::TestAppContext) {
cx.update(|cx| init_test(cx));
cx.update(init_test);

let _font_id = cx.text_system().font_id(&font("Helvetica")).unwrap();

@ -1885,7 +1885,7 @@ mod tests {

#[gpui::test(iterations = 100)]
fn test_random_blocks(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
cx.update(|cx| init_test(cx));
cx.update(init_test);

let operations = env::var("OPERATIONS")
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
@ -46,7 +46,7 @@ impl CreaseSnapshot {
Ordering::Greater => break,
}
}
return None;
None
}

pub fn crease_items_with_offsets(

@ -183,7 +183,7 @@ impl CreaseMap {
}
}
removals.sort_unstable_by(|(a_id, a_range), (b_id, b_range)| {
AnchorRangeExt::cmp(a_range, b_range, snapshot).then(b_id.cmp(&a_id))
AnchorRangeExt::cmp(a_range, b_range, snapshot).then(b_id.cmp(a_id))
});

self.snapshot.creases = {
@ -122,7 +122,7 @@ impl<'a> FoldMapWriter<'a> {
let snapshot = self.0.snapshot.inlay_snapshot.clone();
for (range, fold_text) in ranges.into_iter() {
let buffer = &snapshot.buffer;
let range = range.start.to_offset(&buffer)..range.end.to_offset(&buffer);
let range = range.start.to_offset(buffer)..range.end.to_offset(buffer);

// Ignore any empty ranges.
if range.start == range.end {

@ -420,7 +420,7 @@ impl FoldMap {
}

if fold_range.end > fold_range.start {
const ELLIPSIS: &'static str = "⋯";
const ELLIPSIS: &str = "⋯";

let fold_id = fold.id;
new_transforms.push(

@ -850,7 +850,8 @@ fn consolidate_inlay_edits(mut edits: Vec<InlayEdit>) -> Vec<InlayEdit> {

let _old_alloc_ptr = edits.as_ptr();
let mut inlay_edits = edits.into_iter();
let inlay_edits = if let Some(mut first_edit) = inlay_edits.next() {

if let Some(mut first_edit) = inlay_edits.next() {
// This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them.
#[allow(clippy::filter_map_identity)]
let mut v: Vec<_> = inlay_edits

@ -872,9 +873,7 @@ fn consolidate_inlay_edits(mut edits: Vec<InlayEdit>) -> Vec<InlayEdit> {
v
} else {
vec![]
};

inlay_edits
}
}

fn consolidate_fold_edits(mut edits: Vec<FoldEdit>) -> Vec<FoldEdit> {

@ -886,7 +885,8 @@ fn consolidate_fold_edits(mut edits: Vec<FoldEdit>) -> Vec<FoldEdit> {
});
let _old_alloc_ptr = edits.as_ptr();
let mut fold_edits = edits.into_iter();
let fold_edits = if let Some(mut first_edit) = fold_edits.next() {

if let Some(mut first_edit) = fold_edits.next() {
// This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them.
#[allow(clippy::filter_map_identity)]
let mut v: Vec<_> = fold_edits

@ -907,9 +907,7 @@ fn consolidate_fold_edits(mut edits: Vec<FoldEdit>) -> Vec<FoldEdit> {
v
} else {
vec![]
};

fold_edits
}
}

#[derive(Clone, Debug, Default)]

@ -956,9 +954,9 @@ impl sum_tree::Summary for TransformSummary {
#[derive(Copy, Clone, Eq, PartialEq, Debug, Default)]
pub struct FoldId(usize);

impl Into<ElementId> for FoldId {
fn into(self) -> ElementId {
ElementId::Integer(self.0)
impl From<FoldId> for ElementId {
fn from(val: FoldId) -> Self {
ElementId::Integer(val.0)
}
}
@ -324,7 +324,7 @@ impl<'a> Iterator for InlayChunks<'a> {
} else {
next_inlay_highlight_endpoint = range.end - offset_in_inlay.0;
highlight_style
.get_or_insert_with(|| Default::default())
.get_or_insert_with(Default::default)
.highlight(*style);
}
} else {

@ -451,15 +451,14 @@ impl InlayMap {
) -> (InlaySnapshot, Vec<InlayEdit>) {
let snapshot = &mut self.snapshot;

if buffer_edits.is_empty() {
if snapshot.buffer.trailing_excerpt_update_count()
if buffer_edits.is_empty()
&& snapshot.buffer.trailing_excerpt_update_count()
!= buffer_snapshot.trailing_excerpt_update_count()
{
buffer_edits.push(Edit {
old: snapshot.buffer.len()..snapshot.buffer.len(),
new: buffer_snapshot.len()..buffer_snapshot.len(),
});
}
{
buffer_edits.push(Edit {
old: snapshot.buffer.len()..snapshot.buffer.len(),
new: buffer_snapshot.len()..buffer_snapshot.len(),
});
}

if buffer_edits.is_empty() {

@ -654,7 +654,7 @@ mod tests {
fn test_marking_tabs(cx: &mut gpui::AppContext) {
let input = "\t \thello";

let buffer = MultiBuffer::build_simple(&input, cx);
let buffer = MultiBuffer::build_simple(input, cx);
let buffer_snapshot = buffer.read(cx).snapshot(cx);
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
@ -111,7 +111,7 @@ impl WrapMap {
} else {
self.edits_since_sync = self
.edits_since_sync
.compose(&self.snapshot.interpolate(tab_snapshot, &edits));
.compose(self.snapshot.interpolate(tab_snapshot, &edits));
self.snapshot.interpolated = false;
}

@ -213,7 +213,7 @@ impl WrapMap {
}
let new_rows = self.snapshot.transforms.summary().output.lines.row + 1;
self.snapshot.interpolated = false;
self.edits_since_sync = self.edits_since_sync.compose(&Patch::new(vec![WrapEdit {
self.edits_since_sync = self.edits_since_sync.compose(Patch::new(vec![WrapEdit {
old: 0..old_rows,
new: 0..new_rows,
}]));

@ -1009,7 +1009,8 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for WrapPoint {
fn consolidate_wrap_edits(edits: Vec<WrapEdit>) -> Vec<WrapEdit> {
let _old_alloc_ptr = edits.as_ptr();
let mut wrap_edits = edits.into_iter();
let wrap_edits = if let Some(mut first_edit) = wrap_edits.next() {

if let Some(mut first_edit) = wrap_edits.next() {
// This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them.
#[allow(clippy::filter_map_identity)]
let mut v: Vec<_> = wrap_edits

@ -1030,9 +1031,7 @@ fn consolidate_wrap_edits(edits: Vec<WrapEdit>) -> Vec<WrapEdit> {
v
} else {
vec![]
};

wrap_edits
}
}

#[cfg(test)]
@ -1003,11 +1003,11 @@ impl CompletionsMenu {
cx,
);

return cx.spawn(move |this, mut cx| async move {
cx.spawn(move |this, mut cx| async move {
if let Some(true) = resolve_task.await.log_err() {
this.update(&mut cx, |_, cx| cx.notify()).ok();
}
});
})
}

fn attempt_resolve_selected_completion_documentation(

@ -2350,7 +2350,7 @@ impl Editor {
if let Some(show_inline_completions) = self.show_inline_completions_override {
show_inline_completions
} else {
self.mode == EditorMode::Full && provider.is_enabled(&buffer, buffer_position, cx)
self.mode == EditorMode::Full && provider.is_enabled(buffer, buffer_position, cx)
}
} else {
false

@ -2729,7 +2729,7 @@ impl Editor {

if !add || click_count > 1 {
None
} else if selected_points.len() > 0 {
} else if !selected_points.is_empty() {
Some(selected_points[0].id)
} else {
let clicked_point_already_selected =

@ -2738,17 +2738,13 @@ impl Editor {
|| selection.end.to_point(buffer) == end.to_point(buffer)
});

if let Some(selection) = clicked_point_already_selected {
Some(selection.id)
} else {
None
}
clicked_point_already_selected.map(|selection| selection.id)
}
};

let selections_count = self.selections.count();

self.change_selections(auto_scroll.then(|| Autoscroll::newest()), cx, |s| {
self.change_selections(auto_scroll.then(Autoscroll::newest), cx, |s| {
if let Some(point_to_delete) = point_to_delete {
s.delete(point_to_delete);

@ -2981,10 +2977,10 @@ impl Editor {
return;
}

if self.mode == EditorMode::Full {
if self.change_selections(Some(Autoscroll::fit()), cx, |s| s.try_cancel()) {
return;
}
if self.mode == EditorMode::Full
&& self.change_selections(Some(Autoscroll::fit()), cx, |s| s.try_cancel())
{
return;
}

cx.propagate();

@ -3023,11 +3019,9 @@ impl Editor {
return true;
}

if self.mode == EditorMode::Full {
if self.active_diagnostics.is_some() {
self.dismiss_diagnostics(cx);
return true;
}
if self.mode == EditorMode::Full && self.active_diagnostics.is_some() {
self.dismiss_diagnostics(cx);
return true;
}

false

@ -3550,10 +3544,10 @@ impl Editor {
.unwrap_or_default();
let mut new_text =
String::with_capacity(1 + capacity_for_delimiter + indent.len as usize);
new_text.push_str("\n");
new_text.push('\n');
new_text.extend(indent.chars());
if let Some(delimiter) = &comment_delimiter {
new_text.push_str(&delimiter);
new_text.push_str(delimiter);
}
if insert_extra_newline {
new_text = new_text.repeat(2);

@ -4187,7 +4181,7 @@ impl Editor {
};
let trigger_kind = match (&options.trigger, is_followup_invoke) {
(_, true) => CompletionTriggerKind::TRIGGER_FOR_INCOMPLETE_COMPLETIONS,
(Some(trigger), _) if buffer.read(cx).completion_triggers().contains(&trigger) => {
(Some(trigger), _) if buffer.read(cx).completion_triggers().contains(trigger) => {
CompletionTriggerKind::TRIGGER_CHARACTER
}

@ -4627,33 +4621,30 @@ impl Editor {
&& code_actions
.as_ref()
.map_or(true, |actions| actions.is_empty());
if let Some(task) = editor
.update(&mut cx, |editor, cx| {
*editor.context_menu.write() =
Some(ContextMenu::CodeActions(CodeActionsMenu {
buffer,
actions: CodeActionContents {
tasks: resolved_tasks,
actions: code_actions,
},
selected_item: Default::default(),
scroll_handle: UniformListScrollHandle::default(),
deployed_from_indicator,
}));
if spawn_straight_away {
if let Some(task) = editor.confirm_code_action(
&ConfirmCodeAction { item_ix: Some(0) },
cx,
) {
cx.notify();
return task;
}
if let Ok(task) = editor.update(&mut cx, |editor, cx| {
*editor.context_menu.write() =
Some(ContextMenu::CodeActions(CodeActionsMenu {
buffer,
actions: CodeActionContents {
tasks: resolved_tasks,
actions: code_actions,
},
selected_item: Default::default(),
scroll_handle: UniformListScrollHandle::default(),
deployed_from_indicator,
}));
if spawn_straight_away {
if let Some(task) = editor.confirm_code_action(
&ConfirmCodeAction { item_ix: Some(0) },
cx,
) {
cx.notify();
return task;
}
cx.notify();
Task::ready(Ok(()))
})
.ok()
{
}
cx.notify();
Task::ready(Ok(()))
}) {
task.await
} else {
Ok(())
@ -5257,7 +5248,7 @@ impl Editor {
}

fn insert_tasks(&mut self, key: (BufferId, BufferRow), value: RunnableTasks) {
if let Some(_) = self.tasks.insert(key, value) {
if self.tasks.insert(key, value).is_some() {
// This case should hopefully be rare, but just in case...
log::error!("multiple different run targets found on a single line, only the last target will be rendered")
}

@ -6060,7 +6051,7 @@ impl Editor {
Some(MultiBufferRow(0)..multi_buffer_snapshot.max_buffer_row()).into_iter(),
&multi_buffer_snapshot,
) {
Self::prepare_revert_change(&mut revert_changes, &self.buffer(), &hunk, cx);
Self::prepare_revert_change(&mut revert_changes, self.buffer(), &hunk, cx);
}
if !revert_changes.is_empty() {
self.transact(cx, |editor, cx| {

@ -6634,7 +6625,7 @@ impl Editor {
head = display_map.clip_point(head, Bias::Right);
let goal = SelectionGoal::HorizontalPosition(
display_map
.x_for_display_point(head, &text_layout_details)
.x_for_display_point(head, text_layout_details)
.into(),
);
selection.collapse_to(head, goal);

@ -6967,7 +6958,7 @@ impl Editor {
selection.start,
selection.goal,
false,
&text_layout_details,
text_layout_details,
);
selection.collapse_to(cursor, goal);
});

@ -7003,7 +6994,7 @@ impl Editor {
action.lines,
selection.goal,
false,
&text_layout_details,
text_layout_details,
);
selection.collapse_to(cursor, goal);
});

@ -7034,7 +7025,7 @@ impl Editor {
action.lines,
selection.goal,
false,
&text_layout_details,
text_layout_details,
);
selection.collapse_to(cursor, goal);
});

@ -7045,7 +7036,7 @@ impl Editor {
let text_layout_details = &self.text_layout_details(cx);
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_heads_with(|map, head, goal| {
movement::down_by_rows(map, head, action.lines, goal, false, &text_layout_details)
movement::down_by_rows(map, head, action.lines, goal, false, text_layout_details)
})
})
}

@ -7054,7 +7045,7 @@ impl Editor {
let text_layout_details = &self.text_layout_details(cx);
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_heads_with(|map, head, goal| {
movement::up_by_rows(map, head, action.lines, goal, false, &text_layout_details)
movement::up_by_rows(map, head, action.lines, goal, false, text_layout_details)
})
})
}

@ -7068,7 +7059,7 @@ impl Editor {

self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_heads_with(|map, head, goal| {
movement::up_by_rows(map, head, row_count, goal, false, &text_layout_details)
movement::up_by_rows(map, head, row_count, goal, false, text_layout_details)
})
})
}

@ -7117,7 +7108,7 @@ impl Editor {
row_count,
selection.goal,
false,
&text_layout_details,
text_layout_details,
);
selection.collapse_to(cursor, goal);
});

@ -7128,7 +7119,7 @@ impl Editor {
let text_layout_details = &self.text_layout_details(cx);
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_heads_with(|map, head, goal| {
movement::up(map, head, goal, false, &text_layout_details)
movement::up(map, head, goal, false, text_layout_details)
})
})
}

@ -7156,7 +7147,7 @@ impl Editor {
selection.end,
selection.goal,
false,
&text_layout_details,
text_layout_details,
);
selection.collapse_to(cursor, goal);
});

@ -7177,7 +7168,7 @@ impl Editor {

self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_heads_with(|map, head, goal| {
movement::down_by_rows(map, head, row_count, goal, false, &text_layout_details)
movement::down_by_rows(map, head, row_count, goal, false, text_layout_details)
})
})
}

@ -7225,7 +7216,7 @@ impl Editor {
row_count,
selection.goal,
false,
&text_layout_details,
text_layout_details,
);
selection.collapse_to(cursor, goal);
});

@ -7236,7 +7227,7 @@ impl Editor {
let text_layout_details = &self.text_layout_details(cx);
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.move_heads_with(|map, head, goal| {
movement::down(map, head, goal, false, &text_layout_details)
movement::down(map, head, goal, false, text_layout_details)
})
});
}
@ -7919,12 +7910,12 @@ impl Editor {
let query_match = query_match.unwrap(); // can only fail due to I/O
let offset_range =
start_offset + query_match.start()..start_offset + query_match.end();
let display_range = offset_range.start.to_display_point(&display_map)
..offset_range.end.to_display_point(&display_map);
let display_range = offset_range.start.to_display_point(display_map)
..offset_range.end.to_display_point(display_map);

if !select_next_state.wordwise
|| (!movement::is_inside_word(&display_map, display_range.start)
&& !movement::is_inside_word(&display_map, display_range.end))
|| (!movement::is_inside_word(display_map, display_range.start)
&& !movement::is_inside_word(display_map, display_range.end))
{
// TODO: This is n^2, because we might check all the selections
if !selections

@ -7988,11 +7979,11 @@ impl Editor {
if only_carets {
for selection in &mut selections {
let word_range = movement::surrounding_word(
&display_map,
selection.start.to_display_point(&display_map),
display_map,
selection.start.to_display_point(display_map),
);
selection.start = word_range.start.to_offset(&display_map, Bias::Left);
selection.end = word_range.end.to_offset(&display_map, Bias::Left);
selection.start = word_range.start.to_offset(display_map, Bias::Left);
selection.end = word_range.end.to_offset(display_map, Bias::Left);
selection.goal = SelectionGoal::None;
selection.reversed = false;
select_next_match_ranges(

@ -8525,7 +8516,7 @@ impl Editor {
let display_point = point.to_display_point(display_snapshot);
let goal = SelectionGoal::HorizontalPosition(
display_snapshot
.x_for_display_point(display_point, &text_layout_details)
.x_for_display_point(display_point, text_layout_details)
.into(),
);
(display_point, goal)

@ -8550,8 +8541,8 @@ impl Editor {
let cursor = selection.head();
let (_buffer_id, symbols) = buffer_snap.symbols_containing(cursor, None)?;
for symbol in symbols.iter().rev() {
let start = symbol.range.start.to_offset(&buffer_snap);
let end = symbol.range.end.to_offset(&buffer_snap);
let start = symbol.range.start.to_offset(buffer_snap);
let end = symbol.range.end.to_offset(buffer_snap);
let new_range = start..end;
if start < selection.start || end > selection.end {
return Some(Selection {

@ -8836,12 +8827,10 @@ impl Editor {
} else {
open.start
}
} else if inside {
*close.start()
} else {
if inside {
*close.start()
} else {
*close.end()
}
*close.end()
},
);
}

@ -8912,7 +8901,7 @@ impl Editor {
self.buffer.update(cx, |buffer, cx| {
buffer.expand_excerpts(
selections
.into_iter()
.iter()
.map(|selection| selection.head().excerpt_id)
.dedup(),
lines,

@ -9109,7 +9098,7 @@ impl Editor {
) -> bool {
let display_point = initial_point.to_display_point(snapshot);
let mut hunks = hunks
.map(|hunk| diff_hunk_to_display(&hunk, &snapshot))
.map(|hunk| diff_hunk_to_display(&hunk, snapshot))
.filter(|hunk| is_wrapped || !hunk.contains_display_row(display_point.row()))
.dedup();

@ -9790,9 +9779,10 @@ impl Editor {
}
editor
});
cx.subscribe(&rename_editor, |_, _, e, cx| match e {
EditorEvent::Focused => cx.emit(EditorEvent::FocusedIn),
_ => {}
cx.subscribe(&rename_editor, |_, _, e: &EditorEvent, cx| {
if e == &EditorEvent::Focused {
cx.emit(EditorEvent::FocusedIn)
}
})
.detach();

@ -10579,9 +10569,7 @@ impl Editor {

pub fn soft_wrap_mode(&self, cx: &AppContext) -> SoftWrap {
let settings = self.buffer.read(cx).settings_at(0, cx);
let mode = self
.soft_wrap_mode_override
.unwrap_or_else(|| settings.soft_wrap);
let mode = self.soft_wrap_mode_override.unwrap_or(settings.soft_wrap);
match mode {
language_settings::SoftWrap::None => SoftWrap::None,
language_settings::SoftWrap::PreferLine => SoftWrap::PreferLine,

@ -11046,8 +11034,8 @@ impl Editor {
highlight
.range
.start()
.cmp(&rows.start(), &snapshot)
.then(highlight.range.end().cmp(&rows.end(), &snapshot))
.cmp(rows.start(), &snapshot)
.then(highlight.range.end().cmp(rows.end(), &snapshot))
});
match (color, existing_highlight_index) {
(Some(_), Ok(ix)) | (_, Err(ix)) => row_highlights.insert(

@ -11126,7 +11114,7 @@ impl Editor {
if highlight.color.is_none() || !highlight.should_autoscroll {
return None;
}
Some(highlight.range.start().to_display_point(&snapshot).row())
Some(highlight.range.start().to_display_point(snapshot).row())
})
.min()
}

@ -11300,8 +11288,8 @@ impl Editor {
break;
}

let start = range.start.to_display_point(&display_snapshot);
let end = range.end.to_display_point(&display_snapshot);
let start = range.start.to_display_point(display_snapshot);
let end = range.end.to_display_point(display_snapshot);
results.push((start..end, color))
}
}

@ -11413,8 +11401,8 @@ impl Editor {
break;
}

let start = range.start.to_display_point(&display_snapshot);
let end = range.end.to_display_point(&display_snapshot);
let start = range.start.to_display_point(display_snapshot);
let end = range.end.to_display_point(display_snapshot);
results.push((start..end, color))
}
}
@ -12168,8 +12156,8 @@ fn hunks_for_selections(
let buffer_rows_for_selections = selections.iter().map(|selection| {
let head = selection.head();
let tail = selection.tail();
let start = MultiBufferRow(tail.to_point(&multi_buffer_snapshot).row);
let end = MultiBufferRow(head.to_point(&multi_buffer_snapshot).row);
let start = MultiBufferRow(tail.to_point(multi_buffer_snapshot).row);
let end = MultiBufferRow(head.to_point(multi_buffer_snapshot).row);
if start > end {
end..start
} else {

@ -12349,10 +12337,7 @@ fn snippet_completions(
filter_range: 0..matching_prefix.len(),
},
server_id: LanguageServerId(usize::MAX),
documentation: snippet
.description
.clone()
.map(|description| Documentation::SingleLine(description)),
documentation: snippet.description.clone().map(Documentation::SingleLine),
lsp_completion: lsp::CompletionItem {
label: snippet.prefix.first().unwrap().clone(),
kind: Some(CompletionItemKind::SNIPPET),

@ -12390,7 +12375,7 @@ impl CompletionProvider for Model<Project> {
) -> Task<Result<Vec<Completion>>> {
self.update(cx, |project, cx| {
let snippets = snippet_completions(project, buffer, buffer_position, cx);
let project_completions = project.completions(&buffer, buffer_position, options, cx);
let project_completions = project.completions(buffer, buffer_position, options, cx);
cx.background_executor().spawn(async move {
let mut completions = project_completions.await?;
//let snippets = snippets.into_iter().;

@ -12847,7 +12832,7 @@ impl ViewInputHandler for Editor {

fn marked_text_range(&self, cx: &mut ViewContext<Self>) -> Option<Range<usize>> {
let snapshot = self.buffer.read(cx).read(cx);
let range = self.text_highlights::<InputComposition>(cx)?.1.get(0)?;
let range = self.text_highlights::<InputComposition>(cx)?.1.first()?;
Some(range.start.to_offset_utf16(&snapshot).0..range.end.to_offset_utf16(&snapshot).0)
}

@ -13276,7 +13261,7 @@ pub fn highlight_diagnostic_message(
let mut code_ranges = Vec::new();

if let Some(source) = &diagnostic.source {
text_without_backticks.push_str(&source);
text_without_backticks.push_str(source);
code_ranges.push(0..source.len());
text_without_backticks.push_str(": ");
}

@ -13415,7 +13400,7 @@ pub trait RangeToAnchorExt: Sized {

fn to_display_points(self, snapshot: &EditorSnapshot) -> Range<DisplayPoint> {
let anchor_range = self.to_anchors(&snapshot.buffer_snapshot);
anchor_range.start.to_display_point(&snapshot)..anchor_range.end.to_display_point(&snapshot)
anchor_range.start.to_display_point(snapshot)..anchor_range.end.to_display_point(snapshot)
}
}
@ -14,6 +14,12 @@ use crate::EditorSettings;
#[derive(IntoElement)]
pub struct EditorSettingsControls {}

impl Default for EditorSettingsControls {
fn default() -> Self {
Self::new()
}
}

impl EditorSettingsControls {
pub fn new() -> Self {
Self {}

@ -235,13 +241,7 @@ impl EditableSettingControl for BufferFontLigaturesControl {
let mut features = settings
.buffer_font_features
.as_ref()
.map(|features| {
features
.tag_value_list()
.into_iter()
.cloned()
.collect::<Vec<_>>()
})
.map(|features| features.tag_value_list().to_vec())
.unwrap_or_default();

if let Some(calt_index) = features.iter().position(|(tag, _)| tag == "calt") {
@ -195,7 +195,7 @@ fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) {
editor.change_selections(None, cx, |s| s.select_ranges([2..2]));

// Simulate an edit in another editor
_ = buffer.update(cx, |buffer, cx| {
buffer.update(cx, |buffer, cx| {
buffer.start_transaction_at(now, cx);
buffer.edit([(0..1, "a")], None, cx);
buffer.edit([(1..1, "b")], None, cx);

@ -661,7 +661,7 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);
let mut editor = build_editor(buffer.clone(), cx);
let handle = cx.view();
editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle)));
editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(handle)));

fn pop_history(editor: &mut Editor, cx: &mut WindowContext) -> Option<NavigationEntry> {
editor.nav_history.as_mut().unwrap().pop_backward(cx)

@ -1088,7 +1088,7 @@ fn test_move_cursor(cx: &mut TestAppContext) {
let buffer = cx.update(|cx| MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx));
let view = cx.add_window(|cx| build_editor(buffer.clone(), cx));

_ = buffer.update(cx, |buffer, cx| {
buffer.update(cx, |buffer, cx| {
buffer.edit(
vec![
(Point::new(1, 0)..Point::new(1, 0), "\t"),

@ -1822,7 +1822,7 @@ async fn test_scroll_page_up_page_down(cx: &mut gpui::TestAppContext) {
cx.simulate_window_resize(window, size(px(1000.), 4. * line_height + px(0.5)));

cx.set_state(
&r#"ˇone
r#"ˇone
two
three
four

@ -1886,7 +1886,7 @@ async fn test_autoscroll(cx: &mut gpui::TestAppContext) {
cx.simulate_window_resize(window, size(px(1000.), 6. * line_height));

cx.set_state(
&r#"ˇone
r#"ˇone
two
three
four

@ -4650,17 +4650,8 @@ let foo = «2ˇ»;"#,
async fn test_select_previous_multibuffer(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});

let mut cx = EditorTestContext::new_multibuffer(
cx,
[
&indoc! {
"aaa\n«bbb\nccc\n»ddd"
},
&indoc! {
"aaa\n«bbb\nccc\n»ddd"
},
],
);
let mut cx =
EditorTestContext::new_multibuffer(cx, ["aaa\n«bbb\nccc\n»ddd", "aaa\n«bbb\nccc\n»ddd"]);

cx.assert_editor_state(indoc! {"
ˇbbb

@ -4809,7 +4800,7 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));

editor
.condition::<crate::EditorEvent>(&cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

editor.update(cx, |view, cx| {

@ -5021,7 +5012,7 @@ async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
.condition::<crate::EditorEvent>(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
.await;

_ = editor.update(cx, |editor, cx| {
editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |s| s.select_ranges([5..5, 8..8, 9..9]));
editor.newline(&Newline, cx);
assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n");

@ -5702,7 +5693,7 @@ async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
view.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

_ = view.update(cx, |view, cx| {
view.update(cx, |view, cx| {
view.change_selections(None, cx, |s| {
s.select_display_ranges([
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1),

@ -5852,7 +5843,7 @@ async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) {
.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

_ = editor.update(cx, |editor, cx| {
editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |s| {
s.select_ranges([
Point::new(0, 1)..Point::new(0, 1),

@ -6042,7 +6033,7 @@ async fn test_auto_replace_emoji_shortcode(cx: &mut gpui::TestAppContext) {
.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
.await;

_ = editor.update(cx, |editor, cx| {
editor.update(cx, |editor, cx| {
editor.set_auto_replace_emoji_shortcode(true);

editor.handle_input("Hello ", cx);

@ -6111,7 +6102,7 @@ async fn test_snippets(cx: &mut gpui::TestAppContext) {
let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx));
let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));

_ = editor.update(cx, |editor, cx| {
editor.update(cx, |editor, cx| {
let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap();

editor

@ -6738,7 +6729,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {

let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
_ = editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));

let format = editor
.update(cx, |editor, cx| {

@ -6766,7 +6757,7 @@ async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
"one, two\nthree\n"
);

_ = editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
// Ensure we don't lock if formatting hangs.
fake_server.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
assert_eq!(

@ -7765,7 +7756,7 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
handle_resolve_completion_request(&mut cx, None).await;
apply_additional_edits.await.unwrap();

_ = cx.update(|cx| {
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|settings, cx| {
settings.update_user_settings::<EditorSettings>(cx, |settings| {
settings.show_completions_on_input = Some(false);

@ -8371,7 +8362,7 @@ fn test_editing_disjoint_excerpts(cx: &mut TestAppContext) {
});

let (view, cx) = cx.add_window_view(|cx| build_editor(multibuffer, cx));
_ = view.update(cx, |view, cx| {
view.update(cx, |view, cx| {
assert_eq!(view.text(cx), "aaaa\nbbbb");
view.change_selections(None, cx, |s| {
s.select_ranges([

@ -8441,7 +8432,7 @@ fn test_editing_overlapping_excerpts(cx: &mut TestAppContext) {
});

let (view, cx) = cx.add_window_view(|cx| build_editor(multibuffer, cx));
_ = view.update(cx, |view, cx| {
view.update(cx, |view, cx| {
let (expected_text, selection_ranges) = marked_text_ranges(
indoc! {"
aaaa

@ -8545,7 +8536,7 @@ fn test_refresh_selections(cx: &mut TestAppContext) {
);
});

_ = multibuffer.update(cx, |multibuffer, cx| {
multibuffer.update(cx, |multibuffer, cx| {
multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx);
});
_ = editor.update(cx, |editor, cx| {

@ -8612,7 +8603,7 @@ fn test_refresh_selections_while_selecting_with_mouse(cx: &mut TestAppContext) {
editor
});

_ = multibuffer.update(cx, |multibuffer, cx| {
multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.remove_excerpts([excerpt1_id.unwrap()], cx);
|
||||
});
|
||||
_ = editor.update(cx, |editor, cx| {
|
||||
|
@ -8679,7 +8670,7 @@ async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
|
|||
view.condition::<crate::EditorEvent>(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
|
||||
.await;
|
||||
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.update(cx, |view, cx| {
|
||||
view.change_selections(None, cx, |s| {
|
||||
s.select_display_ranges([
|
||||
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 3),
|
||||
|
@ -8864,7 +8855,7 @@ async fn test_following(cx: &mut gpui::TestAppContext) {
|
|||
_ = follower.update(cx, |follower, cx| {
|
||||
assert_eq!(follower.selections.ranges(cx), vec![1..1]);
|
||||
});
|
||||
assert_eq!(*is_still_following.borrow(), true);
|
||||
assert!(*is_still_following.borrow());
|
||||
assert_eq!(*follower_edit_event_count.borrow(), 0);
|
||||
|
||||
// Update the scroll position only
|
||||
|
@ -8884,7 +8875,7 @@ async fn test_following(cx: &mut gpui::TestAppContext) {
|
|||
.unwrap(),
|
||||
gpui::Point::new(1.5, 3.5)
|
||||
);
|
||||
assert_eq!(*is_still_following.borrow(), true);
|
||||
assert!(*is_still_following.borrow());
|
||||
assert_eq!(*follower_edit_event_count.borrow(), 0);
|
||||
|
||||
// Update the selections and scroll position. The follower's scroll position is updated
|
||||
|
@ -8905,7 +8896,7 @@ async fn test_following(cx: &mut gpui::TestAppContext) {
|
|||
assert_eq!(follower.scroll_position(cx), gpui::Point::new(1.5, 0.0));
|
||||
assert_eq!(follower.selections.ranges(cx), vec![0..0]);
|
||||
});
|
||||
assert_eq!(*is_still_following.borrow(), true);
|
||||
assert!(*is_still_following.borrow());
|
||||
|
||||
// Creating a pending selection that precedes another selection
|
||||
_ = leader.update(cx, |leader, cx| {
|
||||
|
@ -8922,7 +8913,7 @@ async fn test_following(cx: &mut gpui::TestAppContext) {
|
|||
_ = follower.update(cx, |follower, cx| {
|
||||
assert_eq!(follower.selections.ranges(cx), vec![0..0, 1..1]);
|
||||
});
|
||||
assert_eq!(*is_still_following.borrow(), true);
|
||||
assert!(*is_still_following.borrow());
|
||||
|
||||
// Extend the pending selection so that it surrounds another selection
|
||||
_ = leader.update(cx, |leader, cx| {
|
||||
|
@ -8950,7 +8941,7 @@ async fn test_following(cx: &mut gpui::TestAppContext) {
|
|||
cx,
|
||||
);
|
||||
});
|
||||
assert_eq!(*is_still_following.borrow(), false);
|
||||
assert!(!(*is_still_following.borrow()));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
|
@ -9011,7 +9002,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
|
|||
});
|
||||
|
||||
// Insert some excerpts.
|
||||
_ = leader.update(cx, |leader, cx| {
|
||||
leader.update(cx, |leader, cx| {
|
||||
leader.buffer.update(cx, |multibuffer, cx| {
|
||||
let excerpt_ids = multibuffer.push_excerpts(
|
||||
buffer_1.clone(),
|
||||
|
@ -9086,7 +9077,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut gpui::TestAppContext) {
|
|||
);
|
||||
|
||||
// Remove some excerpts.
|
||||
_ = leader.update(cx, |leader, cx| {
|
||||
leader.update(cx, |leader, cx| {
|
||||
leader.buffer.update(cx, |multibuffer, cx| {
|
||||
let excerpt_ids = multibuffer.excerpt_ids();
|
||||
multibuffer.remove_excerpts([excerpt_ids[1], excerpt_ids[2]], cx);
|
||||
|
@ -9129,8 +9120,8 @@ async fn go_to_prev_overlapping_diagnostic(
|
|||
}
|
||||
"});
|
||||
|
||||
_ = cx.update(|cx| {
|
||||
_ = project.update(cx, |project, cx| {
|
||||
cx.update(|cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
project
|
||||
.update_diagnostics(
|
||||
LanguageServerId(0),
|
||||
|
@ -9568,7 +9559,7 @@ async fn test_on_type_formatting_not_triggered(cx: &mut gpui::TestAppContext) {
|
|||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
_ = buffer.update(cx, |buffer, _| {
|
||||
buffer.update(cx, |buffer, _| {
|
||||
assert_eq!(
|
||||
buffer.text(),
|
||||
"fn main() { let a = {5}; }",
|
||||
|
@ -9971,7 +9962,7 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
|
|||
let buffer_text = "one\ntwo\nthree\n";
|
||||
let buffer = cx.new_model(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
|
||||
_ = editor.update(cx, |editor, cx| editor.set_text(buffer_text, cx));
|
||||
editor.update(cx, |editor, cx| editor.set_text(buffer_text, cx));
|
||||
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
|
@ -10913,7 +10904,7 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(1)..=DisplayRow(1), DisplayRow(7)..=DisplayRow(7), DisplayRow(9)..=DisplayRow(9)],
|
||||
|
@ -11058,7 +11049,7 @@ async fn test_toggled_diff_base_change(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(9)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(14)],
|
||||
|
@ -11240,7 +11231,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![
|
||||
|
@ -11317,7 +11308,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(5)..=DisplayRow(5)],
|
||||
|
@ -11410,7 +11401,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![
|
||||
|
@ -11622,7 +11613,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
|
|||
multi_buffer_editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
|
||||
assert_eq!(all_hunks, expected_all_hunks);
|
||||
assert_eq!(all_expanded_hunks, Vec::new());
|
||||
|
@ -11636,7 +11627,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
|
|||
multi_buffer_editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![
|
||||
|
@ -11655,7 +11646,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
|
|||
multi_buffer_editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
|
||||
assert_eq!(all_hunks, expected_all_hunks);
|
||||
assert_eq!(all_expanded_hunks, Vec::new());
|
||||
|
@ -11668,7 +11659,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
|
|||
multi_buffer_editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![
|
||||
|
@ -11687,7 +11678,7 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
|
|||
multi_buffer_editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
|
||||
assert_eq!(all_hunks, expected_all_hunks);
|
||||
assert_eq!(all_expanded_hunks, Vec::new());
|
||||
|
@ -11775,7 +11766,7 @@ async fn test_edits_around_toggled_additions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![(
|
||||
|
@ -11815,7 +11806,7 @@ async fn test_edits_around_toggled_additions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![(
|
||||
|
@ -11860,7 +11851,7 @@ async fn test_edits_around_toggled_additions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![(
|
||||
|
@ -11904,7 +11895,7 @@ async fn test_edits_around_toggled_additions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![(
|
||||
|
@ -11952,7 +11943,7 @@ async fn test_edits_around_toggled_additions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![(
|
||||
|
@ -11988,7 +11979,7 @@ async fn test_edits_around_toggled_additions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![
|
||||
|
@ -12103,7 +12094,7 @@ async fn test_edits_around_toggled_deletions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
|
@ -12139,7 +12130,7 @@ async fn test_edits_around_toggled_deletions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
Vec::new(),
|
||||
|
@ -12178,7 +12169,7 @@ async fn test_edits_around_toggled_deletions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
|
@ -12213,7 +12204,7 @@ async fn test_edits_around_toggled_deletions(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
all_hunks,
|
||||
vec![(
|
||||
|
@ -12315,7 +12306,7 @@ async fn test_edits_around_toggled_modifications(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(6)..=DisplayRow(6)],
|
||||
|
@ -12358,7 +12349,7 @@ async fn test_edits_around_toggled_modifications(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(6)..=DisplayRow(6)],
|
||||
|
@ -12404,7 +12395,7 @@ async fn test_edits_around_toggled_modifications(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(6)..=DisplayRow(8)],
|
||||
|
@ -12448,7 +12439,7 @@ async fn test_edits_around_toggled_modifications(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(6)..=DisplayRow(9)],
|
||||
|
@ -12492,7 +12483,7 @@ async fn test_edits_around_toggled_modifications(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(6)..=DisplayRow(8)],
|
||||
|
@ -12533,7 +12524,7 @@ async fn test_edits_around_toggled_modifications(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
Vec::new(),
|
||||
|
@ -12636,7 +12627,7 @@ async fn test_multiple_expanded_hunks_merge(
|
|||
cx.update_editor(|editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let all_hunks = editor_hunks(editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(&editor, &snapshot, cx);
|
||||
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
|
||||
assert_eq!(
|
||||
expanded_hunks_background_highlights(editor, cx),
|
||||
vec![DisplayRow(6)..=DisplayRow(6)],
|
||||
|
@ -12689,8 +12680,8 @@ async fn setup_indent_guides_editor(
|
|||
let buffer_id = cx.update_editor(|editor, cx| {
|
||||
editor.set_text(text, cx);
|
||||
let buffer_ids = editor.buffer().read(cx).excerpt_buffer_ids();
|
||||
let buffer_id = buffer_ids[0];
|
||||
buffer_id
|
||||
|
||||
buffer_ids[0]
|
||||
});
|
||||
|
||||
(buffer_id, cx)
|
||||
|
@ -13249,7 +13240,7 @@ fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) {
|
|||
|
||||
let render_args = render_args.lock().take().unwrap();
|
||||
assert_eq!(render_args.row, MultiBufferRow(1));
|
||||
assert_eq!(render_args.folded, false);
|
||||
assert!(!render_args.folded);
|
||||
assert!(!snapshot.is_line_folded(MultiBufferRow(1)));
|
||||
|
||||
cx.update_window(*editor, |_, cx| (render_args.callback)(true, cx))
|
||||
|
@ -13616,7 +13607,7 @@ pub(crate) fn update_test_language_settings(
|
|||
cx: &mut TestAppContext,
|
||||
f: impl Fn(&mut AllLanguageSettingsContent),
|
||||
) {
|
||||
_ = cx.update(|cx| {
|
||||
cx.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, f);
|
||||
});
|
||||
|
@ -13627,7 +13618,7 @@ pub(crate) fn update_test_project_settings(
|
|||
cx: &mut TestAppContext,
|
||||
f: impl Fn(&mut ProjectSettings),
|
||||
) {
|
||||
_ = cx.update(|cx| {
|
||||
cx.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<ProjectSettings>(cx, f);
|
||||
});
|
||||
|
@ -13635,7 +13626,7 @@ pub(crate) fn update_test_project_settings(
|
|||
}
|
||||
|
||||
pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) {
|
||||
_ = cx.update(|cx| {
|
||||
cx.update(|cx| {
|
||||
assets::Assets.load_test_fonts(cx);
|
||||
let store = SettingsStore::test(cx);
|
||||
cx.set_global(store);
|
||||
|
|
|
@ -241,7 +241,7 @@ impl EditorElement {
|
|||
if text.is_empty() {
|
||||
return;
|
||||
}
|
||||
editor.handle_input(&text, cx);
|
||||
editor.handle_input(text, cx);
|
||||
});
|
||||
register_action(view, cx, |editor, _: &HalfPageUp, cx| {
|
||||
editor.scroll_screen(&ScrollAmount::Page(-0.5), cx)
|
||||
|
@ -652,30 +652,11 @@ impl EditorElement {
|
|||
cx.stop_propagation();
|
||||
} else if end_selection && pending_nonempty_selections {
|
||||
cx.stop_propagation();
|
||||
} else if cfg!(target_os = "linux") && event.button == MouseButton::Middle {
|
||||
if !text_hitbox.is_hovered(cx) || editor.read_only(cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
if EditorSettings::get_global(cx).middle_click_paste {
|
||||
if let Some(text) = cx.read_from_primary().and_then(|item| item.text()) {
|
||||
let point_for_position =
|
||||
position_map.point_for_position(text_hitbox.bounds, event.position);
|
||||
let position = point_for_position.previous_valid;
|
||||
|
||||
editor.select(
|
||||
SelectPhase::Begin {
|
||||
position,
|
||||
add: false,
|
||||
click_count: 1,
|
||||
},
|
||||
cx,
|
||||
);
|
||||
editor.insert(&text, cx);
|
||||
}
|
||||
cx.stop_propagation()
|
||||
}
|
||||
} else if cfg!(target_os = "linux")
|
||||
&& event.button == MouseButton::Middle
|
||||
&& (!text_hitbox.is_hovered(cx) || editor.read_only(cx))
|
||||
{
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1315,7 +1296,7 @@ impl EditorElement {
|
|||
let hitbox = match hunk {
|
||||
DisplayDiffHunk::Unfolded { .. } => {
|
||||
let hunk_bounds = Self::diff_hunk_bounds(
|
||||
&snapshot,
|
||||
snapshot,
|
||||
line_height,
|
||||
gutter_hitbox.bounds,
|
||||
&hunk,
|
||||
|
@ -1610,7 +1591,7 @@ impl EditorElement {
|
|||
.tasks
|
||||
.as_ref()
|
||||
.map(|tasks| tasks.position.to_display_point(snapshot).row())
|
||||
.or_else(|| *deployed_from_indicator)
|
||||
.or(*deployed_from_indicator)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
@ -2794,7 +2775,7 @@ impl EditorElement {
|
|||
let hover_popovers = self.editor.update(cx, |editor, cx| {
|
||||
editor
|
||||
.hover_state
|
||||
.render(&snapshot, visible_display_row_range.clone(), max_size, cx)
|
||||
.render(snapshot, visible_display_row_range.clone(), max_size, cx)
|
||||
});
|
||||
let Some((position, hover_popovers)) = hover_popovers else {
|
||||
return;
|
||||
|
@ -3199,7 +3180,7 @@ impl EditorElement {
|
|||
&layout.position_map.snapshot,
|
||||
line_height,
|
||||
layout.gutter_hitbox.bounds,
|
||||
&hunk,
|
||||
hunk,
|
||||
);
|
||||
Some((
|
||||
hunk_bounds,
|
||||
|
@ -3454,7 +3435,7 @@ impl EditorElement {
|
|||
let corner_radius = 0.15 * layout.position_map.line_height;
|
||||
|
||||
for (player_color, selections) in &layout.selections {
|
||||
for selection in selections.into_iter() {
|
||||
for selection in selections.iter() {
|
||||
self.paint_highlighted_range(
|
||||
selection.range.clone(),
|
||||
player_color.selection,
|
||||
|
@ -3772,20 +3753,18 @@ impl EditorElement {
|
|||
if is_symbol_occurrences {
|
||||
color.fade_out(0.5);
|
||||
}
|
||||
let marker_row_ranges =
|
||||
background_ranges.into_iter().map(|range| {
|
||||
let display_start = range
|
||||
.start
|
||||
.to_display_point(&snapshot.display_snapshot);
|
||||
let display_end = range
|
||||
.end
|
||||
.to_display_point(&snapshot.display_snapshot);
|
||||
ColoredRange {
|
||||
start: display_start.row(),
|
||||
end: display_end.row(),
|
||||
color,
|
||||
}
|
||||
});
|
||||
let marker_row_ranges = background_ranges.iter().map(|range| {
|
||||
let display_start = range
|
||||
.start
|
||||
.to_display_point(&snapshot.display_snapshot);
|
||||
let display_end =
|
||||
range.end.to_display_point(&snapshot.display_snapshot);
|
||||
ColoredRange {
|
||||
start: display_start.row(),
|
||||
end: display_end.row(),
|
||||
color,
|
||||
}
|
||||
});
|
||||
marker_quads.extend(
|
||||
scrollbar_layout
|
||||
.marker_quads_for_ranges(marker_row_ranges, Some(1)),
|
||||
|
@ -4608,7 +4587,7 @@ impl LineWithInvisibles {
|
|||
}
|
||||
|
||||
self.draw_invisibles(
|
||||
&selection_ranges,
|
||||
selection_ranges,
|
||||
layout,
|
||||
content_origin,
|
||||
line_y,
|
||||
|
@ -4661,7 +4640,7 @@ impl LineWithInvisibles {
|
|||
|
||||
let invisible_iter = self.invisibles.iter().map(extract_whitespace_info);
|
||||
match whitespace_setting {
|
||||
ShowWhitespaceSetting::None => return,
|
||||
ShowWhitespaceSetting::None => (),
|
||||
ShowWhitespaceSetting::All => invisible_iter.for_each(|(_, paint)| paint(cx)),
|
||||
ShowWhitespaceSetting::Selection => invisible_iter.for_each(|([start, _], paint)| {
|
||||
let invisible_point = DisplayPoint::new(row, start as u32);
|
||||
|
@ -4717,7 +4696,7 @@ impl LineWithInvisibles {
|
|||
last_seen = Some((should_render, end, paint));
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub fn x_for_index(&self, index: usize) -> Pixels {
|
||||
|
@ -5452,7 +5431,7 @@ impl Element for EditorElement {
|
|||
|
||||
let show_code_actions = snapshot
|
||||
.show_code_actions
|
||||
.unwrap_or_else(|| gutter_settings.code_actions);
|
||||
.unwrap_or(gutter_settings.code_actions);
|
||||
if show_code_actions {
|
||||
let newest_selection_point =
|
||||
newest_selection_head.to_point(&snapshot.display_snapshot);
|
||||
|
@ -6273,6 +6252,73 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: Pixels) -> f32 {
|
|||
(delta.pow(1.2) / 300.0).into()
|
||||
}
|
||||
|
||||
pub fn register_action<T: Action>(
|
||||
view: &View<Editor>,
|
||||
cx: &mut WindowContext,
|
||||
listener: impl Fn(&mut Editor, &T, &mut ViewContext<Editor>) + 'static,
|
||||
) {
|
||||
let view = view.clone();
|
||||
cx.on_action(TypeId::of::<T>(), move |action, phase, cx| {
|
||||
let action = action.downcast_ref().unwrap();
|
||||
if phase == DispatchPhase::Bubble {
|
||||
view.update(cx, |editor, cx| {
|
||||
listener(editor, action, cx);
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn compute_auto_height_layout(
|
||||
editor: &mut Editor,
|
||||
max_lines: usize,
|
||||
max_line_number_width: Pixels,
|
||||
known_dimensions: Size<Option<Pixels>>,
|
||||
available_width: AvailableSpace,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) -> Option<Size<Pixels>> {
|
||||
let width = known_dimensions.width.or({
|
||||
if let AvailableSpace::Definite(available_width) = available_width {
|
||||
Some(available_width)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})?;
|
||||
if let Some(height) = known_dimensions.height {
|
||||
return Some(size(width, height));
|
||||
}
|
||||
|
||||
let style = editor.style.as_ref().unwrap();
|
||||
let font_id = cx.text_system().resolve_font(&style.text.font());
|
||||
let font_size = style.text.font_size.to_pixels(cx.rem_size());
|
||||
let line_height = style.text.line_height_in_pixels(cx.rem_size());
|
||||
let em_width = cx
|
||||
.text_system()
|
||||
.typographic_bounds(font_id, font_size, 'm')
|
||||
.unwrap()
|
||||
.size
|
||||
.width;
|
||||
|
||||
let mut snapshot = editor.snapshot(cx);
|
||||
let gutter_dimensions =
|
||||
snapshot.gutter_dimensions(font_id, font_size, em_width, max_line_number_width, cx);
|
||||
|
||||
editor.gutter_dimensions = gutter_dimensions;
|
||||
let text_width = width - gutter_dimensions.width;
|
||||
let overscroll = size(em_width, px(0.));
|
||||
|
||||
let editor_width = text_width - gutter_dimensions.margin - overscroll.width - em_width;
|
||||
if editor.set_wrap_width(Some(editor_width), cx) {
|
||||
snapshot = editor.snapshot(cx);
|
||||
}
|
||||
|
||||
let scroll_height = Pixels::from(snapshot.max_point().row().next_row().0) * line_height;
|
||||
let height = scroll_height
|
||||
.max(line_height)
|
||||
.min(line_height * max_lines as f32);
|
||||
|
||||
Some(size(width, height))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -6605,7 +6651,7 @@ mod tests {
|
|||
});
|
||||
|
||||
let actual_invisibles =
|
||||
collect_invisibles_from_new_editor(cx, EditorMode::Full, &input_text, px(500.0));
|
||||
collect_invisibles_from_new_editor(cx, EditorMode::Full, input_text, px(500.0));
|
||||
|
||||
assert_eq!(expected_invisibles, actual_invisibles);
|
||||
}
|
||||
|
@ -6730,7 +6776,7 @@ mod tests {
|
|||
editor_width.0
|
||||
);
|
||||
let window = cx.add_window(|cx| {
|
||||
let buffer = MultiBuffer::build_simple(&input_text, cx);
|
||||
let buffer = MultiBuffer::build_simple(input_text, cx);
|
||||
Editor::new(editor_mode, buffer, None, true, cx)
|
||||
});
|
||||
let cx = &mut VisualTestContext::from_window(*window, cx);
|
||||
|
@ -6754,70 +6800,3 @@ mod tests {
|
|||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register_action<T: Action>(
|
||||
view: &View<Editor>,
|
||||
cx: &mut WindowContext,
|
||||
listener: impl Fn(&mut Editor, &T, &mut ViewContext<Editor>) + 'static,
|
||||
) {
|
||||
let view = view.clone();
|
||||
cx.on_action(TypeId::of::<T>(), move |action, phase, cx| {
|
||||
let action = action.downcast_ref().unwrap();
|
||||
if phase == DispatchPhase::Bubble {
|
||||
view.update(cx, |editor, cx| {
|
||||
listener(editor, action, cx);
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn compute_auto_height_layout(
|
||||
editor: &mut Editor,
|
||||
max_lines: usize,
|
||||
max_line_number_width: Pixels,
|
||||
known_dimensions: Size<Option<Pixels>>,
|
||||
available_width: AvailableSpace,
|
||||
cx: &mut ViewContext<Editor>,
|
||||
) -> Option<Size<Pixels>> {
|
||||
let width = known_dimensions.width.or_else(|| {
|
||||
if let AvailableSpace::Definite(available_width) = available_width {
|
||||
Some(available_width)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})?;
|
||||
if let Some(height) = known_dimensions.height {
|
||||
return Some(size(width, height));
|
||||
}
|
||||
|
||||
let style = editor.style.as_ref().unwrap();
|
||||
let font_id = cx.text_system().resolve_font(&style.text.font());
|
||||
let font_size = style.text.font_size.to_pixels(cx.rem_size());
|
||||
let line_height = style.text.line_height_in_pixels(cx.rem_size());
|
||||
let em_width = cx
|
||||
.text_system()
|
||||
.typographic_bounds(font_id, font_size, 'm')
|
||||
.unwrap()
|
||||
.size
|
||||
.width;
|
||||
|
||||
let mut snapshot = editor.snapshot(cx);
|
||||
let gutter_dimensions =
|
||||
snapshot.gutter_dimensions(font_id, font_size, em_width, max_line_number_width, cx);
|
||||
|
||||
editor.gutter_dimensions = gutter_dimensions;
|
||||
let text_width = width - gutter_dimensions.width;
|
||||
let overscroll = size(em_width, px(0.));
|
||||
|
||||
let editor_width = text_width - gutter_dimensions.margin - overscroll.width - em_width;
|
||||
if editor.set_wrap_width(Some(editor_width), cx) {
|
||||
snapshot = editor.snapshot(cx);
|
||||
}
|
||||
|
||||
let scroll_height = Pixels::from(snapshot.max_point().row().next_row().0) * line_height;
|
||||
let height = scroll_height
|
||||
.max(line_height)
|
||||
.min(line_height * max_lines as f32);
|
||||
|
||||
Some(size(width, height))
|
||||
}
|
||||
|
|
|
@ -455,7 +455,7 @@ async fn parse_commit_messages(
|
|||
.and_then(|remote_url| parse_git_remote_url(provider_registry, remote_url));
|
||||
|
||||
for (oid, message) in messages {
|
||||
let parsed_message = parse_markdown(&message, &languages).await;
|
||||
let parsed_message = parse_markdown(&message, languages).await;
|
||||
|
||||
let permalink = if let Some((provider, git_remote)) = parsed_remote_url.as_ref() {
|
||||
Some(provider.build_commit_permalink(
|
||||
|
|
|
@ -134,7 +134,7 @@ impl Editor {
|
|||
}
|
||||
None => {
|
||||
update_inlay_link_and_hover_points(
|
||||
&snapshot,
|
||||
snapshot,
|
||||
point_for_position,
|
||||
self,
|
||||
modifiers.secondary(),
|
||||
|
@ -490,12 +490,12 @@ pub fn show_link_definition(
|
|||
.is_some_and(|d| matches!(d, HoverLink::Url(_)));
|
||||
|
||||
if same_kind {
|
||||
if is_cached && (&hovered_link_state.last_trigger_point == &trigger_point)
|
||||
if is_cached && (hovered_link_state.last_trigger_point == trigger_point)
|
||||
|| hovered_link_state
|
||||
.symbol_range
|
||||
.as_ref()
|
||||
.is_some_and(|symbol_range| {
|
||||
symbol_range.point_within_range(&trigger_point, &snapshot)
|
||||
symbol_range.point_within_range(&trigger_point, snapshot)
|
||||
})
|
||||
{
|
||||
editor.hovered_link_state = Some(hovered_link_state);
|
||||
|
@ -596,7 +596,7 @@ pub fn show_link_definition(
|
|||
if let Some((symbol_range, definitions)) = result {
|
||||
hovered_link_state.links = definitions;
|
||||
|
||||
let underline_hovered_link = hovered_link_state.links.len() > 0
|
||||
let underline_hovered_link = !hovered_link_state.links.is_empty()
|
||||
|| hovered_link_state.symbol_range.is_some();
|
||||
|
||||
if underline_hovered_link {
|
||||
|
@ -718,7 +718,7 @@ pub(crate) async fn find_file(
|
|||
|
||||
let existing_path = project
|
||||
.update(cx, |project, cx| {
|
||||
project.resolve_existing_file_path(&candidate_file_path, &buffer, cx)
|
||||
project.resolve_existing_file_path(&candidate_file_path, buffer, cx)
|
||||
})
|
||||
.ok()?
|
||||
.await?;
|
||||
|
@ -1258,7 +1258,7 @@ mod tests {
|
|||
let variable« »= TestStruct;
|
||||
}
|
||||
"})
|
||||
.get(0)
|
||||
.first()
|
||||
.cloned()
|
||||
.unwrap();
|
||||
let midpoint = cx.update_editor(|editor, cx| {
|
||||
|
|
|
@ -74,7 +74,7 @@ pub fn show_keyboard_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) ->
|
|||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
false
|
||||
}
|
||||
|
||||
pub struct InlayHover {
|
||||
|
@ -648,7 +648,7 @@ impl HoverState {
|
|||
}
|
||||
}
|
||||
}
|
||||
return hover_popover_is_focused;
|
||||
hover_popover_is_focused
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1445,7 +1445,7 @@ mod tests {
|
|||
let variable« »= TestNewType(TestStruct);
|
||||
}
|
||||
"})
|
||||
.get(0)
|
||||
.first()
|
||||
.cloned()
|
||||
.unwrap();
|
||||
let new_type_hint_part_hover_position = cx.update_editor(|editor, cx| {
|
||||
|
|
|
@ -337,8 +337,8 @@ impl Editor {
|
|||
.offset_to_point(hunk.diff_base_byte_range.start)
|
||||
.row;
|
||||
let diff_end_row = diff_base.offset_to_point(hunk.diff_base_byte_range.end).row;
|
||||
let line_count = diff_end_row - diff_start_row;
|
||||
line_count
|
||||
|
||||
diff_end_row - diff_start_row
|
||||
})?;
|
||||
Some((diff_base_buffer, deleted_text_lines))
|
||||
} else {
|
||||
|
@ -358,7 +358,7 @@ impl Editor {
|
|||
|
||||
let block = match hunk.status {
|
||||
DiffHunkStatus::Removed => {
|
||||
self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, &hunk, cx)
|
||||
self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, hunk, cx)
|
||||
}
|
||||
DiffHunkStatus::Added => {
|
||||
self.highlight_rows::<DiffRowHighlight>(
|
||||
|
@ -376,7 +376,7 @@ impl Editor {
|
|||
false,
|
||||
cx,
|
||||
);
|
||||
self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, &hunk, cx)
|
||||
self.insert_deleted_text_block(diff_base_buffer, deleted_text_lines, hunk, cx)
|
||||
}
|
||||
};
|
||||
self.expanded_hunks.hunks.insert(
|
||||
|
@ -591,7 +591,7 @@ impl Editor {
|
|||
.to_display_point(&snapshot)
|
||||
.row();
|
||||
while let Some(buffer_hunk) = recalculated_hunks.peek() {
|
||||
match diff_hunk_to_display(&buffer_hunk, &snapshot) {
|
||||
match diff_hunk_to_display(buffer_hunk, &snapshot) {
|
||||
DisplayDiffHunk::Folded { display_row } => {
|
||||
recalculated_hunks.next();
|
||||
if !expanded_hunk.folded
|
||||
|
@ -710,12 +710,12 @@ fn to_diff_hunk(
|
|||
.multi_buffer_range
|
||||
.start
|
||||
.buffer_id
|
||||
.or_else(|| hovered_hunk.multi_buffer_range.end.buffer_id)?;
|
||||
.or(hovered_hunk.multi_buffer_range.end.buffer_id)?;
|
||||
let buffer_range = hovered_hunk.multi_buffer_range.start.text_anchor
|
||||
..hovered_hunk.multi_buffer_range.end.text_anchor;
|
||||
let point_range = hovered_hunk
|
||||
.multi_buffer_range
|
||||
.to_point(&multi_buffer_snapshot);
|
||||
.to_point(multi_buffer_snapshot);
|
||||
Some(DiffHunk {
|
||||
associated_range: MultiBufferRow(point_range.start.row)
|
||||
..MultiBufferRow(point_range.end.row),
|
||||
|
|
|
@ -466,7 +466,7 @@ impl InlayHintCache {
|
|||
to_insert.push(Inlay::hint(
|
||||
cached_hint_id.id(),
|
||||
anchor,
|
||||
&cached_hint,
|
||||
cached_hint,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
@ -490,7 +490,7 @@ impl InlayHintCache {
|
|||
to_insert.push(Inlay::hint(
|
||||
cached_hint_id.id(),
|
||||
anchor,
|
||||
&maybe_missed_cached_hint,
|
||||
maybe_missed_cached_hint,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
@ -844,7 +844,7 @@ fn new_update_task(
|
|||
.get_mut(&query.excerpt_id)
|
||||
{
|
||||
let buffer_snapshot = excerpt_buffer.read(cx).snapshot();
|
||||
task_ranges.invalidate_range(&buffer_snapshot, &range);
|
||||
task_ranges.invalidate_range(&buffer_snapshot, range);
|
||||
}
|
||||
})
|
||||
.ok()
|
||||
|
@ -3424,7 +3424,7 @@ pub mod tests {
|
|||
|
||||
pub fn cached_hint_labels(editor: &Editor) -> Vec<String> {
|
||||
let mut labels = Vec::new();
|
||||
for (_, excerpt_hints) in &editor.inlay_hint_cache().hints {
|
||||
for excerpt_hints in editor.inlay_hint_cache().hints.values() {
|
||||
let excerpt_hints = excerpt_hints.read();
|
||||
for id in &excerpt_hints.ordered_hints {
|
||||
labels.push(excerpt_hints.hints_by_id[id].text());
|
||||
|
|
|
@ -356,7 +356,7 @@ async fn update_editor_from_message(
|
|||
.collect::<Vec<_>>();
|
||||
removed_excerpt_ids.sort_by({
|
||||
let multibuffer = multibuffer.read(cx);
|
||||
move |a, b| a.cmp(&b, &multibuffer)
|
||||
move |a, b| a.cmp(b, &multibuffer)
|
||||
});
|
||||
|
||||
let mut insertions = message.inserted_excerpts.into_iter().peekable();
|
||||
|
@ -604,7 +604,7 @@ impl Item for Editor {
|
|||
.and_then(|path| FileIcons::get_icon(path.path.as_ref(), cx))
|
||||
})
|
||||
.flatten()
|
||||
.map(|icon| Icon::from_path(icon))
|
||||
.map(Icon::from_path)
|
||||
}
|
||||
|
||||
fn tab_content(&self, params: TabContentParams, cx: &WindowContext) -> AnyElement {
|
||||
|
@ -631,7 +631,7 @@ impl Item for Editor {
|
|||
return None;
|
||||
}
|
||||
|
||||
Some(util::truncate_and_trailoff(&description, MAX_TAB_TITLE_LEN))
|
||||
Some(util::truncate_and_trailoff(description, MAX_TAB_TITLE_LEN))
|
||||
});
|
||||
|
||||
h_flex()
|
||||
|
@ -829,7 +829,7 @@ impl Item for Editor {
|
|||
let cursor = self.selections.newest_anchor().head();
|
||||
let multibuffer = &self.buffer().read(cx);
|
||||
let (buffer_id, symbols) =
|
||||
multibuffer.symbols_containing(cursor, Some(&variant.syntax()), cx)?;
|
||||
multibuffer.symbols_containing(cursor, Some(variant.syntax()), cx)?;
|
||||
let buffer = multibuffer.buffer(buffer_id)?;
|
||||
|
||||
let buffer = buffer.read(cx);
|
||||
|
@ -1154,7 +1154,7 @@ impl SearchableItem for Editor {
|
|||
self.background_highlights
|
||||
.get(&TypeId::of::<BufferSearchHighlights>())
|
||||
.map_or(Vec::new(), |(_color, ranges)| {
|
||||
ranges.iter().map(|range| range.clone()).collect()
|
||||
ranges.iter().cloned().collect()
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -1254,7 +1254,7 @@ impl SearchableItem for Editor {
|
|||
self.unfold_ranges(matches.to_vec(), false, false, cx);
|
||||
let mut ranges = Vec::new();
|
||||
for m in matches {
|
||||
ranges.push(self.range_for_match(&m))
|
||||
ranges.push(self.range_for_match(m))
|
||||
}
|
||||
self.change_selections(None, cx, |s| s.select_ranges(ranges));
|
||||
}
|
||||
|
@ -1335,7 +1335,7 @@ impl SearchableItem for Editor {
|
|||
.cmp(¤t_index_position, &buffer)
|
||||
.is_gt()
|
||||
{
|
||||
count = count - 1
|
||||
count -= 1
|
||||
}
|
||||
|
||||
(current_index + count) % matches.len()
|
||||
|
@ -1346,7 +1346,7 @@ impl SearchableItem for Editor {
|
|||
.cmp(¤t_index_position, &buffer)
|
||||
.is_lt()
|
||||
{
|
||||
count = count - 1;
|
||||
count -= 1;
|
||||
}
|
||||
|
||||
if current_index >= count {
|
||||
|
@ -1368,7 +1368,7 @@ impl SearchableItem for Editor {
|
|||
.background_highlights
|
||||
.get(&TypeId::of::<SearchWithinRange>())
|
||||
.map_or(vec![], |(_color, ranges)| {
|
||||
ranges.iter().map(|range| range.clone()).collect::<Vec<_>>()
|
||||
ranges.iter().cloned().collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
cx.background_executor().spawn(async move {
|
||||
|
@ -1411,7 +1411,7 @@ impl SearchableItem for Editor {
|
|||
if !search_range.is_empty() {
|
||||
ranges.extend(
|
||||
query
|
||||
.search(&search_buffer, Some(search_range.clone()))
|
||||
.search(search_buffer, Some(search_range.clone()))
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|match_range| {
|
||||
|
|
|
@ -73,7 +73,7 @@ pub(super) fn refresh_linked_ranges(this: &mut Editor, cx: &mut ViewContext<Edit
|
|||
let snapshot = buffer.read(cx).snapshot();
|
||||
let buffer_id = buffer.read(cx).remote_id();
|
||||
|
||||
let linked_edits_task = project.linked_edit(&buffer, *start, cx);
|
||||
let linked_edits_task = project.linked_edit(buffer, *start, cx);
|
||||
let highlights = move || async move {
|
||||
let edits = linked_edits_task.await.log_err()?;
|
||||
// Find the range containing our current selection.
|
||||
|
@ -87,9 +87,7 @@ pub(super) fn refresh_linked_ranges(this: &mut Editor, cx: &mut ViewContext<Edit
|
|||
range.start.to_point(&snapshot) <= start_point
|
||||
&& range.end.to_point(&snapshot) >= end_point
|
||||
});
|
||||
if _current_selection_contains_range.is_none() {
|
||||
return None;
|
||||
}
|
||||
_current_selection_contains_range?;
|
||||
// Now link every range as each-others sibling.
|
||||
let mut siblings: HashMap<Range<text::Anchor>, Vec<_>> = Default::default();
|
||||
let mut insert_sorted_anchor =
|
||||
|
|
|
@ -23,7 +23,7 @@ where
|
|||
editor
|
||||
.selections
|
||||
.disjoint_anchors()
|
||||
.into_iter()
|
||||
.iter()
|
||||
.filter(|selection| selection.start == selection.end)
|
||||
.filter_map(|selection| Some((selection.start.buffer_id?, selection.start)))
|
||||
.filter_map(|(buffer_id, trigger_anchor)| {
|
||||
|
|
|
@ -113,7 +113,7 @@ fn display_ranges<'a>(
|
|||
.disjoint
|
||||
.iter()
|
||||
.chain(pending)
|
||||
.map(move |s| s.start.to_display_point(&display_map)..s.end.to_display_point(&display_map))
|
||||
.map(move |s| s.start.to_display_point(display_map)..s.end.to_display_point(display_map))
|
||||
}
|
||||
|
||||
pub fn deploy_context_menu(
|
||||
|
|
|
@ -416,7 +416,7 @@ pub fn find_preceding_boundary_point(
|
|||
mut is_boundary: impl FnMut(char, char) -> bool,
|
||||
) -> Point {
|
||||
let mut prev_ch = None;
|
||||
let mut offset = from.to_offset(&buffer_snapshot);
|
||||
let mut offset = from.to_offset(buffer_snapshot);
|
||||
|
||||
for ch in buffer_snapshot.reversed_chars_at(offset) {
|
||||
if find_range == FindRange::SingleLine && ch == '\n' {
|
||||
|
@ -432,7 +432,7 @@ pub fn find_preceding_boundary_point(
|
|||
prev_ch = Some(ch);
|
||||
}
|
||||
|
||||
offset.to_point(&buffer_snapshot)
|
||||
offset.to_point(buffer_snapshot)
|
||||
}
|
||||
|
||||
/// Scans for a boundary preceding the given start point `from` until a boundary is found,
|
||||
|
@ -466,7 +466,7 @@ pub fn find_boundary_point(
|
|||
mut is_boundary: impl FnMut(char, char) -> bool,
|
||||
return_point_before_boundary: bool,
|
||||
) -> DisplayPoint {
|
||||
let mut offset = from.to_offset(&map, Bias::Right);
|
||||
let mut offset = from.to_offset(map, Bias::Right);
|
||||
let mut prev_offset = offset;
|
||||
let mut prev_ch = None;
|
||||
|
||||
|
@ -496,7 +496,7 @@ pub fn find_boundary(
|
|||
find_range: FindRange,
|
||||
is_boundary: impl FnMut(char, char) -> bool,
|
||||
) -> DisplayPoint {
|
||||
return find_boundary_point(map, from, find_range, is_boundary, false);
|
||||
find_boundary_point(map, from, find_range, is_boundary, false)
|
||||
}
|
||||
|
||||
pub fn find_boundary_exclusive(
|
||||
|
@ -505,7 +505,7 @@ pub fn find_boundary_exclusive(
|
|||
find_range: FindRange,
|
||||
is_boundary: impl FnMut(char, char) -> bool,
|
||||
) -> DisplayPoint {
|
||||
return find_boundary_point(map, from, find_range, is_boundary, true);
|
||||
find_boundary_point(map, from, find_range, is_boundary, true)
|
||||
}
|
||||
|
||||
/// Returns an iterator over the characters following a given offset in the [`DisplaySnapshot`].
|
||||
|
@ -517,7 +517,7 @@ pub fn chars_after(
|
|||
) -> impl Iterator<Item = (char, Range<usize>)> + '_ {
|
||||
map.buffer_snapshot.chars_at(offset).map(move |ch| {
|
||||
let before = offset;
|
||||
offset = offset + ch.len_utf8();
|
||||
offset += ch.len_utf8();
|
||||
(ch, before..offset)
|
||||
})
|
||||
}
|
||||
|
@ -533,7 +533,7 @@ pub fn chars_before(
|
|||
.reversed_chars_at(offset)
|
||||
.map(move |ch| {
|
||||
let after = offset;
|
||||
offset = offset - ch.len_utf8();
|
||||
offset -= ch.len_utf8();
|
||||
(ch, offset..after)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -220,7 +220,6 @@ impl EditorDb {
|
|||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gpui;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_save_and_get_serialized_editor() {
|
||||
|
|
|
@ -19,7 +19,7 @@ fn is_rust_language(language: &Language) -> bool {
|
|||
pub fn apply_related_actions(editor: &View<Editor>, cx: &mut WindowContext) {
|
||||
if editor
|
||||
.update(cx, |e, cx| {
|
||||
find_specific_language_server_in_selection(e, cx, &is_rust_language, RUST_ANALYZER_NAME)
|
||||
find_specific_language_server_in_selection(e, cx, is_rust_language, RUST_ANALYZER_NAME)
|
||||
})
|
||||
.is_some()
|
||||
{
|
||||
|
@ -44,9 +44,9 @@ pub fn expand_macro_recursively(
|
|||
|
||||
let Some((trigger_anchor, rust_language, server_to_query, buffer)) =
|
||||
find_specific_language_server_in_selection(
|
||||
&editor,
|
||||
editor,
|
||||
cx,
|
||||
&is_rust_language,
|
||||
is_rust_language,
|
||||
RUST_ANALYZER_NAME,
|
||||
)
|
||||
else {
|
||||
|
|
|
@ -50,7 +50,7 @@ impl ScrollAnchor {
|
|||
scroll_position.y = 0.;
|
||||
} else {
|
||||
let scroll_top = self.anchor.to_display_point(snapshot).row().as_f32();
|
||||
scroll_position.y = scroll_top + scroll_position.y;
|
||||
scroll_position.y += scroll_top;
|
||||
}
|
||||
scroll_position
|
||||
}
|
||||
|
@ -224,7 +224,7 @@ impl ScrollManager {
|
|||
};
|
||||
|
||||
let scroll_top_buffer_point =
|
||||
DisplayPoint::new(DisplayRow(scroll_top as u32), 0).to_point(&map);
|
||||
DisplayPoint::new(DisplayRow(scroll_top as u32), 0).to_point(map);
|
||||
let top_anchor = map
|
||||
.buffer_snapshot
|
||||
.anchor_at(scroll_top_buffer_point, Bias::Right);
|
||||
|
@ -234,7 +234,7 @@ impl ScrollManager {
|
|||
anchor: top_anchor,
|
||||
offset: point(
|
||||
scroll_position.x.max(0.),
|
||||
scroll_top - top_anchor.to_display_point(&map).row().as_f32(),
|
||||
scroll_top - top_anchor.to_display_point(map).row().as_f32(),
|
||||
),
|
||||
},
|
||||
scroll_top_buffer_point.row,
|
||||
|
|
|
@ -339,7 +339,7 @@ impl SelectionsCollection {
|
|||
let is_empty = positions.start == positions.end;
|
||||
let line_len = display_map.line_len(row);
|
||||
|
||||
let line = display_map.layout_row(row, &text_layout_details);
|
||||
let line = display_map.layout_row(row, text_layout_details);
|
||||
|
||||
let start_col = line.closest_index_for_x(positions.start) as u32;
|
||||
if start_col < line_len || (is_empty && positions.start == line.width) {
|
||||
|
|
|
@ -13,7 +13,7 @@ pub use popover::SignatureHelpPopover;
|
|||
pub use state::SignatureHelpState;
|
||||
|
||||
// Language-specific settings may define quotes as "brackets", so filter them out separately.
|
||||
const QUOTE_PAIRS: [(&'static str, &'static str); 3] = [("'", "'"), ("\"", "\""), ("`", "`")];
|
||||
const QUOTE_PAIRS: [(&str, &str); 3] = [("'", "'"), ("\"", "\""), ("`", "`")];
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum SignatureHelpHiddenBy {
|
||||
|
@ -167,10 +167,10 @@ impl Editor {
|
|||
let language = editor.language_at(position, cx);
|
||||
let project = editor.project.clone()?;
|
||||
let (markdown, language_registry) = {
|
||||
project.update(cx, |project, mut cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
let language_registry = project.languages().clone();
|
||||
(
|
||||
project.signature_help(&buffer, buffer_position, &mut cx),
|
||||
project.signature_help(&buffer, buffer_position, cx),
|
||||
language_registry,
|
||||
)
|
||||
})
|
||||
|
|
|
@ -120,8 +120,7 @@ impl EditorTestContext {
|
|||
where
|
||||
F: FnOnce(&Editor, &ViewContext<Editor>) -> T,
|
||||
{
|
||||
self.editor
|
||||
.update(&mut self.cx, |this, cx| read(&this, &cx))
|
||||
self.editor.update(&mut self.cx, |this, cx| read(this, cx))
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
|
@ -327,8 +326,8 @@ impl EditorTestContext {
|
|||
.background_highlights
|
||||
.get(&TypeId::of::<Tag>())
|
||||
.map(|h| h.1.clone())
|
||||
.unwrap_or_else(|| Arc::default())
|
||||
.into_iter()
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.map(|range| range.to_offset(&snapshot.buffer_snapshot))
|
||||
.collect()
|
||||
});
|
||||
|
@ -424,6 +423,12 @@ pub struct AssertionContextManager {
|
|||
contexts: Arc<RwLock<BTreeMap<usize, String>>>,
|
||||
}
|
||||
|
||||
impl Default for AssertionContextManager {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl AssertionContextManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
|
|
|
@ -77,7 +77,7 @@ impl ExtensionBuilder {
|
|||
extension_manifest: &mut ExtensionManifest,
|
||||
options: CompileExtensionOptions,
|
||||
) -> Result<()> {
|
||||
populate_defaults(extension_manifest, &extension_dir)?;
|
||||
populate_defaults(extension_manifest, extension_dir)?;
|
||||
|
||||
if extension_dir.is_relative() {
|
||||
bail!(
|
||||
|
@ -123,7 +123,7 @@ impl ExtensionBuilder {
|
|||
self.install_rust_wasm_target_if_needed()?;
|
||||
let adapter_bytes = self.install_wasi_preview1_adapter_if_needed().await?;
|
||||
|
||||
let cargo_toml_content = fs::read_to_string(&extension_dir.join("Cargo.toml"))?;
|
||||
let cargo_toml_content = fs::read_to_string(extension_dir.join("Cargo.toml"))?;
|
||||
let cargo_toml: CargoToml = toml::from_str(&cargo_toml_content)?;
|
||||
|
||||
log::info!(
|
||||
|
@ -135,7 +135,7 @@ impl ExtensionBuilder {
|
|||
.args(options.release.then_some("--release"))
|
||||
.arg("--target-dir")
|
||||
.arg(extension_dir.join("target"))
|
||||
.current_dir(&extension_dir)
|
||||
.current_dir(extension_dir)
|
||||
.output()
|
||||
.context("failed to run `cargo`")?;
|
||||
if !output.status.success() {
|
||||
|
@ -281,12 +281,12 @@ impl ExtensionBuilder {
|
|||
);
|
||||
}
|
||||
} else {
|
||||
fs::create_dir_all(&directory).with_context(|| {
|
||||
fs::create_dir_all(directory).with_context(|| {
|
||||
format!("failed to create grammar directory {}", directory.display(),)
|
||||
})?;
|
||||
let init_output = Command::new("git")
|
||||
.arg("init")
|
||||
.current_dir(&directory)
|
||||
.current_dir(directory)
|
||||
.output()?;
|
||||
if !init_output.status.success() {
|
||||
bail!(
|
||||
|
@ -312,15 +312,15 @@ impl ExtensionBuilder {
|
|||
let fetch_output = Command::new("git")
|
||||
.arg("--git-dir")
|
||||
.arg(&git_dir)
|
||||
.args(["fetch", "--depth", "1", "origin", &rev])
|
||||
.args(["fetch", "--depth", "1", "origin", rev])
|
||||
.output()
|
||||
.context("failed to execute `git fetch`")?;
|
||||
|
||||
let checkout_output = Command::new("git")
|
||||
.arg("--git-dir")
|
||||
.arg(&git_dir)
|
||||
.args(["checkout", &rev])
|
||||
.current_dir(&directory)
|
||||
.args(["checkout", rev])
|
||||
.current_dir(directory)
|
||||
.output()
|
||||
.context("failed to execute `git checkout`")?;
|
||||
if !checkout_output.status.success() {
|
||||
|
@ -488,14 +488,10 @@ impl ExtensionBuilder {
|
|||
_ => {}
|
||||
}
|
||||
|
||||
match &payload {
|
||||
CustomSection(c) => {
|
||||
if strip_custom_section(c.name()) {
|
||||
continue;
|
||||
}
|
||||
if let CustomSection(c) = &payload {
|
||||
if strip_custom_section(c.name()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if let Some((id, range)) = payload.as_section() {
|
||||
|
|
|
@ -243,7 +243,7 @@ impl LspAdapter for ExtensionLspAdapter {
|
|||
language: &Arc<Language>,
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
let completions = completions
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|completion| wit::Completion::from(completion.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
|
@ -276,7 +276,7 @@ impl LspAdapter for ExtensionLspAdapter {
|
|||
language: &Arc<Language>,
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
let symbols = symbols
|
||||
.into_iter()
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|(name, kind)| wit::Symbol {
|
||||
name,
|
||||
|
@ -317,7 +317,7 @@ fn labels_from_wit(
|
|||
} else {
|
||||
language.highlight_text(&label.code.as_str().into(), 0..label.code.len())
|
||||
};
|
||||
build_code_label(&label, &runs, &language)
|
||||
build_code_label(&label, &runs, language)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
@ -364,7 +364,7 @@ fn build_code_label(
|
|||
.grammar()
|
||||
.zip(span.highlight_name.as_ref())
|
||||
.and_then(|(grammar, highlight_name)| {
|
||||
grammar.highlight_id_for_name(&highlight_name)
|
||||
grammar.highlight_id_for_name(highlight_name)
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let ix = text.len();
|
||||
|
|
|
@ -290,7 +290,7 @@ impl ExtensionStore {
|
|||
// it must be asynchronously rebuilt.
|
||||
let mut extension_index = ExtensionIndex::default();
|
||||
let mut extension_index_needs_rebuild = true;
|
||||
if let Some(index_content) = index_content.ok() {
|
||||
if let Ok(index_content) = index_content {
|
||||
if let Some(index) = serde_json::from_str(&index_content).log_err() {
|
||||
extension_index = index;
|
||||
if let (Ok(Some(index_metadata)), Ok(Some(extensions_metadata))) =
|
||||
|
@ -582,11 +582,11 @@ impl ExtensionStore {
|
|||
query: &[(&str, &str)],
|
||||
cx: &mut ModelContext<'_, ExtensionStore>,
|
||||
) -> Task<Result<Vec<ExtensionMetadata>>> {
|
||||
let url = self.http_client.build_zed_api_url(path, &query);
|
||||
let url = self.http_client.build_zed_api_url(path, query);
|
||||
let http_client = self.http_client.clone();
|
||||
cx.spawn(move |_, _| async move {
|
||||
let mut response = http_client
|
||||
.get(&url?.as_ref(), AsyncBody::empty(), true)
|
||||
.get(url?.as_ref(), AsyncBody::empty(), true)
|
||||
.await?;
|
||||
|
||||
let mut body = Vec::new();
|
||||
|
@ -651,7 +651,7 @@ impl ExtensionStore {
|
|||
});
|
||||
|
||||
let mut response = http_client
|
||||
.get(&url.as_ref(), Default::default(), true)
|
||||
.get(url.as_ref(), Default::default(), true)
|
||||
.await
|
||||
.map_err(|err| anyhow!("error downloading extension: {}", err))?;
|
||||
|
||||
|
@ -687,14 +687,11 @@ impl ExtensionStore {
|
|||
})?
|
||||
.await;
|
||||
|
||||
match operation {
|
||||
ExtensionOperation::Install => {
|
||||
this.update(&mut cx, |_, cx| {
|
||||
cx.emit(Event::ExtensionInstalled(extension_id));
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
_ => {}
|
||||
if let ExtensionOperation::Install = operation {
|
||||
this.update(&mut cx, |_, cx| {
|
||||
cx.emit(Event::ExtensionInstalled(extension_id));
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
|
@ -873,10 +870,10 @@ impl ExtensionStore {
|
|||
.await?;
|
||||
|
||||
let output_path = &extensions_dir.join(extension_id.as_ref());
|
||||
if let Some(metadata) = fs.metadata(&output_path).await? {
|
||||
if let Some(metadata) = fs.metadata(output_path).await? {
|
||||
if metadata.is_symlink {
|
||||
fs.remove_file(
|
||||
&output_path,
|
||||
output_path,
|
||||
RemoveOptions {
|
||||
recursive: false,
|
||||
ignore_if_not_exists: true,
|
||||
|
@ -967,7 +964,7 @@ impl ExtensionStore {
|
|||
(Some(_), None) => {
|
||||
extensions_to_unload.push(old_keys.next().unwrap().0.clone());
|
||||
}
|
||||
(Some((old_key, _)), Some((new_key, _))) => match old_key.cmp(&new_key) {
|
||||
(Some((old_key, _)), Some((new_key, _))) => match old_key.cmp(new_key) {
|
||||
Ordering::Equal => {
|
||||
let (old_key, old_value) = old_keys.next().unwrap();
|
||||
let (new_key, new_value) = new_keys.next().unwrap();
|
||||
|
@ -1140,7 +1137,7 @@ impl ExtensionStore {
|
|||
async move {
|
||||
for theme_path in &themes_to_add {
|
||||
theme_registry
|
||||
.load_user_theme(&theme_path, fs.clone())
|
||||
.load_user_theme(theme_path, fs.clone())
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
|
|
|
@@ -587,11 +587,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
let executor = cx.executor();
let _task = cx.executor().spawn(async move {
while let Some(event) = events.next().await {
match event {
crate::Event::StartedReloading => {
executor.advance_clock(RELOAD_DEBOUNCE_DURATION);
}
_ => (),
if let crate::Event::StartedReloading = event {
executor.advance_clock(RELOAD_DEBOUNCE_DURATION);
}
}
});