Merge pull request #15 from zed-industries/rescan

Update Worktrees when the file system changes
Nathan Sobo 2021-04-26 20:35:01 -06:00 committed by GitHub
commit d31f2defa5
35 changed files with 3954 additions and 1975 deletions
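This change makes worktrees rescan themselves when files change on disk: it adds an fsevent crate wrapping the macOS FSEvents API, a small scoped-pool crate for fork-join work on background threads, and postage channels for async change notification. Below is a minimal sketch of the watch-and-rescan pattern the new fsevent::EventStream API supports; rescan_paths is a hypothetical stand-in for the worktree's real rescan logic, not code from this commit.

// Minimal sketch: watch a root directory and rescan the paths FSEvents reports.
// Uses only the EventStream API added in this commit; `rescan_paths` is hypothetical.
use fsevent::EventStream;
use std::{path::{Path, PathBuf}, time::Duration};

fn watch_and_rescan(root: &Path) {
    let (stream, _handle) = EventStream::new(&[root], Duration::from_millis(100));
    // `run` blocks the current thread's run loop; returning `false` from the
    // callback stops the stream.
    stream.run(move |events| {
        let changed: Vec<PathBuf> = events.into_iter().map(|event| event.path).collect();
        rescan_paths(&changed);
        true
    });
}

fn rescan_paths(paths: &[PathBuf]) {
    // Hypothetical: reload worktree entries under these paths.
    for path in paths {
        eprintln!("rescanning {:?}", path);
    }
}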

8
.vscode/launch.json vendored

@ -7,15 +7,15 @@
{
"type": "lldb",
"request": "launch",
"name": "Debug executable 'zed'",
"name": "Debug executable 'Zed'",
"cargo": {
"args": [
"build",
"--bin=zed",
"--bin=Zed",
"--package=zed"
],
"filter": {
"name": "zed",
"name": "Zed",
"kind": "bin"
}
},
@ -63,4 +63,4 @@
"cwd": "${workspaceFolder}"
}
]
}
}

403
Cargo.lock generated

@ -84,22 +84,6 @@ dependencies = [
"futures-lite",
]
[[package]]
name = "async-global-executor"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9586ec52317f36de58453159d48351bc244bc24ced3effc1fce22f3d48664af6"
dependencies = [
"async-channel",
"async-executor",
"async-io",
"async-mutex",
"blocking",
"futures-lite",
"num_cpus",
"once_cell",
]
[[package]]
name = "async-io"
version = "1.3.1"
@ -129,15 +113,6 @@ dependencies = [
"event-listener",
]
[[package]]
name = "async-mutex"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e"
dependencies = [
"event-listener",
]
[[package]]
name = "async-net"
version = "1.5.0"
@ -166,39 +141,20 @@ dependencies = [
"winapi",
]
[[package]]
name = "async-std"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9f06685bad74e0570f5213741bea82158279a4103d988e57bfada11ad230341"
dependencies = [
"async-channel",
"async-global-executor",
"async-io",
"async-lock",
"async-process",
"crossbeam-utils 0.8.2",
"futures-channel",
"futures-core",
"futures-io",
"futures-lite",
"gloo-timers",
"kv-log-macro",
"log",
"memchr",
"num_cpus",
"once_cell",
"pin-project-lite",
"pin-utils",
"slab",
"wasm-bindgen-futures",
]
[[package]]
name = "async-task"
version = "4.0.3"
source = "git+https://github.com/zed-industries/async-task?rev=341b57d6de98cdfd7b418567b8de2022ca993a6e#341b57d6de98cdfd7b418567b8de2022ca993a6e"
[[package]]
name = "atomic"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3410529e8288c463bedb5930f82833bc0c90e5d2fe639a56582a4d09220b281"
dependencies = [
"autocfg",
]
[[package]]
name = "atomic-waker"
version = "1.0.0"
@ -297,12 +253,6 @@ dependencies = [
"memchr",
]
[[package]]
name = "bumpalo"
version = "3.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "63396b8a4b9de3f4fdfb320ab6080762242f66a8ef174c49d8e19b674db4cdbe"
[[package]]
name = "bytemuck"
version = "1.5.1"
@ -498,22 +448,6 @@ dependencies = [
"cfg-if 1.0.0",
]
[[package]]
name = "crossbeam"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd66663db5a988098a89599d4857919b3acf7f61402e61365acfd3919857b9be"
[[package]]
name = "crossbeam-channel"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b153fe7cbef478c567df0f972e02e6d736db11affe43dfc9c56a9374d1adfb87"
dependencies = [
"crossbeam-utils 0.7.2",
"maybe-uninit",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.0"
@ -521,18 +455,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-utils 0.8.2",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-utils"
version = "0.7.2"
name = "crossbeam-queue"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8"
checksum = "0f6cb3c7f5b8e51bc3ebb73a2327ad4abdbd119dc13223f14f961d2f38486756"
dependencies = [
"autocfg",
"cfg-if 0.1.10",
"lazy_static",
"cfg-if 1.0.0",
"crossbeam-utils",
]
[[package]]
@ -549,9 +482,9 @@ dependencies = [
[[package]]
name = "ctor"
version = "0.1.19"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19"
checksum = "5e98e2ad1a782e33928b96fc3948e7c355e5af34ba4de7670fe8bac2a3b2006d"
dependencies = [
"quote",
"syn",
@ -793,12 +726,45 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "fsevent"
version = "2.0.2"
dependencies = [
"bitflags",
"fsevent-sys",
"parking_lot",
"tempdir",
]
[[package]]
name = "fsevent-sys"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77a29c77f1ca394c3e73a9a5d24cfcabb734682d9634fc398f2204a63c994120"
dependencies = [
"libc",
]
[[package]]
name = "fuchsia-cprng"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
[[package]]
name = "futures"
version = "0.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da9052a1a50244d8d5aa9bf55cbc2fb6f357c86cc52e46c62ed390a7180cf150"
dependencies = [
"futures-channel",
"futures-core",
"futures-io",
"futures-sink",
"futures-task",
"futures-util",
]
[[package]]
name = "futures-channel"
version = "0.3.12"
@ -806,6 +772,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2d31b7ec7efab6eefc7c57233bb10b847986139d88cc2f5a02a1ae6871a1846"
dependencies = [
"futures-core",
"futures-sink",
]
[[package]]
@ -835,6 +802,31 @@ dependencies = [
"waker-fn",
]
[[package]]
name = "futures-sink"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c5629433c555de3d82861a7a4e3794a4c40040390907cfbfd7143a92a426c23"
[[package]]
name = "futures-task"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba7aa51095076f3ba6d9a1f702f74bd05ec65f555d70d2033d55ba8d69f581bc"
[[package]]
name = "futures-util"
version = "0.3.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "632a8cd0f2a4b3fdea1657f08bde063848c3bd00f9bbf6e256b8be78802e624b"
dependencies = [
"futures-core",
"futures-sink",
"futures-task",
"pin-project-lite",
"pin-utils",
]
[[package]]
name = "generator"
version = "0.6.23"
@ -878,8 +870,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
[[package]]
name = "globset"
version = "0.4.4"
source = "git+https://github.com/zed-industries/ripgrep?rev=1d152118f35b3e3590216709b86277062d79b8a0#1d152118f35b3e3590216709b86277062d79b8a0"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c152169ef1e421390738366d2f796655fec62621dabbd0fd476f905934061e4a"
dependencies = [
"aho-corasick",
"bstr",
@ -888,25 +881,11 @@ dependencies = [
"regex",
]
[[package]]
name = "gloo-timers"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47204a46aaff920a1ea58b11d03dec6f704287d27561724a4631e450654a891f"
dependencies = [
"futures-channel",
"futures-core",
"js-sys",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "gpui"
version = "0.1.0"
dependencies = [
"anyhow",
"async-std",
"async-task",
"bindgen",
"block",
@ -916,6 +895,7 @@ dependencies = [
"core-graphics",
"core-text",
"ctor",
"env_logger",
"etagere",
"font-kit",
"foreign-types",
@ -928,6 +908,7 @@ dependencies = [
"pathfinder_color",
"pathfinder_geometry",
"png",
"postage",
"rand 0.8.3",
"replace_with",
"resvg",
@ -966,11 +947,11 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "ignore"
version = "0.4.11"
source = "git+https://github.com/zed-industries/ripgrep?rev=1d152118f35b3e3590216709b86277062d79b8a0#1d152118f35b3e3590216709b86277062d79b8a0"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b287fb45c60bb826a0dc68ff08742b9d88a2fea13d6e0c286b3172065aaf878c"
dependencies = [
"crossbeam-channel 0.4.4",
"crossbeam-utils 0.7.2",
"crossbeam-utils",
"globset",
"lazy_static",
"log",
@ -1013,15 +994,6 @@ version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "229d53d58899083193af11e15917b5640cd40b29ff475a1fe4ef725deb02d0f2"
[[package]]
name = "js-sys"
version = "0.3.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d99f9e3e84b8f67f846ef5b4cbbc3b1c29f6c759fcbce6f01aa0e73d932a24c"
dependencies = [
"wasm-bindgen",
]
[[package]]
name = "kurbo"
version = "0.8.1"
@ -1031,15 +1003,6 @@ dependencies = [
"arrayvec",
]
[[package]]
name = "kv-log-macro"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
dependencies = [
"log",
]
[[package]]
name = "lazy_static"
version = "1.4.0"
@ -1074,7 +1037,7 @@ version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312"
dependencies = [
"scopeguard 1.1.0",
"scopeguard",
]
[[package]]
@ -1084,7 +1047,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710"
dependencies = [
"cfg-if 1.0.0",
"value-bag",
]
[[package]]
@ -1113,12 +1075,6 @@ version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
[[package]]
name = "maybe-uninit"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00"
[[package]]
name = "memchr"
version = "2.3.4"
@ -1330,6 +1286,26 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d70072c20945e1ab871c472a285fc772aefd4f5407723c206242f2c6f94595d6"
[[package]]
name = "pin-project"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc174859768806e91ae575187ada95c91a29e96a98dc5d2cd9a1fed039501ba6"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "pin-project-lite"
version = "0.2.4"
@ -1373,6 +1349,28 @@ dependencies = [
"winapi",
]
[[package]]
name = "pollster"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6cce106fd2646acbe31a0e4006f75779d535c26a44f153ada196e9edcfc6d944"
[[package]]
name = "postage"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a63d25391d04a097954b76aba742b6b5b74f213dfe3dbaeeb36e8ddc1c657f0b"
dependencies = [
"atomic",
"crossbeam-queue",
"futures",
"log",
"pin-project",
"pollster",
"static_assertions",
"thiserror",
]
[[package]]
name = "ppv-lite86"
version = "0.2.10"
@ -1592,7 +1590,7 @@ dependencies = [
"base64",
"blake2b_simd",
"constant_time_eq",
"crossbeam-utils 0.8.2",
"crossbeam-utils",
]
[[package]]
@ -1685,13 +1683,9 @@ dependencies = [
[[package]]
name = "scoped-pool"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "817a3a15e704545ce59ed2b5c60a5d32bda4d7869befb8b36667b658a6c00b43"
version = "0.0.1"
dependencies = [
"crossbeam",
"scopeguard 0.1.2",
"variance",
"crossbeam-channel",
]
[[package]]
@ -1700,12 +1694,6 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
[[package]]
name = "scopeguard"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59a076157c1e2dc561d8de585151ee6965d910dd4dcb5dabb7ae3e83981a6c57"
[[package]]
name = "scopeguard"
version = "1.1.0"
@ -1837,12 +1825,6 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac"
[[package]]
name = "slab"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
[[package]]
name = "smallvec"
version = "1.6.1"
@ -1878,6 +1860,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "strsim"
version = "0.8.0"
@ -1949,6 +1937,26 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "thread_local"
version = "1.1.3"
@ -2089,21 +2097,6 @@ dependencies = [
"xmlwriter",
]
[[package]]
name = "value-bag"
version = "1.0.0-alpha.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b676010e055c99033117c2343b33a40a30b91fecd6c49055ac9cd2d6c305ab1"
dependencies = [
"ctor",
]
[[package]]
name = "variance"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3abfc2be1fb59663871379ea884fd81de80c496f2274e021c01d6fe56cd77b05"
[[package]]
name = "vec-arena"
version = "1.0.0"
@ -2151,82 +2144,6 @@ version = "0.10.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
[[package]]
name = "wasm-bindgen"
version = "0.2.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9"
dependencies = [
"cfg-if 1.0.0",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae"
dependencies = [
"bumpalo",
"lazy_static",
"log",
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81b8b767af23de6ac18bf2168b690bed2902743ddf0fb39252e36f9e2bfc63ea"
dependencies = [
"cfg-if 1.0.0",
"js-sys",
"wasm-bindgen",
"web-sys",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c"
dependencies = [
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489"
[[package]]
name = "web-sys"
version = "0.3.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a905d57e488fec8861446d3393670fb50d27a262344013181c2cdf9fff5481be"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "wepoll-sys"
version = "3.0.1"
@ -2303,9 +2220,12 @@ version = "0.1.0"
dependencies = [
"anyhow",
"arrayvec",
"crossbeam-channel 0.5.0",
"crossbeam-channel",
"ctor",
"dirs",
"easy-parallel",
"env_logger",
"fsevent",
"futures-core",
"gpui",
"ignore",
@ -2314,6 +2234,7 @@ dependencies = [
"log",
"num_cpus",
"parking_lot",
"postage",
"rand 0.8.3",
"rust-embed",
"seahash",


@ -1,5 +1,5 @@
[workspace]
members = ["zed", "gpui"]
members = ["zed", "gpui", "fsevent", "scoped_pool"]
[patch.crates-io]
async-task = {git = "https://github.com/zed-industries/async-task", rev = "341b57d6de98cdfd7b418567b8de2022ca993a6e"}
@ -9,3 +9,6 @@ cocoa = {git = "https://github.com/servo/core-foundation-rs", rev = "e9a65bb15d5
cocoa-foundation = {git = "https://github.com/servo/core-foundation-rs", rev = "e9a65bb15d591ec22649e03659db8095d4f2dd60"}
core-foundation = {git = "https://github.com/servo/core-foundation-rs", rev = "e9a65bb15d591ec22649e03659db8095d4f2dd60"}
core-graphics = {git = "https://github.com/servo/core-foundation-rs", rev = "e9a65bb15d591ec22649e03659db8095d4f2dd60"}
[profile.dev]
split-debuginfo = "unpacked"

16
fsevent/Cargo.toml Normal file

@ -0,0 +1,16 @@
[package]
name = "fsevent"
version = "2.0.2"
license = "MIT"
edition = "2018"
[dependencies]
bitflags = "1"
fsevent-sys = "3.0.2"
parking_lot = "0.11.1"
[dev-dependencies]
tempdir = "0.3.7"
[package.metadata.docs.rs]
targets = ["x86_64-apple-darwin"]


@ -0,0 +1,16 @@
use fsevent::EventStream;
use std::{env::args, path::Path, time::Duration};
fn main() {
let paths = args().skip(1).collect::<Vec<_>>();
let paths = paths.iter().map(Path::new).collect::<Vec<_>>();
assert!(paths.len() > 0, "Must pass 1 or more paths as arguments");
let (stream, _handle) = EventStream::new(&paths, Duration::from_millis(100));
stream.run(|events| {
eprintln!("event batch");
for event in events {
eprintln!(" {:?}", event);
}
true
});
}

354
fsevent/src/lib.rs Normal file

@ -0,0 +1,354 @@
#![cfg(target_os = "macos")]
use bitflags::bitflags;
use fsevent_sys::{self as fs, core_foundation as cf};
use parking_lot::Mutex;
use std::{
convert::AsRef,
ffi::{c_void, CStr, OsStr},
os::unix::ffi::OsStrExt,
path::{Path, PathBuf},
slice,
sync::Arc,
time::Duration,
};
#[derive(Clone, Debug)]
pub struct Event {
pub event_id: u64,
pub flags: StreamFlags,
pub path: PathBuf,
}
pub struct EventStream {
stream: fs::FSEventStreamRef,
state: Arc<Mutex<Lifecycle>>,
callback: Box<Option<RunCallback>>,
}
type RunCallback = Box<dyn FnMut(Vec<Event>) -> bool>;
enum Lifecycle {
New,
Running(cf::CFRunLoopRef),
Stopped,
}
pub struct Handle(Arc<Mutex<Lifecycle>>);
unsafe impl Send for EventStream {}
unsafe impl Send for Lifecycle {}
impl EventStream {
pub fn new(paths: &[&Path], latency: Duration) -> (Self, Handle) {
unsafe {
let callback = Box::new(None);
let stream_context = fs::FSEventStreamContext {
version: 0,
info: callback.as_ref() as *const _ as *mut c_void,
retain: None,
release: None,
copy_description: None,
};
let cf_paths =
cf::CFArrayCreateMutable(cf::kCFAllocatorDefault, 0, &cf::kCFTypeArrayCallBacks);
assert!(!cf_paths.is_null());
for path in paths {
let path_bytes = path.as_os_str().as_bytes();
let cf_url = cf::CFURLCreateFromFileSystemRepresentation(
cf::kCFAllocatorDefault,
path_bytes.as_ptr() as *const i8,
path_bytes.len() as cf::CFIndex,
false,
);
let cf_path = cf::CFURLCopyFileSystemPath(cf_url, cf::kCFURLPOSIXPathStyle);
cf::CFArrayAppendValue(cf_paths, cf_path);
cf::CFRelease(cf_path);
cf::CFRelease(cf_url);
}
let stream = fs::FSEventStreamCreate(
cf::kCFAllocatorDefault,
Self::trampoline,
&stream_context,
cf_paths,
fs::kFSEventStreamEventIdSinceNow,
latency.as_secs_f64(),
fs::kFSEventStreamCreateFlagFileEvents
| fs::kFSEventStreamCreateFlagNoDefer
| fs::kFSEventStreamCreateFlagWatchRoot,
);
cf::CFRelease(cf_paths);
let state = Arc::new(Mutex::new(Lifecycle::New));
(
EventStream {
stream,
state: state.clone(),
callback,
},
Handle(state),
)
}
}
pub fn run<F>(mut self, f: F)
where
F: FnMut(Vec<Event>) -> bool + 'static,
{
*self.callback = Some(Box::new(f));
unsafe {
let run_loop = cf::CFRunLoopGetCurrent();
{
let mut state = self.state.lock();
match *state {
Lifecycle::New => *state = Lifecycle::Running(run_loop),
Lifecycle::Running(_) => unreachable!(),
Lifecycle::Stopped => return,
}
}
fs::FSEventStreamScheduleWithRunLoop(self.stream, run_loop, cf::kCFRunLoopDefaultMode);
fs::FSEventStreamStart(self.stream);
cf::CFRunLoopRun();
fs::FSEventStreamFlushSync(self.stream);
fs::FSEventStreamStop(self.stream);
fs::FSEventStreamRelease(self.stream);
}
}
extern "C" fn trampoline(
stream_ref: fs::FSEventStreamRef,
info: *mut ::std::os::raw::c_void,
num: usize, // size_t numEvents
event_paths: *mut ::std::os::raw::c_void, // void *eventPaths
event_flags: *const ::std::os::raw::c_void, // const FSEventStreamEventFlags eventFlags[]
event_ids: *const ::std::os::raw::c_void, // const FSEventStreamEventId eventIds[]
) {
unsafe {
let event_paths = event_paths as *const *const ::std::os::raw::c_char;
let e_ptr = event_flags as *mut u32;
let i_ptr = event_ids as *mut u64;
let callback = (info as *mut Option<RunCallback>)
.as_mut()
.unwrap()
.as_mut()
.unwrap();
let paths = slice::from_raw_parts(event_paths, num);
let flags = slice::from_raw_parts_mut(e_ptr, num);
let ids = slice::from_raw_parts_mut(i_ptr, num);
let mut events = Vec::with_capacity(num);
for p in 0..num {
let path_c_str = CStr::from_ptr(paths[p]);
let path = PathBuf::from(OsStr::from_bytes(path_c_str.to_bytes()));
if let Some(flag) = StreamFlags::from_bits(flags[p]) {
events.push(Event {
event_id: ids[p],
flags: flag,
path,
});
} else {
debug_assert!(false, "unknown flag set for fs event: {}", flags[p]);
}
}
if !callback(events) {
fs::FSEventStreamStop(stream_ref);
cf::CFRunLoopStop(cf::CFRunLoopGetCurrent());
}
}
}
}
impl Drop for Handle {
fn drop(&mut self) {
let mut state = self.0.lock();
if let Lifecycle::Running(run_loop) = *state {
unsafe {
cf::CFRunLoopStop(run_loop);
}
}
*state = Lifecycle::Stopped;
}
}
// Synchronize with
// /System/Library/Frameworks/CoreServices.framework/Versions/A/Frameworks/FSEvents.framework/Versions/A/Headers/FSEvents.h
bitflags! {
#[repr(C)]
pub struct StreamFlags: u32 {
const NONE = 0x00000000;
const MUST_SCAN_SUBDIRS = 0x00000001;
const USER_DROPPED = 0x00000002;
const KERNEL_DROPPED = 0x00000004;
const IDS_WRAPPED = 0x00000008;
const HISTORY_DONE = 0x00000010;
const ROOT_CHANGED = 0x00000020;
const MOUNT = 0x00000040;
const UNMOUNT = 0x00000080;
const ITEM_CREATED = 0x00000100;
const ITEM_REMOVED = 0x00000200;
const INODE_META_MOD = 0x00000400;
const ITEM_RENAMED = 0x00000800;
const ITEM_MODIFIED = 0x00001000;
const FINDER_INFO_MOD = 0x00002000;
const ITEM_CHANGE_OWNER = 0x00004000;
const ITEM_XATTR_MOD = 0x00008000;
const IS_FILE = 0x00010000;
const IS_DIR = 0x00020000;
const IS_SYMLINK = 0x00040000;
const OWN_EVENT = 0x00080000;
const IS_HARDLINK = 0x00100000;
const IS_LAST_HARDLINK = 0x00200000;
const ITEM_CLONED = 0x400000;
}
}
impl std::fmt::Display for StreamFlags {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
if self.contains(StreamFlags::MUST_SCAN_SUBDIRS) {
let _d = write!(f, "MUST_SCAN_SUBDIRS ");
}
if self.contains(StreamFlags::USER_DROPPED) {
let _d = write!(f, "USER_DROPPED ");
}
if self.contains(StreamFlags::KERNEL_DROPPED) {
let _d = write!(f, "KERNEL_DROPPED ");
}
if self.contains(StreamFlags::IDS_WRAPPED) {
let _d = write!(f, "IDS_WRAPPED ");
}
if self.contains(StreamFlags::HISTORY_DONE) {
let _d = write!(f, "HISTORY_DONE ");
}
if self.contains(StreamFlags::ROOT_CHANGED) {
let _d = write!(f, "ROOT_CHANGED ");
}
if self.contains(StreamFlags::MOUNT) {
let _d = write!(f, "MOUNT ");
}
if self.contains(StreamFlags::UNMOUNT) {
let _d = write!(f, "UNMOUNT ");
}
if self.contains(StreamFlags::ITEM_CREATED) {
let _d = write!(f, "ITEM_CREATED ");
}
if self.contains(StreamFlags::ITEM_REMOVED) {
let _d = write!(f, "ITEM_REMOVED ");
}
if self.contains(StreamFlags::INODE_META_MOD) {
let _d = write!(f, "INODE_META_MOD ");
}
if self.contains(StreamFlags::ITEM_RENAMED) {
let _d = write!(f, "ITEM_RENAMED ");
}
if self.contains(StreamFlags::ITEM_MODIFIED) {
let _d = write!(f, "ITEM_MODIFIED ");
}
if self.contains(StreamFlags::FINDER_INFO_MOD) {
let _d = write!(f, "FINDER_INFO_MOD ");
}
if self.contains(StreamFlags::ITEM_CHANGE_OWNER) {
let _d = write!(f, "ITEM_CHANGE_OWNER ");
}
if self.contains(StreamFlags::ITEM_XATTR_MOD) {
let _d = write!(f, "ITEM_XATTR_MOD ");
}
if self.contains(StreamFlags::IS_FILE) {
let _d = write!(f, "IS_FILE ");
}
if self.contains(StreamFlags::IS_DIR) {
let _d = write!(f, "IS_DIR ");
}
if self.contains(StreamFlags::IS_SYMLINK) {
let _d = write!(f, "IS_SYMLINK ");
}
if self.contains(StreamFlags::OWN_EVENT) {
let _d = write!(f, "OWN_EVENT ");
}
if self.contains(StreamFlags::IS_LAST_HARDLINK) {
let _d = write!(f, "IS_LAST_HARDLINK ");
}
if self.contains(StreamFlags::IS_HARDLINK) {
let _d = write!(f, "IS_HARDLINK ");
}
if self.contains(StreamFlags::ITEM_CLONED) {
let _d = write!(f, "ITEM_CLONED ");
}
write!(f, "")
}
}
#[test]
fn test_event_stream() {
use std::{fs, sync::mpsc, time::Duration};
use tempdir::TempDir;
let dir = TempDir::new("test_observe").unwrap();
let path = dir.path().canonicalize().unwrap();
fs::write(path.join("a"), "a contents").unwrap();
let (tx, rx) = mpsc::channel();
let (stream, handle) = EventStream::new(&[&path], Duration::from_millis(50));
std::thread::spawn(move || stream.run(move |events| tx.send(events.to_vec()).is_ok()));
fs::write(path.join("b"), "b contents").unwrap();
let events = rx.recv_timeout(Duration::from_millis(500)).unwrap();
let event = events.last().unwrap();
assert_eq!(event.path, path.join("b"));
assert!(event.flags.contains(StreamFlags::ITEM_CREATED));
fs::remove_file(path.join("a")).unwrap();
let events = rx.recv_timeout(Duration::from_millis(500)).unwrap();
let event = events.last().unwrap();
assert_eq!(event.path, path.join("a"));
assert!(event.flags.contains(StreamFlags::ITEM_REMOVED));
drop(handle);
}
#[test]
fn test_event_stream_shutdown() {
use std::{fs, sync::mpsc, time::Duration};
use tempdir::TempDir;
let dir = TempDir::new("test_observe").unwrap();
let path = dir.path().canonicalize().unwrap();
let (tx, rx) = mpsc::channel();
let (stream, handle) = EventStream::new(&[&path], Duration::from_millis(50));
std::thread::spawn(move || {
stream.run({
let tx = tx.clone();
move |_| {
tx.send(()).unwrap();
true
}
});
tx.send(()).unwrap();
});
fs::write(path.join("b"), "b contents").unwrap();
rx.recv_timeout(Duration::from_millis(500)).unwrap();
drop(handle);
rx.recv_timeout(Duration::from_millis(500)).unwrap();
}
#[test]
fn test_event_stream_shutdown_before_run() {
use std::time::Duration;
use tempdir::TempDir;
let dir = TempDir::new("test_observe").unwrap();
let path = dir.path().canonicalize().unwrap();
let (stream, handle) = EventStream::new(&[&path], Duration::from_millis(50));
drop(handle);
stream.run(|_| true);
}
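The stream's lifecycle is split in two: EventStream::run blocks the calling thread's run loop, while the Handle returned by EventStream::new stops that run loop when dropped (see the Drop impl and the shutdown tests above). A short sketch of driving the stream from a background thread and controlling it through the Handle; watch_until_dropped is illustrative, not part of this commit.

// Run the stream on a background thread; the watcher stays alive only as long
// as the caller holds the returned Handle.
use fsevent::{EventStream, Handle};
use std::{path::Path, thread, time::Duration};

fn watch_until_dropped(path: &Path) -> Handle {
    let (stream, handle) = EventStream::new(&[path], Duration::from_millis(50));
    thread::spawn(move || {
        stream.run(|events| {
            for event in events {
                eprintln!("{} {:?}", event.flags, event.path);
            }
            true
        })
    });
    handle
}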


@ -5,7 +5,6 @@ name = "gpui"
version = "0.1.0"
[dependencies]
async-std = {version = "1.9.0", features = ["unstable"]}
async-task = "4.0.3"
ctor = "0.1"
etagere = "0.2"
@ -15,10 +14,11 @@ ordered-float = "2.1.1"
parking_lot = "0.11.1"
pathfinder_color = "0.5"
pathfinder_geometry = "0.5"
postage = {version = "0.4.1", features = ["futures-traits"]}
rand = "0.8.3"
replace_with = "0.1.7"
resvg = "0.14"
scoped-pool = "1.0.0"
scoped-pool = {path = "../scoped_pool"}
seahash = "4.1"
serde = {version = "1.0.125", features = ["derive"]}
serde_json = "1.0.64"
@ -33,6 +33,7 @@ bindgen = "0.57"
cc = "1.0.67"
[dev-dependencies]
env_logger = "0.8"
png = "0.16"
simplelog = "0.9"


@ -4,26 +4,27 @@ use crate::{
keymap::{self, Keystroke},
platform::{self, WindowOptions},
presenter::Presenter,
util::post_inc,
util::{post_inc, timeout},
AssetCache, AssetSource, ClipboardItem, FontCache, PathPromptOptions, TextLayoutCache,
};
use anyhow::{anyhow, Result};
use async_std::sync::Condvar;
use keymap::MatchResult;
use parking_lot::Mutex;
use pathfinder_geometry::{rect::RectF, vector::vec2f};
use platform::Event;
use postage::{sink::Sink as _, stream::Stream as _};
use smol::prelude::*;
use std::{
any::{type_name, Any, TypeId},
cell::RefCell,
collections::{HashMap, HashSet, VecDeque},
collections::{hash_map::Entry, HashMap, HashSet, VecDeque},
fmt::{self, Debug},
hash::{Hash, Hasher},
marker::PhantomData,
path::PathBuf,
rc::{self, Rc},
sync::{Arc, Weak},
time::Duration,
};
pub trait Entity: 'static + Send + Sync {
@ -324,10 +325,6 @@ impl TestAppContext {
result
}
pub fn finish_pending_tasks(&self) -> impl Future<Output = ()> {
self.0.borrow().finish_pending_tasks()
}
pub fn font_cache(&self) -> Arc<FontCache> {
self.0.borrow().font_cache.clone()
}
@ -384,6 +381,7 @@ pub struct MutableAppContext {
next_task_id: usize,
subscriptions: HashMap<usize, Vec<Subscription>>,
observations: HashMap<usize, Vec<Observation>>,
async_observations: HashMap<usize, postage::broadcast::Sender<()>>,
window_invalidations: HashMap<usize, WindowInvalidation>,
presenters_and_platform_windows:
HashMap<usize, (Rc<RefCell<Presenter>>, Box<dyn platform::Window>)>,
@ -391,7 +389,6 @@ pub struct MutableAppContext {
foreground: Rc<executor::Foreground>,
future_handlers: Rc<RefCell<HashMap<usize, FutureHandler>>>,
stream_handlers: Rc<RefCell<HashMap<usize, StreamHandler>>>,
task_done: Arc<Condvar>,
pending_effects: VecDeque<Effect>,
pending_flushes: usize,
flushing_effects: bool,
@ -414,7 +411,7 @@ impl MutableAppContext {
windows: HashMap::new(),
ref_counts: Arc::new(Mutex::new(RefCounts::default())),
background: Arc::new(executor::Background::new()),
scoped_pool: scoped_pool::Pool::new(num_cpus::get()),
thread_pool: scoped_pool::Pool::new(num_cpus::get(), "app"),
},
actions: HashMap::new(),
global_actions: HashMap::new(),
@ -424,13 +421,13 @@ impl MutableAppContext {
next_task_id: 0,
subscriptions: HashMap::new(),
observations: HashMap::new(),
async_observations: HashMap::new(),
window_invalidations: HashMap::new(),
presenters_and_platform_windows: HashMap::new(),
debug_elements_callbacks: HashMap::new(),
foreground,
future_handlers: Default::default(),
stream_handlers: Default::default(),
task_done: Default::default(),
pending_effects: VecDeque::new(),
pending_flushes: 0,
flushing_effects: false,
@ -877,11 +874,13 @@ impl MutableAppContext {
self.ctx.models.remove(&model_id);
self.subscriptions.remove(&model_id);
self.observations.remove(&model_id);
self.async_observations.remove(&model_id);
}
for (window_id, view_id) in dropped_views {
self.subscriptions.remove(&view_id);
self.observations.remove(&view_id);
self.async_observations.remove(&view_id);
if let Some(window) = self.ctx.windows.get_mut(&window_id) {
self.window_invalidations
.entry(window_id)
@ -1047,6 +1046,12 @@ impl MutableAppContext {
}
}
}
if let Entry::Occupied(mut entry) = self.async_observations.entry(observed_id) {
if entry.get_mut().blocking_send(()).is_err() {
entry.remove_entry();
}
}
}
fn notify_view_observers(&mut self, window_id: usize, view_id: usize) {
@ -1055,6 +1060,12 @@ impl MutableAppContext {
.or_default()
.updated
.insert(view_id);
if let Entry::Occupied(mut entry) = self.async_observations.entry(view_id) {
if entry.get_mut().blocking_send(()).is_err() {
entry.remove_entry();
}
}
}
fn focus(&mut self, window_id: usize, focused_id: usize) {
@ -1125,7 +1136,6 @@ impl MutableAppContext {
task_id,
task,
TaskHandlerMap::Future(self.future_handlers.clone()),
self.task_done.clone(),
)
}
@ -1161,7 +1171,6 @@ impl MutableAppContext {
task_id,
task,
TaskHandlerMap::Stream(self.stream_handlers.clone()),
self.task_done.clone(),
)
}
@ -1170,7 +1179,6 @@ impl MutableAppContext {
let future_callback = self.future_handlers.borrow_mut().remove(&task_id).unwrap();
let result = future_callback(output, self);
self.flush_effects();
self.task_done.notify_all();
result
}
@ -1192,44 +1200,9 @@ impl MutableAppContext {
let result = (handler.done_callback)(self);
self.flush_effects();
self.task_done.notify_all();
result
}
pub fn finish_pending_tasks(&self) -> impl Future<Output = ()> {
let mut pending_tasks = self
.future_handlers
.borrow()
.keys()
.cloned()
.collect::<HashSet<_>>();
pending_tasks.extend(self.stream_handlers.borrow().keys());
let task_done = self.task_done.clone();
let future_handlers = self.future_handlers.clone();
let stream_handlers = self.stream_handlers.clone();
async move {
// A Condvar expects the condition to be protected by a Mutex, but in this case we know
// that this logic will always run on the main thread.
let mutex = async_std::sync::Mutex::new(());
loop {
{
let future_handlers = future_handlers.borrow();
let stream_handlers = stream_handlers.borrow();
pending_tasks.retain(|task_id| {
future_handlers.contains_key(task_id)
|| stream_handlers.contains_key(task_id)
});
if pending_tasks.is_empty() {
break;
}
}
task_done.wait(mutex.lock().await).await;
}
}
}
pub fn write_to_clipboard(&self, item: ClipboardItem) {
self.platform.write_to_clipboard(item);
}
@ -1337,7 +1310,7 @@ pub struct AppContext {
windows: HashMap<usize, Window>,
background: Arc<executor::Background>,
ref_counts: Arc<Mutex<RefCounts>>,
scoped_pool: scoped_pool::Pool,
thread_pool: scoped_pool::Pool,
}
impl AppContext {
@ -1377,8 +1350,8 @@ impl AppContext {
&self.background
}
pub fn scoped_pool(&self) -> &scoped_pool::Pool {
&self.scoped_pool
pub fn thread_pool(&self) -> &scoped_pool::Pool {
&self.thread_pool
}
}
@ -1526,6 +1499,10 @@ impl<'a, T: Entity> ModelContext<'a, T> {
&self.app.ctx.background
}
pub fn thread_pool(&self) -> &scoped_pool::Pool {
&self.app.ctx.thread_pool
}
pub fn halt_stream(&mut self) {
self.halt_stream = true;
}
@ -2008,6 +1985,47 @@ impl<T: Entity> ModelHandle<T> {
{
app.update_model(self, update)
}
pub fn condition(
&self,
ctx: &TestAppContext,
mut predicate: impl 'static + FnMut(&T, &AppContext) -> bool,
) -> impl 'static + Future<Output = ()> {
let mut ctx = ctx.0.borrow_mut();
let tx = ctx
.async_observations
.entry(self.id())
.or_insert_with(|| postage::broadcast::channel(128).0);
let mut rx = tx.subscribe();
let ctx = ctx.weak_self.as_ref().unwrap().upgrade().unwrap();
let handle = self.downgrade();
async move {
timeout(Duration::from_millis(200), async move {
loop {
{
let ctx = ctx.borrow();
let ctx = ctx.as_ref();
if predicate(
handle
.upgrade(ctx)
.expect("model dropped with pending condition")
.read(ctx),
ctx,
) {
break;
}
}
rx.recv()
.await
.expect("model dropped with pending condition");
}
})
.await
.expect("condition timed out");
}
}
}
impl<T> Clone for ModelHandle<T> {
@ -2141,6 +2159,47 @@ impl<T: View> ViewHandle<T> {
app.focused_view_id(self.window_id)
.map_or(false, |focused_id| focused_id == self.view_id)
}
pub fn condition(
&self,
ctx: &TestAppContext,
mut predicate: impl 'static + FnMut(&T, &AppContext) -> bool,
) -> impl 'static + Future<Output = ()> {
let mut ctx = ctx.0.borrow_mut();
let tx = ctx
.async_observations
.entry(self.id())
.or_insert_with(|| postage::broadcast::channel(128).0);
let mut rx = tx.subscribe();
let ctx = ctx.weak_self.as_ref().unwrap().upgrade().unwrap();
let handle = self.downgrade();
async move {
timeout(Duration::from_millis(200), async move {
loop {
{
let ctx = ctx.borrow();
let ctx = ctx.as_ref();
if predicate(
handle
.upgrade(ctx)
.expect("model dropped with pending condition")
.read(ctx),
ctx,
) {
break;
}
}
rx.recv()
.await
.expect("model dropped with pending condition");
}
})
.await
.expect("condition timed out");
}
}
}
impl<T> Clone for ViewHandle<T> {
@ -2364,7 +2423,6 @@ pub struct EntityTask<T> {
id: usize,
task: Option<executor::Task<T>>,
handler_map: TaskHandlerMap,
task_done: Arc<Condvar>,
}
enum TaskHandlerMap {
@ -2374,17 +2432,11 @@ enum TaskHandlerMap {
}
impl<T> EntityTask<T> {
fn new(
id: usize,
task: executor::Task<T>,
handler_map: TaskHandlerMap,
task_done: Arc<Condvar>,
) -> Self {
fn new(id: usize, task: executor::Task<T>, handler_map: TaskHandlerMap) -> Self {
Self {
id,
task: Some(task),
handler_map,
task_done,
}
}
@ -2424,7 +2476,6 @@ impl<T> Drop for EntityTask<T> {
map.borrow_mut().remove(&self.id);
}
}
self.task_done.notify_all();
}
}
@ -2432,6 +2483,7 @@ impl<T> Drop for EntityTask<T> {
mod tests {
use super::*;
use crate::elements::*;
use smol::future::poll_once;
#[test]
fn test_model_handles() {
@ -3233,6 +3285,180 @@ mod tests {
});
}
#[test]
fn test_model_condition() {
struct Counter(usize);
impl super::Entity for Counter {
type Event = ();
}
impl Counter {
fn inc(&mut self, ctx: &mut ModelContext<Self>) {
self.0 += 1;
ctx.notify();
}
}
App::test_async((), |mut app| async move {
let model = app.add_model(|_| Counter(0));
let condition1 = model.condition(&app, |model, _| model.0 == 2);
let condition2 = model.condition(&app, |model, _| model.0 == 3);
smol::pin!(condition1, condition2);
model.update(&mut app, |model, ctx| model.inc(ctx));
assert_eq!(poll_once(&mut condition1).await, None);
assert_eq!(poll_once(&mut condition2).await, None);
model.update(&mut app, |model, ctx| model.inc(ctx));
assert_eq!(poll_once(&mut condition1).await, Some(()));
assert_eq!(poll_once(&mut condition2).await, None);
model.update(&mut app, |model, ctx| model.inc(ctx));
assert_eq!(poll_once(&mut condition2).await, Some(()));
// Broadcast channel should be removed if no conditions remain on next notification.
model.update(&mut app, |_, ctx| ctx.notify());
app.update(|ctx| assert!(ctx.async_observations.get(&model.id()).is_none()));
});
}
#[test]
#[should_panic]
fn test_model_condition_timeout() {
struct Model;
impl super::Entity for Model {
type Event = ();
}
App::test_async((), |mut app| async move {
let model = app.add_model(|_| Model);
model.condition(&app, |_, _| false).await;
});
}
#[test]
#[should_panic(expected = "model dropped with pending condition")]
fn test_model_condition_panic_on_drop() {
struct Model;
impl super::Entity for Model {
type Event = ();
}
App::test_async((), |mut app| async move {
let model = app.add_model(|_| Model);
let condition = model.condition(&app, |_, _| false);
app.update(|_| drop(model));
condition.await;
});
}
#[test]
fn test_view_condition() {
struct Counter(usize);
impl super::Entity for Counter {
type Event = ();
}
impl super::View for Counter {
fn ui_name() -> &'static str {
"test view"
}
fn render(&self, _: &AppContext) -> ElementBox {
Empty::new().boxed()
}
}
impl Counter {
fn inc(&mut self, ctx: &mut ViewContext<Self>) {
self.0 += 1;
ctx.notify();
}
}
App::test_async((), |mut app| async move {
let (_, view) = app.add_window(|_| Counter(0));
let condition1 = view.condition(&app, |view, _| view.0 == 2);
let condition2 = view.condition(&app, |view, _| view.0 == 3);
smol::pin!(condition1, condition2);
view.update(&mut app, |view, ctx| view.inc(ctx));
assert_eq!(poll_once(&mut condition1).await, None);
assert_eq!(poll_once(&mut condition2).await, None);
view.update(&mut app, |view, ctx| view.inc(ctx));
assert_eq!(poll_once(&mut condition1).await, Some(()));
assert_eq!(poll_once(&mut condition2).await, None);
view.update(&mut app, |view, ctx| view.inc(ctx));
assert_eq!(poll_once(&mut condition2).await, Some(()));
// Broadcast channel should be removed if no conditions remain on next notification.
view.update(&mut app, |_, ctx| ctx.notify());
app.update(|ctx| assert!(ctx.async_observations.get(&view.id()).is_none()));
});
}
#[test]
#[should_panic]
fn test_view_condition_timeout() {
struct View;
impl super::Entity for View {
type Event = ();
}
impl super::View for View {
fn ui_name() -> &'static str {
"test view"
}
fn render(&self, _: &AppContext) -> ElementBox {
Empty::new().boxed()
}
}
App::test_async((), |mut app| async move {
let (_, view) = app.add_window(|_| View);
view.condition(&app, |_, _| false).await;
});
}
#[test]
#[should_panic(expected = "model dropped with pending condition")]
fn test_view_condition_panic_on_drop() {
struct View;
impl super::Entity for View {
type Event = ();
}
impl super::View for View {
fn ui_name() -> &'static str {
"test view"
}
fn render(&self, _: &AppContext) -> ElementBox {
Empty::new().boxed()
}
}
App::test_async((), |mut app| async move {
let window_id = app.add_window(|_| View).0;
let view = app.add_view(window_id, |_| View);
let condition = view.condition(&app, |_, _| false);
app.update(|_| drop(view));
condition.await;
});
}
// #[test]
// fn test_ui_and_window_updates() {
// struct View {
@ -3313,98 +3539,4 @@ mod tests {
// assert!(invalidation.removed.is_empty());
// });
// }
#[test]
fn test_finish_pending_tasks() {
struct View;
impl Entity for View {
type Event = ();
}
impl super::View for View {
fn render<'a>(&self, _: &AppContext) -> ElementBox {
Empty::new().boxed()
}
fn ui_name() -> &'static str {
"View"
}
}
struct Model;
impl Entity for Model {
type Event = ();
}
App::test_async((), |mut app| async move {
let model = app.add_model(|_| Model);
let (_, view) = app.add_window(|_| View);
model.update(&mut app, |_, ctx| {
ctx.spawn(async {}, |_, _, _| {}).detach();
// Cancel this task
drop(ctx.spawn(async {}, |_, _, _| {}));
});
view.update(&mut app, |_, ctx| {
ctx.spawn(async {}, |_, _, _| {}).detach();
// Cancel this task
drop(ctx.spawn(async {}, |_, _, _| {}));
});
assert!(!app.0.borrow().future_handlers.borrow().is_empty());
app.finish_pending_tasks().await;
assert!(app.0.borrow().future_handlers.borrow().is_empty());
app.finish_pending_tasks().await; // Don't block if there are no tasks
model.update(&mut app, |_, ctx| {
ctx.spawn_stream(smol::stream::iter(vec![1, 2, 3]), |_, _, _| {}, |_, _| {})
.detach();
// Cancel this task
drop(ctx.spawn_stream(smol::stream::iter(vec![1, 2, 3]), |_, _, _| {}, |_, _| {}));
});
view.update(&mut app, |_, ctx| {
ctx.spawn_stream(smol::stream::iter(vec![1, 2, 3]), |_, _, _| {}, |_, _| {})
.detach();
// Cancel this task
drop(ctx.spawn_stream(smol::stream::iter(vec![1, 2, 3]), |_, _, _| {}, |_, _| {}));
});
assert!(!app.0.borrow().stream_handlers.borrow().is_empty());
app.finish_pending_tasks().await;
assert!(app.0.borrow().stream_handlers.borrow().is_empty());
app.finish_pending_tasks().await; // Don't block if there are no tasks
// Tasks are considered finished when we drop handles
let mut tasks = Vec::new();
model.update(&mut app, |_, ctx| {
tasks.push(Box::new(ctx.spawn(async {}, |_, _, _| {})));
tasks.push(Box::new(ctx.spawn_stream(
smol::stream::iter(vec![1, 2, 3]),
|_, _, _| {},
|_, _| {},
)));
});
view.update(&mut app, |_, ctx| {
tasks.push(Box::new(ctx.spawn(async {}, |_, _, _| {})));
tasks.push(Box::new(ctx.spawn_stream(
smol::stream::iter(vec![1, 2, 3]),
|_, _, _| {},
|_, _| {},
)));
});
assert!(!app.0.borrow().stream_handlers.borrow().is_empty());
let finish_pending_tasks = app.finish_pending_tasks();
drop(tasks);
finish_pending_tasks.await;
assert!(app.0.borrow().stream_handlers.borrow().is_empty());
app.finish_pending_tasks().await; // Don't block if there are no tasks
});
}
}


@ -68,16 +68,12 @@ where
fn scroll(
&self,
position: Vector2F,
_: Vector2F,
delta: Vector2F,
precise: bool,
scroll_max: f32,
ctx: &mut EventContext,
) -> bool {
if !self.rect().unwrap().contains_point(position) {
return false;
}
if !precise {
todo!("still need to handle non-precise scroll events from a mouse wheel");
}
@ -111,11 +107,6 @@ where
fn scroll_top(&self) -> f32 {
self.state.0.lock().scroll_top
}
fn rect(&self) -> Option<RectF> {
todo!()
// try_rect(self.origin, self.size)
}
}
impl<F> Element for UniformList<F>
@ -213,7 +204,7 @@ where
fn dispatch_event(
&mut self,
event: &Event,
_: RectF,
bounds: RectF,
layout: &mut Self::LayoutState,
_: &mut Self::PaintState,
ctx: &mut EventContext,
@ -229,8 +220,10 @@ where
delta,
precise,
} => {
if self.scroll(*position, *delta, *precise, layout.scroll_max, ctx) {
handled = true;
if bounds.contains_point(*position) {
if self.scroll(*position, *delta, *precise, layout.scroll_max, ctx) {
handled = true;
}
}
}
_ => {}


@ -1,8 +1,6 @@
use ctor::ctor;
use simplelog::SimpleLogger;
use log::LevelFilter;
#[ctor]
fn init_logger() {
SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
env_logger::init();
}


@ -1,5 +1,20 @@
use smol::future::FutureExt;
use std::{future::Future, time::Duration};
pub fn post_inc(value: &mut usize) -> usize {
let prev = *value;
*value += 1;
prev
}
pub async fn timeout<F, T>(timeout: Duration, f: F) -> Result<T, ()>
where
F: Future<Output = T>,
{
let timer = async {
smol::Timer::after(timeout).await;
Err(())
};
let future = async move { Ok(f.await) };
timer.race(future).await
}
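The timeout helper races a timer against the wrapped future and returns Err(()) if the deadline elapses first. A brief usage sketch, assuming the helper above is in scope; the awaited future is just a placeholder.

use std::time::Duration;

async fn wait_briefly() {
    // Ok(value) if the future finishes within 200ms, Err(()) if the timer wins the race.
    match timeout(Duration::from_millis(200), async { 42 }).await {
        Ok(value) => assert_eq!(value, 42),
        Err(()) => eprintln!("timed out"),
    }
}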

8
scoped_pool/Cargo.toml Normal file

@ -0,0 +1,8 @@
[package]
name = "scoped-pool"
version = "0.0.1"
license = "MIT"
edition = "2018"
[dependencies]
crossbeam-channel = "0.5"

188
scoped_pool/src/lib.rs Normal file

@ -0,0 +1,188 @@
use crossbeam_channel as chan;
use std::{marker::PhantomData, mem::transmute, thread};
#[derive(Clone)]
pub struct Pool {
req_tx: chan::Sender<Request>,
thread_count: usize,
}
pub struct Scope<'a> {
req_count: usize,
req_tx: chan::Sender<Request>,
resp_tx: chan::Sender<()>,
resp_rx: chan::Receiver<()>,
phantom: PhantomData<&'a ()>,
}
struct Request {
callback: Box<dyn FnOnce() + Send + 'static>,
resp_tx: chan::Sender<()>,
}
impl Pool {
pub fn new(thread_count: usize, name: impl AsRef<str>) -> Self {
let (req_tx, req_rx) = chan::unbounded();
for i in 0..thread_count {
thread::Builder::new()
.name(format!("scoped_pool {} {}", name.as_ref(), i))
.spawn({
let req_rx = req_rx.clone();
move || loop {
match req_rx.recv() {
Err(_) => break,
Ok(Request { callback, resp_tx }) => {
callback();
resp_tx.send(()).ok();
}
}
}
})
.expect("scoped_pool: failed to spawn thread");
}
Self {
req_tx,
thread_count,
}
}
pub fn thread_count(&self) -> usize {
self.thread_count
}
pub fn scoped<'scope, F, R>(&self, scheduler: F) -> R
where
F: FnOnce(&mut Scope<'scope>) -> R,
{
let (resp_tx, resp_rx) = chan::bounded(1);
let mut scope = Scope {
resp_tx,
resp_rx,
req_count: 0,
phantom: PhantomData,
req_tx: self.req_tx.clone(),
};
let result = scheduler(&mut scope);
scope.wait();
result
}
}
impl<'scope> Scope<'scope> {
pub fn execute<F>(&mut self, callback: F)
where
F: FnOnce() + Send + 'scope,
{
// Transmute the callback's lifetime to be 'static. This is safe because in ::wait,
// we block until all the callbacks have been called and dropped.
let callback = unsafe {
transmute::<Box<dyn FnOnce() + Send + 'scope>, Box<dyn FnOnce() + Send + 'static>>(
Box::new(callback),
)
};
self.req_count += 1;
self.req_tx
.send(Request {
callback,
resp_tx: self.resp_tx.clone(),
})
.unwrap();
}
fn wait(&self) {
for _ in 0..self.req_count {
self.resp_rx.recv().unwrap();
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::sync::{Arc, Mutex};
#[test]
fn test_execute() {
let pool = Pool::new(3, "test");
{
let vec = Mutex::new(Vec::new());
pool.scoped(|scope| {
for _ in 0..3 {
scope.execute(|| {
for i in 0..5 {
vec.lock().unwrap().push(i);
}
});
}
});
let mut vec = vec.into_inner().unwrap();
vec.sort_unstable();
assert_eq!(vec, [0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4])
}
}
#[test]
fn test_clone_send_and_execute() {
let pool = Pool::new(3, "test");
let mut threads = Vec::new();
for _ in 0..3 {
threads.push(thread::spawn({
let pool = pool.clone();
move || {
let vec = Mutex::new(Vec::new());
pool.scoped(|scope| {
for _ in 0..3 {
scope.execute(|| {
for i in 0..5 {
vec.lock().unwrap().push(i);
}
});
}
});
let mut vec = vec.into_inner().unwrap();
vec.sort_unstable();
assert_eq!(vec, [0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4])
}
}));
}
for thread in threads {
thread.join().unwrap();
}
}
#[test]
fn test_share_and_execute() {
let pool = Arc::new(Pool::new(3, "test"));
let mut threads = Vec::new();
for _ in 0..3 {
threads.push(thread::spawn({
let pool = pool.clone();
move || {
let vec = Mutex::new(Vec::new());
pool.scoped(|scope| {
for _ in 0..3 {
scope.execute(|| {
for i in 0..5 {
vec.lock().unwrap().push(i);
}
});
}
});
let mut vec = vec.into_inner().unwrap();
vec.sort_unstable();
assert_eq!(vec, [0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4])
}
}));
}
for thread in threads {
thread.join().unwrap();
}
}
}
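gpui exposes this pool as thread_pool() on AppContext and ModelContext (renamed from scoped_pool() elsewhere in this commit). The scoped/execute pair gives fork-join parallelism over borrowed data: scoped returns only after every closure handed to execute has finished, which is also what makes the lifetime transmute in execute sound. A usage sketch under the assumption of a four-thread pool; sum_in_parallel is illustrative, not part of this commit.

// Fork-join over borrowed data: each closure writes one slot of `partial_sums`,
// and `scoped` blocks until all of them have run.
fn sum_in_parallel(chunks: &[Vec<u64>]) -> u64 {
    let pool = scoped_pool::Pool::new(4, "example");
    let mut partial_sums = vec![0u64; chunks.len()];
    pool.scoped(|scope| {
        for (chunk, sum) in chunks.iter().zip(partial_sums.iter_mut()) {
            scope.execute(move || *sum = chunk.iter().sum());
        }
    });
    partial_sums.into_iter().sum()
}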


@ -16,25 +16,29 @@ path = "src/main.rs"
anyhow = "1.0.38"
arrayvec = "0.5.2"
crossbeam-channel = "0.5.0"
ctor = "0.1.20"
dirs = "3.0"
easy-parallel = "3.1.0"
fsevent = {path = "../fsevent"}
futures-core = "0.3"
gpui = {path = "../gpui"}
ignore = {git = "https://github.com/zed-industries/ripgrep", rev = "1d152118f35b3e3590216709b86277062d79b8a0"}
ignore = "0.4"
lazy_static = "1.4.0"
libc = "0.2"
log = "0.4"
num_cpus = "1.13.0"
parking_lot = "0.11.1"
postage = {version = "0.4.1", features = ["futures-traits"]}
rand = "0.8.3"
rust-embed = "5.9.0"
seahash = "4.1"
serde = {version = "1", features = ["derive"]}
simplelog = "0.9"
serde = { version = "1", features = ["derive"] }
smallvec = "1.6.1"
smol = "1.2.5"
[dev-dependencies]
env_logger = "0.8"
serde_json = {version = "1.0.64", features = ["preserve_order"]}
tempdir = "0.3.7"
unindent = "0.1.7"

File diff suppressed because it is too large


@ -20,6 +20,7 @@ use std::{
fmt::Write,
iter::FromIterator,
ops::Range,
path::Path,
sync::Arc,
time::Duration,
};
@ -118,7 +119,7 @@ struct ClipboardSelection {
impl BufferView {
pub fn single_line(settings: watch::Receiver<Settings>, ctx: &mut ViewContext<Self>) -> Self {
let buffer = ctx.add_model(|_| Buffer::new(0, String::new()));
let buffer = ctx.add_model(|ctx| Buffer::new(0, String::new(), ctx));
let mut view = Self::for_buffer(buffer, settings, ctx);
view.single_line = true;
view
@ -1315,6 +1316,7 @@ impl BufferView {
buffer::Event::Edited(_) => ctx.emit(Event::Edited),
buffer::Event::Dirtied => ctx.emit(Event::Dirtied),
buffer::Event::Saved => ctx.emit(Event::Saved),
buffer::Event::FileHandleChanged => ctx.emit(Event::FileHandleChanged),
}
}
}
@ -1325,6 +1327,7 @@ pub enum Event {
Blurred,
Dirtied,
Saved,
FileHandleChanged,
}
impl Entity for BufferView {
@ -1371,11 +1374,14 @@ impl workspace::ItemView for BufferView {
}
fn should_update_tab_on_event(event: &Self::Event) -> bool {
matches!(event, Event::Saved | Event::Dirtied)
matches!(
event,
Event::Saved | Event::Dirtied | Event::FileHandleChanged
)
}
fn title(&self, app: &AppContext) -> std::string::String {
if let Some(path) = self.buffer.read(app).path(app) {
if let Some(path) = self.buffer.read(app).path() {
path.file_name()
.expect("buffer's path is always to a file")
.to_string_lossy()
@ -1385,7 +1391,7 @@ impl workspace::ItemView for BufferView {
}
}
fn entry_id(&self, app: &AppContext) -> Option<(usize, usize)> {
fn entry_id(&self, app: &AppContext) -> Option<(usize, Arc<Path>)> {
self.buffer.read(app).entry_id()
}
@ -1418,7 +1424,8 @@ mod tests {
#[test]
fn test_selection_with_mouse() {
App::test((), |app| {
let buffer = app.add_model(|_| Buffer::new(0, "aaaaaa\nbbbbbb\ncccccc\ndddddd\n"));
let buffer =
app.add_model(|ctx| Buffer::new(0, "aaaaaa\nbbbbbb\ncccccc\ndddddd\n", ctx));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (_, buffer_view) =
app.add_window(|ctx| BufferView::for_buffer(buffer, settings, ctx));
@ -1532,7 +1539,7 @@ mod tests {
let layout_cache = TextLayoutCache::new(app.platform().fonts());
let font_cache = app.font_cache().clone();
let buffer = app.add_model(|_| Buffer::new(0, sample_text(6, 6)));
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(6, 6), ctx));
let settings = settings::channel(&font_cache).unwrap().1;
let (_, view) =
@ -1549,7 +1556,7 @@ mod tests {
#[test]
fn test_fold() {
App::test((), |app| {
let buffer = app.add_model(|_| {
let buffer = app.add_model(|ctx| {
Buffer::new(
0,
"
@ -1570,6 +1577,7 @@ mod tests {
}
"
.unindent(),
ctx,
)
});
let settings = settings::channel(&app.font_cache()).unwrap().1;
@ -1643,7 +1651,7 @@ mod tests {
#[test]
fn test_move_cursor() -> Result<()> {
App::test((), |app| {
let buffer = app.add_model(|_| Buffer::new(0, sample_text(6, 6)));
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(6, 6), ctx));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (_, view) =
app.add_window(|ctx| BufferView::for_buffer(buffer.clone(), settings, ctx));
@ -1680,8 +1688,12 @@ mod tests {
#[test]
fn test_backspace() {
App::test((), |app| {
let buffer = app.add_model(|_| {
Buffer::new(0, "one two three\nfour five six\nseven eight nine\nten\n")
let buffer = app.add_model(|ctx| {
Buffer::new(
0,
"one two three\nfour five six\nseven eight nine\nten\n",
ctx,
)
});
let settings = settings::channel(&app.font_cache()).unwrap().1;
let (_, view) =
@ -1713,7 +1725,7 @@ mod tests {
#[test]
fn test_clipboard() {
App::test((), |app| {
let buffer = app.add_model(|_| Buffer::new(0, "one two three four five six "));
let buffer = app.add_model(|ctx| Buffer::new(0, "one two three four five six ", ctx));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let view = app
.add_window(|ctx| BufferView::for_buffer(buffer.clone(), settings, ctx))


@ -471,7 +471,7 @@ mod tests {
#[test]
fn test_basic_folds() {
App::test((), |app| {
let buffer = app.add_model(|_| Buffer::new(0, sample_text(5, 6)));
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold(
@ -522,7 +522,7 @@ mod tests {
#[test]
fn test_overlapping_folds() {
App::test((), |app| {
let buffer = app.add_model(|_| Buffer::new(0, sample_text(5, 6)));
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold(
vec![
@ -541,7 +541,7 @@ mod tests {
#[test]
fn test_merging_folds_via_edit() {
App::test((), |app| {
let buffer = app.add_model(|_| Buffer::new(0, sample_text(5, 6)));
let buffer = app.add_model(|ctx| Buffer::new(0, sample_text(5, 6), ctx));
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
map.fold(
@ -589,10 +589,10 @@ mod tests {
let mut rng = StdRng::seed_from_u64(seed);
App::test((), |app| {
let buffer = app.add_model(|_| {
let buffer = app.add_model(|ctx| {
let len = rng.gen_range(0..10);
let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
Buffer::new(0, text)
Buffer::new(0, text, ctx)
});
let mut map = FoldMap::new(buffer.clone(), app.as_ref());
@ -664,7 +664,7 @@ mod tests {
fn test_buffer_rows() {
App::test((), |app| {
let text = sample_text(6, 6) + "\n";
let buffer = app.add_model(|_| Buffer::new(0, text));
let buffer = app.add_model(|ctx| Buffer::new(0, text, ctx));
let mut map = FoldMap::new(buffer.clone(), app.as_ref());


@ -298,7 +298,7 @@ mod tests {
fn test_chars_at() {
App::test((), |app| {
let text = sample_text(6, 6);
let buffer = app.add_model(|_| Buffer::new(0, text));
let buffer = app.add_model(|ctx| Buffer::new(0, text, ctx));
let map = app.add_model(|ctx| DisplayMap::new(buffer.clone(), 4, ctx));
buffer
.update(app, |buffer, ctx| {
@ -365,7 +365,7 @@ mod tests {
#[test]
fn test_max_point() {
App::test((), |app| {
let buffer = app.add_model(|_| Buffer::new(0, "aaa\n\t\tbbb"));
let buffer = app.add_model(|ctx| Buffer::new(0, "aaa\n\t\tbbb", ctx));
let map = app.add_model(|ctx| DisplayMap::new(buffer.clone(), 4, ctx));
assert_eq!(
map.read(app).max_point(app.as_ref()),


@ -14,7 +14,14 @@ use gpui::{
AppContext, Axis, Border, Entity, ModelHandle, MutableAppContext, View, ViewContext,
ViewHandle, WeakViewHandle,
};
use std::cmp;
use std::{
cmp,
path::Path,
sync::{
atomic::{self, AtomicBool},
Arc,
},
};
pub struct FileFinder {
handle: WeakViewHandle<Self>,
@ -24,7 +31,9 @@ pub struct FileFinder {
search_count: usize,
latest_search_id: usize,
matches: Vec<PathMatch>,
selected: usize,
include_root_name: bool,
selected: Option<Arc<Path>>,
cancel_flag: Arc<AtomicBool>,
list_state: UniformListState,
}
@ -32,8 +41,8 @@ pub fn init(app: &mut MutableAppContext) {
app.add_action("file_finder:toggle", FileFinder::toggle);
app.add_action("file_finder:confirm", FileFinder::confirm);
app.add_action("file_finder:select", FileFinder::select);
app.add_action("buffer:move_up", FileFinder::select_prev);
app.add_action("buffer:move_down", FileFinder::select_next);
app.add_action("menu:select_prev", FileFinder::select_prev);
app.add_action("menu:select_next", FileFinder::select_next);
app.add_action("uniform_list:scroll", FileFinder::scroll);
app.add_bindings(vec![
@ -44,7 +53,7 @@ pub fn init(app: &mut MutableAppContext) {
}
pub enum Event {
Selected(usize, usize),
Selected(usize, Arc<Path>),
Dismissed,
}
@ -137,24 +146,24 @@ impl FileFinder {
app: &AppContext,
) -> Option<ElementBox> {
let tree_id = path_match.tree_id;
let entry_id = path_match.entry_id;
self.worktree(tree_id, app).map(|tree| {
let path = tree.entry_path(entry_id).unwrap();
let file_name = path
let prefix = if self.include_root_name {
tree.root_name()
} else {
""
};
let path = path_match.path.clone();
let path_string = path_match.path.to_string_lossy();
let file_name = path_match
.path
.file_name()
.unwrap_or_default()
.to_string_lossy()
.to_string();
let mut path = path.to_string_lossy().to_string();
if path_match.skipped_prefix_len > 0 {
let mut i = 0;
path.retain(|_| util::post_inc(&mut i) >= path_match.skipped_prefix_len)
}
.to_string_lossy();
let path_positions = path_match.positions.clone();
let file_name_start = path.chars().count() - file_name.chars().count();
let file_name_start =
prefix.len() + path_string.chars().count() - file_name.chars().count();
let mut file_name_positions = Vec::new();
file_name_positions.extend(path_positions.iter().filter_map(|pos| {
if pos >= &file_name_start {
@ -168,6 +177,9 @@ impl FileFinder {
let highlight_color = ColorU::from_u32(0x304ee2ff);
let bold = *Properties::new().weight(Weight::BOLD);
let mut full_path = prefix.to_string();
full_path.push_str(&path_string);
let mut container = Container::new(
Flex::row()
.with_child(
@ -188,7 +200,7 @@ impl FileFinder {
Flex::column()
.with_child(
Label::new(
file_name,
file_name.to_string(),
settings.ui_font_family,
settings.ui_font_size,
)
@ -197,7 +209,7 @@ impl FileFinder {
)
.with_child(
Label::new(
path.into(),
full_path,
settings.ui_font_family,
settings.ui_font_size,
)
@ -212,18 +224,19 @@ impl FileFinder {
)
.with_uniform_padding(6.0);
if index == self.selected || index < self.matches.len() - 1 {
let selected_index = self.selected_index();
if index == selected_index || index < self.matches.len() - 1 {
container =
container.with_border(Border::bottom(1.0, ColorU::from_u32(0xdbdbdcff)));
}
if index == self.selected {
if index == selected_index {
container = container.with_background_color(ColorU::from_u32(0xdbdbdcff));
}
EventHandler::new(container.boxed())
.on_mouse_down(move |ctx| {
ctx.dispatch_action("file_finder:select", (tree_id, entry_id));
ctx.dispatch_action("file_finder:select", (tree_id, path.clone()));
true
})
.named("match")
@ -251,8 +264,8 @@ impl FileFinder {
ctx: &mut ViewContext<WorkspaceView>,
) {
match event {
Event::Selected(tree_id, entry_id) => {
workspace_view.open_entry((*tree_id, *entry_id), ctx);
Event::Selected(tree_id, path) => {
workspace_view.open_entry((*tree_id, path.clone()), ctx);
workspace_view.dismiss_modal(ctx);
}
Event::Dismissed => {
@ -281,7 +294,9 @@ impl FileFinder {
search_count: 0,
latest_search_id: 0,
matches: Vec::new(),
selected: 0,
include_root_name: false,
selected: None,
cancel_flag: Arc::new(AtomicBool::new(false)),
list_state: UniformListState::new(),
}
}
@ -313,19 +328,34 @@ impl FileFinder {
}
}
fn select_prev(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
if self.selected > 0 {
self.selected -= 1;
fn selected_index(&self) -> usize {
if let Some(selected) = self.selected.as_ref() {
for (ix, path_match) in self.matches.iter().enumerate() {
if path_match.path.as_ref() == selected.as_ref() {
return ix;
}
}
}
self.list_state.scroll_to(self.selected);
0
}
fn select_prev(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
let mut selected_index = self.selected_index();
if selected_index > 0 {
selected_index -= 1;
self.selected = Some(self.matches[selected_index].path.clone());
}
self.list_state.scroll_to(selected_index);
ctx.notify();
}
fn select_next(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
if self.selected + 1 < self.matches.len() {
self.selected += 1;
let mut selected_index = self.selected_index();
if selected_index + 1 < self.matches.len() {
selected_index += 1;
self.selected = Some(self.matches[selected_index].path.clone());
}
self.list_state.scroll_to(self.selected);
self.list_state.scroll_to(selected_index);
ctx.notify();
}
@ -334,23 +364,41 @@ impl FileFinder {
}
fn confirm(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
if let Some(m) = self.matches.get(self.selected) {
ctx.emit(Event::Selected(m.tree_id, m.entry_id));
if let Some(m) = self.matches.get(self.selected_index()) {
ctx.emit(Event::Selected(m.tree_id, m.path.clone()));
}
}
fn select(&mut self, entry: &(usize, usize), ctx: &mut ViewContext<Self>) {
let (tree_id, entry_id) = *entry;
ctx.emit(Event::Selected(tree_id, entry_id));
fn select(&mut self, (tree_id, path): &(usize, Arc<Path>), ctx: &mut ViewContext<Self>) {
ctx.emit(Event::Selected(*tree_id, path.clone()));
}
fn spawn_search(&mut self, query: String, ctx: &mut ViewContext<Self>) {
let worktrees = self.worktrees(ctx.as_ref());
let snapshots = self
.workspace
.read(ctx)
.worktrees()
.iter()
.map(|tree| tree.read(ctx).snapshot())
.collect::<Vec<_>>();
let search_id = util::post_inc(&mut self.search_count);
let pool = ctx.as_ref().scoped_pool().clone();
let pool = ctx.as_ref().thread_pool().clone();
self.cancel_flag.store(true, atomic::Ordering::Relaxed);
self.cancel_flag = Arc::new(AtomicBool::new(false));
let cancel_flag = self.cancel_flag.clone();
let task = ctx.background_executor().spawn(async move {
let matches = match_paths(worktrees.as_slice(), &query, false, false, 100, pool);
(search_id, matches)
let include_root_name = snapshots.len() > 1;
let matches = match_paths(
snapshots.iter(),
&query,
include_root_name,
false,
false,
100,
cancel_flag,
pool,
);
(search_id, include_root_name, matches)
});
ctx.spawn(task, Self::update_matches).detach();
@ -358,14 +406,14 @@ impl FileFinder {
fn update_matches(
&mut self,
(search_id, matches): (usize, Vec<PathMatch>),
(search_id, include_root_name, matches): (usize, bool, Vec<PathMatch>),
ctx: &mut ViewContext<Self>,
) {
if search_id >= self.latest_search_id {
self.latest_search_id = search_id;
self.matches = matches;
self.selected = 0;
self.list_state.scroll_to(0);
self.include_root_name = include_root_name;
self.list_state.scroll_to(self.selected_index());
ctx.notify();
}
}
@ -377,15 +425,6 @@ impl FileFinder {
.get(&tree_id)
.map(|worktree| worktree.read(app))
}
fn worktrees(&self, app: &AppContext) -> Vec<Worktree> {
self.workspace
.read(app)
.worktrees()
.iter()
.map(|worktree| worktree.read(app).clone())
.collect()
}
}
#[cfg(test)]
@ -419,7 +458,8 @@ mod tests {
let workspace = app.add_model(|ctx| Workspace::new(vec![tmp_dir.path().into()], ctx));
let (window_id, workspace_view) =
app.add_window(|ctx| WorkspaceView::new(workspace.clone(), settings, ctx));
app.finish_pending_tasks().await; // Open and populate worktree.
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
.await;
app.dispatch_action(
window_id,
vec![workspace_view.id()],
@ -442,33 +482,30 @@ mod tests {
app.dispatch_action(window_id, chain.clone(), "buffer:insert", "b".to_string());
app.dispatch_action(window_id, chain.clone(), "buffer:insert", "n".to_string());
app.dispatch_action(window_id, chain.clone(), "buffer:insert", "a".to_string());
app.finish_pending_tasks().await; // Complete path search.
finder
.condition(&app, |finder, _| finder.matches.len() == 2)
.await;
// let view_state = finder.state(&app);
// assert!(view_state.matches.len() > 1);
// app.dispatch_action(
// window_id,
// vec![workspace_view.id(), finder.id()],
// "menu:select_next",
// (),
// );
// app.dispatch_action(
// window_id,
// vec![workspace_view.id(), finder.id()],
// "file_finder:confirm",
// (),
// );
// app.finish_pending_tasks().await; // Load Buffer and open BufferView.
// let active_pane = workspace_view.as_ref(app).active_pane().clone();
// assert_eq!(
// active_pane.state(&app),
// pane::State {
// tabs: vec![pane::TabState {
// title: "bandana".into(),
// active: true,
// }]
// }
// );
let active_pane = app.read(|ctx| workspace_view.read(ctx).active_pane().clone());
app.dispatch_action(
window_id,
vec![workspace_view.id(), finder.id()],
"menu:select_next",
(),
);
app.dispatch_action(
window_id,
vec![workspace_view.id(), finder.id()],
"file_finder:confirm",
(),
);
active_pane
.condition(&app, |pane, _| pane.active_item().is_some())
.await;
app.read(|ctx| {
let active_item = active_pane.read(ctx).active_item().unwrap();
assert_eq!(active_item.title(ctx), "bandana");
});
});
}
}
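Note: the selection is now remembered as the selected path rather than a raw index, so it survives the match list being replaced by a newer search. A minimal standalone sketch of that pattern (the Match and Finder names here are illustrative, not the actual FileFinder types):

use std::{path::Path, sync::Arc};

struct Match {
    path: Arc<Path>,
}

struct Finder {
    matches: Vec<Match>,
    selected: Option<Arc<Path>>,
}

impl Finder {
    // Resolve the remembered path back to an index; fall back to the top
    // match if the path no longer appears in the current results.
    fn selected_index(&self) -> usize {
        self.selected
            .as_ref()
            .and_then(|sel| {
                self.matches
                    .iter()
                    .position(|m| m.path.as_ref() == sel.as_ref())
            })
            .unwrap_or(0)
    }

    fn select_next(&mut self) {
        let ix = self.selected_index();
        if ix + 1 < self.matches.len() {
            self.selected = Some(self.matches[ix + 1].path.clone());
        }
    }
}

update_matches then scrolls to selected_index(), so a selection that is still present stays selected while one that vanished falls back to the top result.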

View file

@ -8,7 +8,6 @@ mod sum_tree;
#[cfg(test)]
mod test;
mod time;
mod timer;
mod util;
pub mod watch;
pub mod workspace;

View file

@ -1,3 +1,6 @@
// Allow binary to be called Zed for a nice application menu when running the executable directly

#![allow(non_snake_case)]
use fs::OpenOptions;
use log::LevelFilter;
use simplelog::SimpleLogger;

View file

@ -35,11 +35,7 @@ impl<T: Operation> OperationQueue<T> {
pub fn insert(&mut self, mut ops: Vec<T>) {
ops.sort_by_key(|op| op.timestamp());
ops.dedup_by_key(|op| op.timestamp());
let mut edits = ops
.into_iter()
.map(|op| Edit::Insert(op))
.collect::<Vec<_>>();
self.0.edit(&mut edits);
self.0.edit(ops.into_iter().map(Edit::Insert).collect());
}
pub fn drain(&mut self) -> Self {

View file

@ -199,6 +199,9 @@ where
}
pub fn next(&mut self) {
if !self.did_seek {
self.descend_to_first_item(self.tree, |_| true)
}
self.next_internal(|_| true)
}
@ -271,6 +274,7 @@ where
}
self.at_end = self.stack.is_empty();
debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf());
}
pub fn descend_to_first_item<F>(&mut self, mut subtree: &'a SumTree<T>, filter_node: F)
@ -656,6 +660,7 @@ where
}
self.at_end = self.stack.is_empty();
debug_assert!(self.stack.is_empty() || self.stack.last().unwrap().tree.0.is_leaf());
if bias == SeekBias::Left {
let mut end = self.seek_dimension.clone();
if let Some(summary) = self.item_summary() {

View file

@ -10,7 +10,7 @@ const TREE_BASE: usize = 2;
#[cfg(not(test))]
const TREE_BASE: usize = 6;
pub trait Item: Clone + Eq + fmt::Debug {
pub trait Item: Clone + fmt::Debug {
type Summary: for<'a> AddAssign<&'a Self::Summary> + Default + Clone + fmt::Debug;
fn summary(&self) -> Self::Summary;
@ -22,7 +22,7 @@ pub trait KeyedItem: Item {
fn key(&self) -> Self::Key;
}
pub trait Dimension<'a, Summary: Default>: 'a + Clone + fmt::Debug + Default {
pub trait Dimension<'a, Summary: Default>: Clone + fmt::Debug + Default {
fn add_summary(&mut self, summary: &'a Summary);
}
@ -332,11 +332,12 @@ impl<T: KeyedItem> SumTree<T> {
};
}
pub fn edit(&mut self, edits: &mut [Edit<T>]) {
pub fn edit(&mut self, mut edits: Vec<Edit<T>>) -> Vec<T> {
if edits.is_empty() {
return;
return Vec::new();
}
let mut removed = Vec::new();
edits.sort_unstable_by_key(|item| item.key());
*self = {
@ -358,13 +359,19 @@ impl<T: KeyedItem> SumTree<T> {
new_tree.push_tree(slice);
old_item = cursor.item();
}
if old_item.map_or(false, |old_item| old_item.key() == new_key) {
cursor.next();
if let Some(old_item) = old_item {
if old_item.key() == new_key {
removed.push(old_item.clone());
cursor.next();
}
}
match edit {
Edit::Insert(item) => {
buffered_items.push(item.clone());
buffered_items.push(item);
}
Edit::Remove(_) => {}
}
}
@ -372,6 +379,23 @@ impl<T: KeyedItem> SumTree<T> {
new_tree.push_tree(cursor.suffix());
new_tree
};
removed
}
pub fn get(&self, key: &T::Key) -> Option<&T> {
let mut cursor = self.cursor::<T::Key, ()>();
if cursor.seek(key, SeekBias::Left) {
cursor.item()
} else {
None
}
}
}
impl<T: Item> Default for SumTree<T> {
fn default() -> Self {
Self::new()
}
}
@ -446,12 +470,14 @@ impl<T: Item> Node<T> {
#[derive(Debug)]
pub enum Edit<T: KeyedItem> {
Insert(T),
Remove(T::Key),
}
impl<T: KeyedItem> Edit<T> {
fn key(&self) -> T::Key {
match self {
Edit::Insert(item) => item.key(),
Edit::Remove(key) => key.clone(),
}
}
}
@ -471,6 +497,7 @@ where
#[cfg(test)]
mod tests {
use super::*;
use std::cmp;
use std::ops::Add;
#[test]
@ -754,11 +781,33 @@ mod tests {
assert_eq!(cursor.slice(&Count(6), SeekBias::Right).items(), vec![6]);
}
#[test]
fn test_edit() {
let mut tree = SumTree::<u8>::new();
let removed = tree.edit(vec![Edit::Insert(1), Edit::Insert(2), Edit::Insert(0)]);
assert_eq!(tree.items(), vec![0, 1, 2]);
assert_eq!(removed, Vec::<u8>::new());
assert_eq!(tree.get(&0), Some(&0));
assert_eq!(tree.get(&1), Some(&1));
assert_eq!(tree.get(&2), Some(&2));
assert_eq!(tree.get(&4), None);
let removed = tree.edit(vec![Edit::Insert(2), Edit::Insert(4), Edit::Remove(0)]);
assert_eq!(tree.items(), vec![1, 2, 4]);
assert_eq!(removed, vec![0, 2]);
assert_eq!(tree.get(&0), None);
assert_eq!(tree.get(&1), Some(&1));
assert_eq!(tree.get(&2), Some(&2));
assert_eq!(tree.get(&4), Some(&4));
}
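Note: edit now reports the items it displaced or removed, which is useful when a caller keeps a secondary index keyed by those items. A hedged sketch of one such use (the HashMap and the path/inode names are purely illustrative and not part of sum_tree):

use std::collections::HashMap;

// Drop stale entries from a side table based on what a keyed edit removed.
fn prune_removed(index: &mut HashMap<String, u64>, removed: Vec<(String, u64)>) {
    for (path, inode) in removed {
        // Only forget the mapping if it still points at the removed item.
        if index.get(&path) == Some(&inode) {
            index.remove(&path);
        }
    }
}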
#[derive(Clone, Default, Debug)]
pub struct IntegersSummary {
count: Count,
sum: Sum,
contains_even: bool,
max: u8,
}
#[derive(Ord, PartialOrd, Default, Eq, PartialEq, Clone, Debug)]
@ -775,15 +824,31 @@ mod tests {
count: Count(1),
sum: Sum(*self as usize),
contains_even: (*self & 1) == 0,
max: *self,
}
}
}
impl KeyedItem for u8 {
type Key = u8;
fn key(&self) -> Self::Key {
*self
}
}
impl<'a> Dimension<'a, IntegersSummary> for u8 {
fn add_summary(&mut self, summary: &IntegersSummary) {
*self = summary.max;
}
}
impl<'a> AddAssign<&'a Self> for IntegersSummary {
fn add_assign(&mut self, other: &Self) {
self.count.0 += &other.count.0;
self.sum.0 += &other.sum.0;
self.contains_even |= other.contains_even;
self.max = cmp::max(self.max, other.max);
}
}
@ -793,15 +858,6 @@ mod tests {
}
}
// impl<'a> Add<&'a Self> for Count {
// type Output = Self;
//
// fn add(mut self, other: &Self) -> Self {
// self.0 += other.0;
// self
// }
// }
impl<'a> Dimension<'a, IntegersSummary> for Sum {
fn add_summary(&mut self, summary: &IntegersSummary) {
self.0 += summary.sum.0;

View file

@ -1,3 +1,5 @@
use crate::time::ReplicaId;
use ctor::ctor;
use rand::Rng;
use std::{
collections::BTreeMap,
@ -5,7 +7,10 @@ use std::{
};
use tempdir::TempDir;
use crate::time::ReplicaId;
#[ctor]
fn init_logger() {
env_logger::init();
}
#[derive(Clone)]
struct Envelope<T: Clone> {

View file

@ -1,42 +0,0 @@
use smol::prelude::*;
use std::{
pin::Pin,
task::Poll,
time::{Duration, Instant},
};
pub struct Repeat {
timer: smol::Timer,
period: Duration,
}
impl Stream for Repeat {
type Item = Instant;
fn poll_next(
mut self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
) -> Poll<Option<Self::Item>> {
match self.as_mut().timer().poll(cx) {
Poll::Ready(instant) => {
let period = self.as_ref().period;
self.as_mut().timer().set_after(period);
Poll::Ready(Some(instant))
}
Poll::Pending => Poll::Pending,
}
}
}
impl Repeat {
fn timer(self: std::pin::Pin<&mut Self>) -> Pin<&mut smol::Timer> {
unsafe { self.map_unchecked_mut(|s| &mut s.timer) }
}
}
pub fn repeat(period: Duration) -> Repeat {
Repeat {
timer: smol::Timer::after(period),
period,
}
}

View file

@ -7,7 +7,7 @@ use gpui::{
keymap::Binding,
AppContext, Border, Entity, MutableAppContext, Quad, View, ViewContext,
};
use std::cmp;
use std::{cmp, path::Path, sync::Arc};
pub fn init(app: &mut MutableAppContext) {
app.add_action(
@ -107,7 +107,7 @@ impl Pane {
pub fn activate_entry(
&mut self,
entry_id: (usize, usize),
entry_id: (usize, Arc<Path>),
ctx: &mut ViewContext<Self>,
) -> bool {
if let Some(index) = self.items.iter().position(|item| {

View file

@ -1,6 +1,6 @@
use super::{ItemView, ItemViewHandle};
use crate::{
editor::Buffer,
editor::{Buffer, History},
settings::Settings,
time::ReplicaId,
watch,
@ -76,7 +76,7 @@ enum OpenedItem {
pub struct Workspace {
replica_id: ReplicaId,
worktrees: HashSet<ModelHandle<Worktree>>,
items: HashMap<(usize, usize), OpenedItem>,
items: HashMap<(usize, u64), OpenedItem>,
}
impl Workspace {
@ -94,6 +94,19 @@ impl Workspace {
&self.worktrees
}
pub fn worktree_scans_complete(&self, ctx: &AppContext) -> impl Future<Output = ()> + 'static {
let futures = self
.worktrees
.iter()
.map(|worktree| worktree.read(ctx).scan_complete())
.collect::<Vec<_>>();
async move {
for future in futures {
future.await;
}
}
}
pub fn contains_paths(&self, paths: &[PathBuf], app: &AppContext) -> bool {
paths.iter().all(|path| self.contains_path(&path, app))
}
@ -101,7 +114,7 @@ impl Workspace {
pub fn contains_path(&self, path: &Path, app: &AppContext) -> bool {
self.worktrees
.iter()
.any(|worktree| worktree.read(app).contains_path(path))
.any(|worktree| worktree.read(app).contains_abs_path(path))
}
pub fn open_paths(&mut self, paths: &[PathBuf], ctx: &mut ModelContext<Self>) {
@ -112,12 +125,12 @@ impl Workspace {
pub fn open_path<'a>(&'a mut self, path: PathBuf, ctx: &mut ModelContext<Self>) {
for tree in self.worktrees.iter() {
if tree.read(ctx).contains_path(&path) {
if tree.read(ctx).contains_abs_path(&path) {
return;
}
}
let worktree = ctx.add_model(|ctx| Worktree::new(ctx.model_id(), path, Some(ctx)));
let worktree = ctx.add_model(|ctx| Worktree::new(path, ctx));
ctx.observe(&worktree, Self::on_worktree_updated);
self.worktrees.insert(worktree);
ctx.notify();
@ -125,10 +138,22 @@ impl Workspace {
pub fn open_entry(
&mut self,
entry: (usize, usize),
(worktree_id, path): (usize, Arc<Path>),
ctx: &mut ModelContext<'_, Self>,
) -> anyhow::Result<Pin<Box<dyn Future<Output = OpenResult> + Send>>> {
if let Some(item) = self.items.get(&entry).cloned() {
let worktree = self
.worktrees
.get(&worktree_id)
.cloned()
.ok_or_else(|| anyhow!("worktree {} does not exist", worktree_id,))?;
let inode = worktree
.read(ctx)
.inode_for_path(&path)
.ok_or_else(|| anyhow!("path {:?} does not exist", path))?;
let item_key = (worktree_id, inode);
if let Some(item) = self.items.get(&item_key).cloned() {
return Ok(async move {
match item {
OpenedItem::Loaded(handle) => {
@ -146,25 +171,22 @@ impl Workspace {
.boxed());
}
let worktree = self
.worktrees
.get(&entry.0)
.cloned()
.ok_or(anyhow!("worktree {} does not exist", entry.0,))?;
let replica_id = self.replica_id;
let file = worktree.file(entry.1, ctx.as_ref())?;
let file = worktree.file(path.clone(), ctx.as_ref())?;
let history = file.load_history(ctx.as_ref());
let buffer = async move { Ok(Buffer::from_history(replica_id, file, history.await?)) };
// let buffer = async move { Ok(Buffer::from_history(replica_id, file, history.await?)) };
let (mut tx, rx) = watch::channel(None);
self.items.insert(entry, OpenedItem::Loading(rx));
self.items.insert(item_key, OpenedItem::Loading(rx));
ctx.spawn(
buffer,
move |me, buffer: anyhow::Result<Buffer>, ctx| match buffer {
Ok(buffer) => {
let handle = Box::new(ctx.add_model(|_| buffer)) as Box<dyn ItemHandle>;
me.items.insert(entry, OpenedItem::Loaded(handle.clone()));
history,
move |me, history: anyhow::Result<History>, ctx| match history {
Ok(history) => {
let handle = Box::new(
ctx.add_model(|ctx| Buffer::from_history(replica_id, file, history, ctx)),
) as Box<dyn ItemHandle>;
me.items
.insert(item_key, OpenedItem::Loaded(handle.clone()));
ctx.spawn(
async move {
tx.update(|value| *value = Some(Ok(handle))).await;
@ -186,7 +208,7 @@ impl Workspace {
)
.detach();
self.open_entry(entry, ctx)
self.open_entry((worktree_id, path), ctx)
}
fn on_worktree_updated(&mut self, _: ModelHandle<Worktree>, ctx: &mut ModelContext<Self>) {
@ -200,20 +222,20 @@ impl Entity for Workspace {
#[cfg(test)]
pub trait WorkspaceHandle {
fn file_entries(&self, app: &AppContext) -> Vec<(usize, usize)>;
fn file_entries(&self, app: &AppContext) -> Vec<(usize, Arc<Path>)>;
}
#[cfg(test)]
impl WorkspaceHandle for ModelHandle<Workspace> {
fn file_entries(&self, app: &AppContext) -> Vec<(usize, usize)> {
fn file_entries(&self, app: &AppContext) -> Vec<(usize, Arc<Path>)> {
self.read(app)
.worktrees()
.iter()
.flat_map(|tree| {
let tree_id = tree.id();
tree.read(app)
.files()
.map(move |file| (tree_id, file.entry_id))
.files(0)
.map(move |f| (tree_id, f.path().clone()))
})
.collect::<Vec<_>>()
}
@ -237,18 +259,19 @@ mod tests {
}));
let workspace = app.add_model(|ctx| Workspace::new(vec![dir.path().into()], ctx));
app.finish_pending_tasks().await; // Open and populate worktree.
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
.await;
// Get the first file entry.
let tree = app.read(|ctx| workspace.read(ctx).worktrees.iter().next().unwrap().clone());
let entry_id = app.read(|ctx| tree.read(ctx).files().next().unwrap().entry_id);
let entry = (tree.id(), entry_id);
let path = app.read(|ctx| tree.read(ctx).files(0).next().unwrap().path().clone());
let entry = (tree.id(), path);
// Open the same entry twice before it finishes loading.
let (future_1, future_2) = workspace.update(&mut app, |w, app| {
(
w.open_entry(entry, app).unwrap(),
w.open_entry(entry, app).unwrap(),
w.open_entry(entry.clone(), app).unwrap(),
w.open_entry(entry.clone(), app).unwrap(),
)
});
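Note: worktree_scans_complete collects each worktree's scan_complete() future while it still holds the &AppContext borrow and only then moves them into an async block, so the returned future is 'static and can be awaited outside of app.read. The shape of that pattern, as a standalone sketch (illustrative only):

use std::future::Future;

// Collect the futures eagerly, then await them one after another in a
// future that owns everything it needs.
async fn await_all<F: Future<Output = ()>>(futures: Vec<F>) {
    for future in futures {
        future.await;
    }
}

Awaiting sequentially is presumably fine here because each future only signals completion of a scan that is already running in the background; a join_all would behave equivalently.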

View file

@ -5,8 +5,12 @@ use gpui::{
color::rgbu, elements::*, json::to_string_pretty, keymap::Binding, AnyViewHandle, AppContext,
ClipboardItem, Entity, ModelHandle, MutableAppContext, View, ViewContext, ViewHandle,
};
use log::{error, info};
use std::{collections::HashSet, path::PathBuf};
use log::error;
use std::{
collections::HashSet,
path::{Path, PathBuf},
sync::Arc,
};
pub fn init(app: &mut MutableAppContext) {
app.add_action("workspace:save", WorkspaceView::save_active_item);
@ -19,7 +23,7 @@ pub fn init(app: &mut MutableAppContext) {
pub trait ItemView: View {
fn title(&self, app: &AppContext) -> String;
fn entry_id(&self, app: &AppContext) -> Option<(usize, usize)>;
fn entry_id(&self, app: &AppContext) -> Option<(usize, Arc<Path>)>;
fn clone_on_split(&self, _: &mut ViewContext<Self>) -> Option<Self>
where
Self: Sized,
@ -42,7 +46,7 @@ pub trait ItemView: View {
pub trait ItemViewHandle: Send + Sync {
fn title(&self, app: &AppContext) -> String;
fn entry_id(&self, app: &AppContext) -> Option<(usize, usize)>;
fn entry_id(&self, app: &AppContext) -> Option<(usize, Arc<Path>)>;
fn boxed_clone(&self) -> Box<dyn ItemViewHandle>;
fn clone_on_split(&self, app: &mut MutableAppContext) -> Option<Box<dyn ItemViewHandle>>;
fn set_parent_pane(&self, pane: &ViewHandle<Pane>, app: &mut MutableAppContext);
@ -57,7 +61,7 @@ impl<T: ItemView> ItemViewHandle for ViewHandle<T> {
self.read(app).title(app)
}
fn entry_id(&self, app: &AppContext) -> Option<(usize, usize)> {
fn entry_id(&self, app: &AppContext) -> Option<(usize, Arc<Path>)> {
self.read(app).entry_id(app)
}
@ -124,7 +128,7 @@ pub struct WorkspaceView {
center: PaneGroup,
panes: Vec<ViewHandle<Pane>>,
active_pane: ViewHandle<Pane>,
loading_entries: HashSet<(usize, usize)>,
loading_entries: HashSet<(usize, Arc<Path>)>,
}
impl WorkspaceView {
@ -189,24 +193,23 @@ impl WorkspaceView {
}
}
pub fn open_entry(&mut self, entry: (usize, usize), ctx: &mut ViewContext<Self>) {
pub fn open_entry(&mut self, entry: (usize, Arc<Path>), ctx: &mut ViewContext<Self>) {
if self.loading_entries.contains(&entry) {
return;
}
if self
.active_pane()
.update(ctx, |pane, ctx| pane.activate_entry(entry, ctx))
.update(ctx, |pane, ctx| pane.activate_entry(entry.clone(), ctx))
{
return;
}
self.loading_entries.insert(entry);
self.loading_entries.insert(entry.clone());
match self
.workspace
.update(ctx, |workspace, ctx| workspace.open_entry(entry, ctx))
{
match self.workspace.update(ctx, |workspace, ctx| {
workspace.open_entry(entry.clone(), ctx)
}) {
Err(error) => error!("{}", error),
Ok(item) => {
let settings = self.settings.clone();
@ -227,19 +230,6 @@ impl WorkspaceView {
}
}
pub fn open_example_entry(&mut self, ctx: &mut ViewContext<Self>) {
if let Some(tree) = self.workspace.read(ctx).worktrees().iter().next() {
if let Some(file) = tree.read(ctx).files().next() {
info!("open_entry ({}, {})", tree.id(), file.entry_id);
self.open_entry((tree.id(), file.entry_id), ctx);
} else {
error!("No example file found for worktree {}", tree.id());
}
} else {
error!("No worktree found while opening example entry");
}
}
pub fn save_active_item(&mut self, _: &(), ctx: &mut ViewContext<Self>) {
self.active_pane.update(ctx, |pane, ctx| {
if let Some(item) = pane.active_item() {
@ -398,80 +388,59 @@ mod tests {
App::test_async((), |mut app| async move {
let dir = temp_tree(json!({
"a": {
"aa": "aa contents",
"ab": "ab contents",
"ac": "ab contents",
"file1": "contents 1",
"file2": "contents 2",
"file3": "contents 3",
},
}));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let workspace = app.add_model(|ctx| Workspace::new(vec![dir.path().into()], ctx));
app.finish_pending_tasks().await; // Open and populate worktree.
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
.await;
let entries = app.read(|ctx| workspace.file_entries(ctx));
let file1 = entries[0].clone();
let file2 = entries[1].clone();
let file3 = entries[2].clone();
let (_, workspace_view) =
app.add_window(|ctx| WorkspaceView::new(workspace.clone(), settings, ctx));
let pane = app.read(|ctx| workspace_view.read(ctx).active_pane().clone());
// Open the first entry
workspace_view.update(&mut app, |w, ctx| w.open_entry(entries[0], ctx));
app.finish_pending_tasks().await;
app.read(|ctx| {
assert_eq!(
workspace_view
.read(ctx)
.active_pane()
.read(ctx)
.items()
.len(),
1
)
});
workspace_view.update(&mut app, |w, ctx| w.open_entry(file1.clone(), ctx));
pane.condition(&app, |pane, _| pane.items().len() == 1)
.await;
// Open the second entry
workspace_view.update(&mut app, |w, ctx| w.open_entry(entries[1], ctx));
app.finish_pending_tasks().await;
workspace_view.update(&mut app, |w, ctx| w.open_entry(file2.clone(), ctx));
pane.condition(&app, |pane, _| pane.items().len() == 2)
.await;
app.read(|ctx| {
let active_pane = workspace_view.read(ctx).active_pane().read(ctx);
assert_eq!(active_pane.items().len(), 2);
let pane = pane.read(ctx);
assert_eq!(
active_pane.active_item().unwrap().entry_id(ctx),
Some(entries[1])
pane.active_item().unwrap().entry_id(ctx),
Some(file2.clone())
);
});
// Open the first entry again
workspace_view.update(&mut app, |w, ctx| w.open_entry(entries[0], ctx));
app.finish_pending_tasks().await;
workspace_view.update(&mut app, |w, ctx| w.open_entry(file1.clone(), ctx));
pane.condition(&app, move |pane, ctx| {
pane.active_item().unwrap().entry_id(ctx) == Some(file1.clone())
})
.await;
app.read(|ctx| {
let active_pane = workspace_view.read(ctx).active_pane().read(ctx);
assert_eq!(active_pane.items().len(), 2);
assert_eq!(
active_pane.active_item().unwrap().entry_id(ctx),
Some(entries[0])
);
assert_eq!(pane.read(ctx).items().len(), 2);
});
// Open the third entry twice concurrently
workspace_view.update(&mut app, |w, ctx| {
w.open_entry(entries[2], ctx);
w.open_entry(entries[2], ctx);
});
app.finish_pending_tasks().await;
app.read(|ctx| {
assert_eq!(
workspace_view
.read(ctx)
.active_pane()
.read(ctx)
.items()
.len(),
3
);
w.open_entry(file3.clone(), ctx);
w.open_entry(file3.clone(), ctx);
});
pane.condition(&app, |pane, _| pane.items().len() == 3)
.await;
});
}
@ -482,44 +451,45 @@ mod tests {
let dir = temp_tree(json!({
"a": {
"aa": "aa contents",
"ab": "ab contents",
"ac": "ab contents",
"file1": "contents 1",
"file2": "contents 2",
"file3": "contents 3",
},
}));
let settings = settings::channel(&app.font_cache()).unwrap().1;
let workspace = app.add_model(|ctx| Workspace::new(vec![dir.path().into()], ctx));
app.finish_pending_tasks().await; // Open and populate worktree.
app.read(|ctx| workspace.read(ctx).worktree_scans_complete(ctx))
.await;
let entries = app.read(|ctx| workspace.file_entries(ctx));
let file1 = entries[0].clone();
let (window_id, workspace_view) =
app.add_window(|ctx| WorkspaceView::new(workspace.clone(), settings, ctx));
workspace_view.update(&mut app, |w, ctx| w.open_entry(entries[0], ctx));
app.finish_pending_tasks().await;
let pane_1 = app.read(|ctx| workspace_view.read(ctx).active_pane().clone());
workspace_view.update(&mut app, |w, ctx| w.open_entry(file1.clone(), ctx));
{
let file1 = file1.clone();
pane_1
.condition(&app, move |pane, ctx| {
pane.active_item().and_then(|i| i.entry_id(ctx)) == Some(file1.clone())
})
.await;
}
app.dispatch_action(window_id, vec![pane_1.id()], "pane:split_right", ());
app.update(|ctx| {
let pane_2 = workspace_view.read(ctx).active_pane().clone();
assert_ne!(pane_1, pane_2);
assert_eq!(
pane_2
.read(ctx)
.active_item()
.unwrap()
.entry_id(ctx.as_ref()),
Some(entries[0])
);
let pane2_item = pane_2.read(ctx).active_item().unwrap();
assert_eq!(pane2_item.entry_id(ctx.as_ref()), Some(file1.clone()));
ctx.dispatch_action(window_id, vec![pane_2.id()], "pane:close_active_item", ());
let w = workspace_view.read(ctx);
assert_eq!(w.panes.len(), 1);
assert_eq!(w.active_pane(), &pane_1);
let workspace_view = workspace_view.read(ctx);
assert_eq!(workspace_view.panes.len(), 1);
assert_eq!(workspace_view.active_pane(), &pane_1);
});
});
}
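Note: entries are now identified by (worktree id, Arc<Path>) instead of (worktree id, entry id), presumably because raw entry indices are no longer stable once the worktree rescans itself. Arc<Path> makes such keys cheap to clone and usable in hash maps and sets; a small standalone illustration:

use std::{collections::HashSet, path::Path, sync::Arc};

fn main() {
    // Cloning an Arc<Path> is a reference-count bump, and it hashes and
    // compares by the underlying path, so it works well as a map/set key.
    let path: Arc<Path> = Arc::from(Path::new("a/file1"));
    let mut loading: HashSet<(usize, Arc<Path>)> = HashSet::new();
    loading.insert((0, path.clone()));
    assert!(loading.contains(&(0, Arc::from(Path::new("a/file1")))));
}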

zed/src/worktree.rs (new normal file, 1720 lines)

File diff suppressed because it is too large.

View file

@ -1,4 +1,6 @@
#[derive(Copy, Clone, Debug)]
use std::iter::FromIterator;
#[derive(Copy, Clone, Debug, Default)]
pub struct CharBag(u64);
impl CharBag {
@ -23,6 +25,22 @@ impl CharBag {
}
}
impl Extend<char> for CharBag {
fn extend<T: IntoIterator<Item = char>>(&mut self, iter: T) {
for c in iter {
self.insert(c);
}
}
}
impl FromIterator<char> for CharBag {
fn from_iter<T: IntoIterator<Item = char>>(iter: T) -> Self {
let mut result = Self::default();
result.extend(iter);
result
}
}
impl From<&str> for CharBag {
fn from(s: &str) -> Self {
let mut bag = Self(0);
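Note: CharBag is a 64-bit bag of characters used as a cheap pre-filter before real fuzzy matching (see the is_superset check in fuzzy.rs), and the new Extend/FromIterator impls make it easy to build one from any iterator of chars. The general idea, as a standalone sketch (the bit layout below is illustrative, not necessarily the one char_bag.rs uses):

// One bit per letter/digit, so a candidate path can be rejected with a
// single AND before any per-character scoring runs.
#[derive(Copy, Clone, Default)]
struct Bag(u64);

impl Bag {
    fn insert(&mut self, c: char) {
        let bit = match c {
            'a'..='z' => Some(c as u32 - 'a' as u32),
            '0'..='9' => Some(26 + (c as u32 - '0' as u32)),
            _ => None,
        };
        if let Some(bit) = bit {
            self.0 |= 1u64 << bit;
        }
    }

    fn is_superset(&self, other: Bag) -> bool {
        self.0 & other.0 == other.0
    }
}

fn main() {
    let mut path = Bag::default();
    "src/worktree.rs".chars().for_each(|c| path.insert(c));
    let mut query = Bag::default();
    "wrk".chars().for_each(|c| query.insert(c));
    assert!(path.is_superset(query));
}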

View file

@ -1,22 +1,21 @@
use super::{char_bag::CharBag, EntryKind, Snapshot};
use gpui::scoped_pool;
use super::char_bag::CharBag;
use std::{
cmp::{max, min, Ordering, Reverse},
collections::BinaryHeap,
path::Path,
sync::atomic::{self, AtomicBool},
sync::Arc,
};
const BASE_DISTANCE_PENALTY: f64 = 0.6;
const ADDITIONAL_DISTANCE_PENALTY: f64 = 0.05;
const MIN_DISTANCE_PENALTY: f64 = 0.2;
pub struct PathEntry {
pub entry_id: usize,
pub path_chars: CharBag,
pub path: Vec<char>,
pub lowercase_path: Vec<char>,
pub is_ignored: bool,
#[derive(Clone, Debug)]
pub struct MatchCandidate<'a> {
pub path: &'a Arc<Path>,
pub char_bag: CharBag,
}
#[derive(Clone, Debug)]
@ -24,8 +23,7 @@ pub struct PathMatch {
pub score: f64,
pub positions: Vec<usize>,
pub tree_id: usize,
pub entry_id: usize,
pub skipped_prefix_len: usize,
pub path: Arc<Path>,
}
impl PartialEq for PathMatch {
@ -48,29 +46,40 @@ impl Ord for PathMatch {
}
}
pub fn match_paths(
paths_by_tree_id: &[(usize, usize, &[PathEntry])],
pub fn match_paths<'a, T>(
snapshots: T,
query: &str,
include_root_name: bool,
include_ignored: bool,
smart_case: bool,
max_results: usize,
cancel_flag: Arc<AtomicBool>,
pool: scoped_pool::Pool,
) -> Vec<PathMatch> {
) -> Vec<PathMatch>
where
T: Clone + Send + Iterator<Item = &'a Snapshot> + 'a,
{
let lowercase_query = query.to_lowercase().chars().collect::<Vec<_>>();
let query = query.chars().collect::<Vec<_>>();
let lowercase_query = &lowercase_query;
let query = &query;
let query_chars = CharBag::from(&lowercase_query[..]);
let cpus = num_cpus::get();
let path_count = paths_by_tree_id
.iter()
.fold(0, |sum, (_, _, paths)| sum + paths.len());
let path_count: usize = if include_ignored {
snapshots.clone().map(Snapshot::file_count).sum()
} else {
snapshots.clone().map(Snapshot::visible_file_count).sum()
};
let segment_size = (path_count + cpus - 1) / cpus;
let mut segment_results = (0..cpus).map(|_| BinaryHeap::new()).collect::<Vec<_>>();
pool.scoped(|scope| {
for (segment_idx, results) in segment_results.iter_mut().enumerate() {
let trees = snapshots.clone();
let cancel_flag = &cancel_flag;
scope.execute(move || {
let segment_start = segment_idx * segment_size;
let segment_end = segment_start + segment_size;
@ -84,22 +93,38 @@ pub fn match_paths(
let mut best_position_matrix = Vec::new();
let mut tree_start = 0;
for (tree_id, skipped_prefix_len, paths) in paths_by_tree_id {
let tree_end = tree_start + paths.len();
for snapshot in trees {
let tree_end = if include_ignored {
tree_start + snapshot.file_count()
} else {
tree_start + snapshot.visible_file_count()
};
if tree_start < segment_end && segment_start < tree_end {
let start = max(tree_start, segment_start) - tree_start;
let end = min(tree_end, segment_end) - tree_start;
let entries = if include_ignored {
snapshot.files(start).take(end - start)
} else {
snapshot.visible_files(start).take(end - start)
};
let paths = entries.map(|entry| {
if let EntryKind::File(char_bag) = entry.kind {
MatchCandidate {
path: &entry.path,
char_bag,
}
} else {
unreachable!()
}
});
match_single_tree_paths(
*tree_id,
*skipped_prefix_len,
snapshot,
include_root_name,
paths,
start,
end,
query,
lowercase_query,
query_chars,
include_ignored,
smart_case,
results,
max_results,
@ -108,6 +133,7 @@ pub fn match_paths(
&mut last_positions,
&mut score_matrix,
&mut best_position_matrix,
&cancel_flag,
);
}
if tree_end >= segment_end {
@ -129,16 +155,13 @@ pub fn match_paths(
results
}
fn match_single_tree_paths(
tree_id: usize,
skipped_prefix_len: usize,
path_entries: &[PathEntry],
start: usize,
end: usize,
fn match_single_tree_paths<'a>(
snapshot: &Snapshot,
include_root_name: bool,
path_entries: impl Iterator<Item = MatchCandidate<'a>>,
query: &[char],
lowercase_query: &[char],
query_chars: CharBag,
include_ignored: bool,
smart_case: bool,
results: &mut BinaryHeap<Reverse<PathMatch>>,
max_results: usize,
@ -147,39 +170,61 @@ fn match_single_tree_paths(
last_positions: &mut Vec<usize>,
score_matrix: &mut Vec<Option<f64>>,
best_position_matrix: &mut Vec<usize>,
cancel_flag: &AtomicBool,
) {
for i in start..end {
let path_entry = unsafe { &path_entries.get_unchecked(i) };
let mut path_chars = Vec::new();
let mut lowercase_path_chars = Vec::new();
if !include_ignored && path_entry.is_ignored {
let prefix = if include_root_name {
snapshot.root_name()
} else {
""
}
.chars()
.collect::<Vec<_>>();
let lowercase_prefix = prefix
.iter()
.map(|c| c.to_ascii_lowercase())
.collect::<Vec<_>>();
for candidate in path_entries {
if !candidate.char_bag.is_superset(query_chars) {
continue;
}
if !path_entry.path_chars.is_superset(query_chars) {
continue;
if cancel_flag.load(atomic::Ordering::Relaxed) {
break;
}
path_chars.clear();
lowercase_path_chars.clear();
for c in candidate.path.to_string_lossy().chars() {
path_chars.push(c);
lowercase_path_chars.push(c.to_ascii_lowercase());
}
if !find_last_positions(
last_positions,
skipped_prefix_len,
&path_entry.lowercase_path,
&lowercase_prefix,
&lowercase_path_chars,
&lowercase_query[..],
) {
continue;
}
let matrix_len = query.len() * (path_entry.path.len() - skipped_prefix_len);
let matrix_len = query.len() * (path_chars.len() + prefix.len());
score_matrix.clear();
score_matrix.resize(matrix_len, None);
best_position_matrix.clear();
best_position_matrix.resize(matrix_len, skipped_prefix_len);
best_position_matrix.resize(matrix_len, 0);
let score = score_match(
&query[..],
&lowercase_query[..],
&path_entry.path,
&path_entry.lowercase_path,
skipped_prefix_len,
&path_chars,
&lowercase_path_chars,
&prefix,
&lowercase_prefix,
smart_case,
&last_positions,
score_matrix,
@ -190,11 +235,10 @@ fn match_single_tree_paths(
if score > 0.0 {
results.push(Reverse(PathMatch {
tree_id,
entry_id: path_entry.entry_id,
tree_id: snapshot.id,
path: candidate.path.clone(),
score,
positions: match_positions.clone(),
skipped_prefix_len,
}));
if results.len() == max_results {
*min_score = results.peek().unwrap().0.score;
@ -205,18 +249,17 @@ fn match_single_tree_paths(
fn find_last_positions(
last_positions: &mut Vec<usize>,
skipped_prefix_len: usize,
prefix: &[char],
path: &[char],
query: &[char],
) -> bool {
let mut path = path.iter();
let mut prefix_iter = prefix.iter();
for (i, char) in query.iter().enumerate().rev() {
if let Some(j) = path.rposition(|c| c == char) {
if j >= skipped_prefix_len {
last_positions[i] = j;
} else {
return false;
}
last_positions[i] = j + prefix.len();
} else if let Some(j) = prefix_iter.rposition(|c| c == char) {
last_positions[i] = j;
} else {
return false;
}
@ -229,7 +272,8 @@ fn score_match(
query_cased: &[char],
path: &[char],
path_cased: &[char],
skipped_prefix_len: usize,
prefix: &[char],
lowercase_prefix: &[char],
smart_case: bool,
last_positions: &[usize],
score_matrix: &mut [Option<f64>],
@ -242,14 +286,15 @@ fn score_match(
query_cased,
path,
path_cased,
skipped_prefix_len,
prefix,
lowercase_prefix,
smart_case,
last_positions,
score_matrix,
best_position_matrix,
min_score,
0,
skipped_prefix_len,
0,
query.len() as f64,
) * query.len() as f64;
@ -257,10 +302,10 @@ fn score_match(
return 0.0;
}
let path_len = path.len() - skipped_prefix_len;
let path_len = path.len() + prefix.len();
let mut cur_start = 0;
for i in 0..query.len() {
match_positions[i] = best_position_matrix[i * path_len + cur_start] - skipped_prefix_len;
match_positions[i] = best_position_matrix[i * path_len + cur_start];
cur_start = match_positions[i] + 1;
}
@ -272,7 +317,8 @@ fn recursive_score_match(
query_cased: &[char],
path: &[char],
path_cased: &[char],
skipped_prefix_len: usize,
prefix: &[char],
lowercase_prefix: &[char],
smart_case: bool,
last_positions: &[usize],
score_matrix: &mut [Option<f64>],
@ -286,9 +332,9 @@ fn recursive_score_match(
return 1.0;
}
let path_len = path.len() - skipped_prefix_len;
let path_len = prefix.len() + path.len();
if let Some(memoized) = score_matrix[query_idx * path_len + path_idx - skipped_prefix_len] {
if let Some(memoized) = score_matrix[query_idx * path_len + path_idx] {
return memoized;
}
@ -300,7 +346,11 @@ fn recursive_score_match(
let mut last_slash = 0;
for j in path_idx..=limit {
let path_char = path_cased[j];
let path_char = if j < prefix.len() {
lowercase_prefix[j]
} else {
path_cased[j - prefix.len()]
};
let is_path_sep = path_char == '/' || path_char == '\\';
if query_idx == 0 && is_path_sep {
@ -308,10 +358,19 @@ fn recursive_score_match(
}
if query_char == path_char || (is_path_sep && query_char == '_' || query_char == '\\') {
let curr = if j < prefix.len() {
prefix[j]
} else {
path[j - prefix.len()]
};
let mut char_score = 1.0;
if j > path_idx {
let last = path[j - 1];
let curr = path[j];
let last = if j - 1 < prefix.len() {
prefix[j - 1]
} else {
path[j - 1 - prefix.len()]
};
if last == '/' {
char_score = 0.9;
@ -334,15 +393,15 @@ fn recursive_score_match(
// Apply a severe penalty if the case doesn't match.
// This will make the exact matches have higher score than the case-insensitive and the
// path insensitive matches.
if (smart_case || path[j] == '/') && query[query_idx] != path[j] {
if (smart_case || curr == '/') && query[query_idx] != curr {
char_score *= 0.001;
}
let mut multiplier = char_score;
// Scale the score based on how deep within the patch we found the match.
// Scale the score based on how deep within the path we found the match.
if query_idx == 0 {
multiplier /= (path.len() - last_slash) as f64;
multiplier /= ((prefix.len() + path.len()) - last_slash) as f64;
}
let mut next_score = 1.0;
@ -363,7 +422,8 @@ fn recursive_score_match(
query_cased,
path,
path_cased,
skipped_prefix_len,
prefix,
lowercase_prefix,
smart_case,
last_positions,
score_matrix,
@ -386,16 +446,49 @@ fn recursive_score_match(
}
if best_position != 0 {
best_position_matrix[query_idx * path_len + path_idx - skipped_prefix_len] = best_position;
best_position_matrix[query_idx * path_len + path_idx] = best_position;
}
score_matrix[query_idx * path_len + path_idx - skipped_prefix_len] = Some(score);
score_matrix[query_idx * path_len + path_idx] = Some(score);
score
}
#[cfg(test)]
mod tests {
use super::*;
use std::path::PathBuf;
#[test]
fn test_get_last_positions() {
let mut last_positions = vec![0; 2];
let result = find_last_positions(
&mut last_positions,
&['a', 'b', 'c'],
&['b', 'd', 'e', 'f'],
&['d', 'c'],
);
assert_eq!(result, false);
last_positions.resize(2, 0);
let result = find_last_positions(
&mut last_positions,
&['a', 'b', 'c'],
&['b', 'd', 'e', 'f'],
&['c', 'd'],
);
assert_eq!(result, true);
assert_eq!(last_positions, vec![2, 4]);
last_positions.resize(4, 0);
let result = find_last_positions(
&mut last_positions,
&['z', 'e', 'd', '/'],
&['z', 'e', 'd', '/', 'f'],
&['z', '/', 'z', 'f'],
);
assert_eq!(result, true);
assert_eq!(last_positions, vec![0, 3, 4, 8]);
}
#[test]
fn test_match_path_entries() {
@ -447,17 +540,17 @@ mod tests {
let query = query.chars().collect::<Vec<_>>();
let query_chars = CharBag::from(&lowercase_query[..]);
let path_arcs = paths
.iter()
.map(|path| Arc::from(PathBuf::from(path)))
.collect::<Vec<_>>();
let mut path_entries = Vec::new();
for (i, path) in paths.iter().enumerate() {
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
let path_chars = CharBag::from(&lowercase_path[..]);
let path = path.chars().collect();
path_entries.push(PathEntry {
entry_id: i,
path_chars,
path,
lowercase_path,
is_ignored: false,
let char_bag = CharBag::from(lowercase_path.as_slice());
path_entries.push(MatchCandidate {
char_bag,
path: path_arcs.get(i).unwrap(),
});
}
@ -466,17 +559,22 @@ mod tests {
match_positions.resize(query.len(), 0);
last_positions.resize(query.len(), 0);
let cancel_flag = AtomicBool::new(false);
let mut results = BinaryHeap::new();
match_single_tree_paths(
0,
0,
&path_entries,
0,
path_entries.len(),
&Snapshot {
id: 0,
scan_id: 0,
abs_path: PathBuf::new().into(),
ignores: Default::default(),
entries: Default::default(),
root_name: Default::default(),
},
false,
path_entries.into_iter(),
&query[..],
&lowercase_query[..],
query_chars,
true,
smart_case,
&mut results,
100,
@ -485,12 +583,22 @@ mod tests {
&mut last_positions,
&mut Vec::new(),
&mut Vec::new(),
&cancel_flag,
);
results
.into_iter()
.rev()
.map(|result| (paths[result.0.entry_id].clone(), result.0.positions))
.map(|result| {
(
paths
.iter()
.copied()
.find(|p| result.0.path.as_ref() == Path::new(p))
.unwrap(),
result.0.positions,
)
})
.collect()
}
}
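Note: spawn_search cancels any in-flight search by flipping the previous AtomicBool and handing a fresh one to the new search, and match_single_tree_paths simply polls the flag between candidates. A standalone sketch of that handoff (Searcher and score_candidates are illustrative names):

use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

struct Searcher {
    cancel_flag: Arc<AtomicBool>,
}

impl Searcher {
    // Tell whatever is currently running to stop, and give the next search
    // its own flag so the two cannot interfere with each other.
    fn start_search(&mut self) -> Arc<AtomicBool> {
        self.cancel_flag.store(true, Ordering::Relaxed);
        self.cancel_flag = Arc::new(AtomicBool::new(false));
        self.cancel_flag.clone()
    }
}

fn score_candidates(cancel_flag: &AtomicBool, candidates: &[&str]) -> usize {
    let mut scored = 0;
    for _candidate in candidates {
        if cancel_flag.load(Ordering::Relaxed) {
            break; // bail out early, as the matcher does
        }
        scored += 1;
    }
    scored
}

fn main() {
    let mut searcher = Searcher {
        cancel_flag: Arc::new(AtomicBool::new(false)),
    };
    let flag = searcher.start_search();
    assert_eq!(score_candidates(&flag, &["a", "b", "c"]), 3);
}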

View file

@ -0,0 +1,57 @@
use ignore::gitignore::Gitignore;
use std::{ffi::OsStr, path::Path, sync::Arc};
pub enum IgnoreStack {
None,
Some {
base: Arc<Path>,
ignore: Arc<Gitignore>,
parent: Arc<IgnoreStack>,
},
All,
}
impl IgnoreStack {
pub fn none() -> Arc<Self> {
Arc::new(Self::None)
}
pub fn all() -> Arc<Self> {
Arc::new(Self::All)
}
pub fn is_all(&self) -> bool {
matches!(self, IgnoreStack::All)
}
pub fn append(self: Arc<Self>, base: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
match self.as_ref() {
IgnoreStack::All => self,
_ => Arc::new(Self::Some {
base,
ignore,
parent: self,
}),
}
}
pub fn is_path_ignored(&self, path: &Path, is_dir: bool) -> bool {
if is_dir && path.file_name() == Some(OsStr::new(".git")) {
return true;
}
match self {
Self::None => false,
Self::All => true,
Self::Some {
base,
ignore,
parent: prev,
} => match ignore.matched(path.strip_prefix(base).unwrap(), is_dir) {
ignore::Match::None => prev.is_path_ignored(path, is_dir),
ignore::Match::Ignore(_) => true,
ignore::Match::Whitelist(_) => false,
},
}
}
}
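Note: IgnoreStack layers the .gitignore files discovered on the way down into a directory, with All short-circuiting everything below an already ignored directory. A hedged sketch of how a scanner might drive it (this reuses the IgnoreStack defined above together with ignore::gitignore::Gitignore; the walk itself is illustrative, not the actual worktree scanner):

use ignore::gitignore::Gitignore;
use std::{path::Path, sync::Arc};

// Recursively walk a directory, pushing a .gitignore onto the stack whenever
// one is found, and consulting the whole stack for every entry.
fn scan_dir(stack: Arc<IgnoreStack>, dir: &Path) {
    let gitignore_path = dir.join(".gitignore");
    let stack = if gitignore_path.is_file() {
        let (ignore, _error) = Gitignore::new(&gitignore_path);
        stack.append(Arc::from(dir), Arc::new(ignore))
    } else {
        stack
    };

    if let Ok(entries) = std::fs::read_dir(dir) {
        for entry in entries.flatten() {
            let path = entry.path();
            let is_dir = path.is_dir();
            if stack.is_path_ignored(&path, is_dir) {
                continue; // a real scanner might record the entry as ignored instead
            }
            if is_dir {
                scan_dir(stack.clone(), &path);
            }
        }
    }
}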

View file

@ -1,5 +0,0 @@
mod char_bag;
mod fuzzy;
mod worktree;
pub use worktree::{match_paths, FileHandle, PathMatch, Worktree, WorktreeHandle};

View file

@ -1,725 +0,0 @@
pub use super::fuzzy::PathMatch;
use super::{
char_bag::CharBag,
fuzzy::{self, PathEntry},
};
use crate::{
editor::{History, Snapshot},
timer,
util::post_inc,
};
use anyhow::{anyhow, Result};
use crossbeam_channel as channel;
use easy_parallel::Parallel;
use gpui::{scoped_pool, AppContext, Entity, ModelContext, ModelHandle, Task};
use ignore::dir::{Ignore, IgnoreBuilder};
use parking_lot::RwLock;
use smol::prelude::*;
use std::{
collections::HashMap,
ffi::{OsStr, OsString},
fmt, fs,
io::{self, Write},
os::unix::fs::MetadataExt,
path::Path,
path::PathBuf,
sync::Arc,
time::Duration,
};
#[derive(Clone)]
pub struct Worktree(Arc<RwLock<WorktreeState>>);
struct WorktreeState {
id: usize,
path: PathBuf,
entries: Vec<Entry>,
file_paths: Vec<PathEntry>,
histories: HashMap<usize, History>,
scanning: bool,
}
struct DirToScan {
id: usize,
path: PathBuf,
relative_path: PathBuf,
ignore: Option<Ignore>,
dirs_to_scan: channel::Sender<io::Result<DirToScan>>,
}
impl Worktree {
pub fn new<T>(id: usize, path: T, ctx: Option<&mut ModelContext<Self>>) -> Self
where
T: Into<PathBuf>,
{
let tree = Self(Arc::new(RwLock::new(WorktreeState {
id,
path: path.into(),
entries: Vec::new(),
file_paths: Vec::new(),
histories: HashMap::new(),
scanning: ctx.is_some(),
})));
if let Some(ctx) = ctx {
tree.0.write().scanning = true;
let tree = tree.clone();
let task = ctx.background_executor().spawn(async move {
tree.scan_dirs()?;
Ok(())
});
ctx.spawn(task, Self::done_scanning).detach();
ctx.spawn_stream(
timer::repeat(Duration::from_millis(100)).map(|_| ()),
Self::scanning,
|_, _| {},
)
.detach();
}
tree
}
fn scan_dirs(&self) -> io::Result<()> {
let path = self.0.read().path.clone();
let metadata = fs::metadata(&path)?;
let ino = metadata.ino();
let is_symlink = fs::symlink_metadata(&path)?.file_type().is_symlink();
let name = path
.file_name()
.map(|name| OsString::from(name))
.unwrap_or(OsString::from("/"));
let relative_path = PathBuf::from(&name);
let mut ignore = IgnoreBuilder::new().build().add_parents(&path).unwrap();
if metadata.is_dir() {
ignore = ignore.add_child(&path).unwrap();
}
let is_ignored = ignore.matched(&path, metadata.is_dir()).is_ignore();
if metadata.file_type().is_dir() {
let is_ignored = is_ignored || name == ".git";
let id = self.push_dir(None, name, ino, is_symlink, is_ignored);
let (tx, rx) = channel::unbounded();
let tx_ = tx.clone();
tx.send(Ok(DirToScan {
id,
path,
relative_path,
ignore: Some(ignore),
dirs_to_scan: tx_,
}))
.unwrap();
drop(tx);
Parallel::<io::Result<()>>::new()
.each(0..16, |_| {
while let Ok(result) = rx.recv() {
self.scan_dir(result?)?;
}
Ok(())
})
.run()
.into_iter()
.collect::<io::Result<()>>()?;
} else {
self.push_file(None, name, ino, is_symlink, is_ignored, relative_path);
}
Ok(())
}
fn scan_dir(&self, to_scan: DirToScan) -> io::Result<()> {
let mut new_children = Vec::new();
for child_entry in fs::read_dir(&to_scan.path)? {
let child_entry = child_entry?;
let name = child_entry.file_name();
let relative_path = to_scan.relative_path.join(&name);
let metadata = child_entry.metadata()?;
let ino = metadata.ino();
let is_symlink = metadata.file_type().is_symlink();
if metadata.is_dir() {
let path = to_scan.path.join(&name);
let mut is_ignored = true;
let mut ignore = None;
if let Some(parent_ignore) = to_scan.ignore.as_ref() {
let child_ignore = parent_ignore.add_child(&path).unwrap();
is_ignored = child_ignore.matched(&path, true).is_ignore() || name == ".git";
if !is_ignored {
ignore = Some(child_ignore);
}
}
let id = self.push_dir(Some(to_scan.id), name, ino, is_symlink, is_ignored);
new_children.push(id);
let dirs_to_scan = to_scan.dirs_to_scan.clone();
let _ = to_scan.dirs_to_scan.send(Ok(DirToScan {
id,
path,
relative_path,
ignore,
dirs_to_scan,
}));
} else {
let is_ignored = to_scan.ignore.as_ref().map_or(true, |i| {
i.matched(to_scan.path.join(&name), false).is_ignore()
});
new_children.push(self.push_file(
Some(to_scan.id),
name,
ino,
is_symlink,
is_ignored,
relative_path,
));
};
}
if let Entry::Dir { children, .. } = &mut self.0.write().entries[to_scan.id] {
*children = new_children.clone();
}
Ok(())
}
fn push_dir(
&self,
parent: Option<usize>,
name: OsString,
ino: u64,
is_symlink: bool,
is_ignored: bool,
) -> usize {
let entries = &mut self.0.write().entries;
let dir_id = entries.len();
entries.push(Entry::Dir {
parent,
name,
ino,
is_symlink,
is_ignored,
children: Vec::new(),
});
dir_id
}
fn push_file(
&self,
parent: Option<usize>,
name: OsString,
ino: u64,
is_symlink: bool,
is_ignored: bool,
path: PathBuf,
) -> usize {
let path = path.to_string_lossy();
let lowercase_path = path.to_lowercase().chars().collect::<Vec<_>>();
let path = path.chars().collect::<Vec<_>>();
let path_chars = CharBag::from(&path[..]);
let mut state = self.0.write();
let entry_id = state.entries.len();
state.entries.push(Entry::File {
parent,
name,
ino,
is_symlink,
is_ignored,
});
state.file_paths.push(PathEntry {
entry_id,
path_chars,
path,
lowercase_path,
is_ignored,
});
entry_id
}
pub fn entry_path(&self, mut entry_id: usize) -> Result<PathBuf> {
let state = self.0.read();
if entry_id >= state.entries.len() {
return Err(anyhow!("Entry does not exist in tree"));
}
let mut entries = Vec::new();
loop {
let entry = &state.entries[entry_id];
entries.push(entry);
if let Some(parent_id) = entry.parent() {
entry_id = parent_id;
} else {
break;
}
}
let mut path = PathBuf::new();
for entry in entries.into_iter().rev() {
path.push(entry.name());
}
Ok(path)
}
pub fn abs_entry_path(&self, entry_id: usize) -> Result<PathBuf> {
let mut path = self.0.read().path.clone();
path.pop();
Ok(path.join(self.entry_path(entry_id)?))
}
fn fmt_entry(&self, f: &mut fmt::Formatter<'_>, entry_id: usize, indent: usize) -> fmt::Result {
match &self.0.read().entries[entry_id] {
Entry::Dir { name, children, .. } => {
write!(
f,
"{}{}/ ({})\n",
" ".repeat(indent),
name.to_string_lossy(),
entry_id
)?;
for child_id in children.iter() {
self.fmt_entry(f, *child_id, indent + 2)?;
}
Ok(())
}
Entry::File { name, .. } => write!(
f,
"{}{} ({})\n",
" ".repeat(indent),
name.to_string_lossy(),
entry_id
),
}
}
pub fn path(&self) -> PathBuf {
PathBuf::from(&self.0.read().path)
}
pub fn contains_path(&self, path: &Path) -> bool {
path.starts_with(self.path())
}
pub fn iter(&self) -> Iter {
Iter {
tree: self.clone(),
stack: Vec::new(),
started: false,
}
}
pub fn files(&self) -> FilesIter {
FilesIter {
iter: self.iter(),
path: PathBuf::new(),
}
}
pub fn entry_count(&self) -> usize {
self.0.read().entries.len()
}
pub fn file_count(&self) -> usize {
self.0.read().file_paths.len()
}
pub fn load_history(&self, entry_id: usize) -> impl Future<Output = Result<History>> {
let tree = self.clone();
async move {
if let Some(history) = tree.0.read().histories.get(&entry_id) {
return Ok(history.clone());
}
let path = tree.abs_entry_path(entry_id)?;
let mut file = smol::fs::File::open(&path).await?;
let mut base_text = String::new();
file.read_to_string(&mut base_text).await?;
let history = History::new(Arc::from(base_text));
tree.0.write().histories.insert(entry_id, history.clone());
Ok(history)
}
}
pub fn save<'a>(
&self,
entry_id: usize,
content: Snapshot,
ctx: &AppContext,
) -> Task<Result<()>> {
let path = self.abs_entry_path(entry_id);
ctx.background_executor().spawn(async move {
let buffer_size = content.text_summary().bytes.min(10 * 1024);
let file = std::fs::File::create(&path?)?;
let mut writer = std::io::BufWriter::with_capacity(buffer_size, file);
for chunk in content.fragments() {
writer.write(chunk.as_bytes())?;
}
writer.flush()?;
Ok(())
})
}
fn scanning(&mut self, _: (), ctx: &mut ModelContext<Self>) {
if self.0.read().scanning {
ctx.notify();
} else {
ctx.halt_stream();
}
}
fn done_scanning(&mut self, result: io::Result<()>, ctx: &mut ModelContext<Self>) {
log::info!("done scanning");
self.0.write().scanning = false;
if let Err(error) = result {
log::error!("error populating worktree: {}", error);
} else {
ctx.notify();
}
}
}
impl fmt::Debug for Worktree {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.entry_count() == 0 {
write!(f, "Empty tree\n")
} else {
self.fmt_entry(f, 0, 0)
}
}
}
impl Entity for Worktree {
type Event = ();
}
pub trait WorktreeHandle {
fn file(&self, entry_id: usize, app: &AppContext) -> Result<FileHandle>;
}
impl WorktreeHandle for ModelHandle<Worktree> {
fn file(&self, entry_id: usize, app: &AppContext) -> Result<FileHandle> {
if entry_id >= self.read(app).entry_count() {
return Err(anyhow!("Entry does not exist in tree"));
}
Ok(FileHandle {
worktree: self.clone(),
entry_id,
})
}
}
#[derive(Clone, Debug)]
pub enum Entry {
Dir {
parent: Option<usize>,
name: OsString,
ino: u64,
is_symlink: bool,
is_ignored: bool,
children: Vec<usize>,
},
File {
parent: Option<usize>,
name: OsString,
ino: u64,
is_symlink: bool,
is_ignored: bool,
},
}
impl Entry {
fn parent(&self) -> Option<usize> {
match self {
Entry::Dir { parent, .. } | Entry::File { parent, .. } => *parent,
}
}
fn name(&self) -> &OsStr {
match self {
Entry::Dir { name, .. } | Entry::File { name, .. } => name,
}
}
}
#[derive(Clone)]
pub struct FileHandle {
worktree: ModelHandle<Worktree>,
entry_id: usize,
}
impl FileHandle {
pub fn path(&self, app: &AppContext) -> PathBuf {
self.worktree.read(app).entry_path(self.entry_id).unwrap()
}
pub fn load_history(&self, app: &AppContext) -> impl Future<Output = Result<History>> {
self.worktree.read(app).load_history(self.entry_id)
}
pub fn save<'a>(&self, content: Snapshot, ctx: &AppContext) -> Task<Result<()>> {
let worktree = self.worktree.read(ctx);
worktree.save(self.entry_id, content, ctx)
}
pub fn entry_id(&self) -> (usize, usize) {
(self.worktree.id(), self.entry_id)
}
}
struct IterStackEntry {
entry_id: usize,
child_idx: usize,
}
pub struct Iter {
tree: Worktree,
stack: Vec<IterStackEntry>,
started: bool,
}
impl Iterator for Iter {
type Item = Traversal;
fn next(&mut self) -> Option<Self::Item> {
let state = self.tree.0.read();
if !self.started {
self.started = true;
return if let Some(entry) = state.entries.first().cloned() {
self.stack.push(IterStackEntry {
entry_id: 0,
child_idx: 0,
});
Some(Traversal::Push { entry_id: 0, entry })
} else {
None
};
}
while let Some(parent) = self.stack.last_mut() {
if let Entry::Dir { children, .. } = &state.entries[parent.entry_id] {
if parent.child_idx < children.len() {
let child_id = children[post_inc(&mut parent.child_idx)];
self.stack.push(IterStackEntry {
entry_id: child_id,
child_idx: 0,
});
return Some(Traversal::Push {
entry_id: child_id,
entry: state.entries[child_id].clone(),
});
} else {
self.stack.pop();
return Some(Traversal::Pop);
}
} else {
self.stack.pop();
return Some(Traversal::Pop);
}
}
None
}
}
#[derive(Debug)]
pub enum Traversal {
Push { entry_id: usize, entry: Entry },
Pop,
}
pub struct FilesIter {
iter: Iter,
path: PathBuf,
}
pub struct FilesIterItem {
pub entry_id: usize,
pub path: PathBuf,
}
impl Iterator for FilesIter {
type Item = FilesIterItem;
fn next(&mut self) -> Option<Self::Item> {
loop {
match self.iter.next() {
Some(Traversal::Push {
entry_id, entry, ..
}) => match entry {
Entry::Dir { name, .. } => {
self.path.push(name);
}
Entry::File { name, .. } => {
self.path.push(name);
return Some(FilesIterItem {
entry_id,
path: self.path.clone(),
});
}
},
Some(Traversal::Pop) => {
self.path.pop();
}
None => {
return None;
}
}
}
}
}
trait UnwrapIgnoreTuple {
fn unwrap(self) -> Ignore;
}
impl UnwrapIgnoreTuple for (Ignore, Option<ignore::Error>) {
fn unwrap(self) -> Ignore {
if let Some(error) = self.1 {
log::error!("error loading gitignore data: {}", error);
}
self.0
}
}
pub fn match_paths(
trees: &[Worktree],
query: &str,
include_ignored: bool,
smart_case: bool,
max_results: usize,
pool: scoped_pool::Pool,
) -> Vec<PathMatch> {
let tree_states = trees.iter().map(|tree| tree.0.read()).collect::<Vec<_>>();
fuzzy::match_paths(
&tree_states
.iter()
.map(|tree| {
let skip_prefix = if trees.len() == 1 {
if let Some(Entry::Dir { name, .. }) = tree.entries.get(0) {
let name = name.to_string_lossy();
if name == "/" {
1
} else {
name.chars().count() + 1
}
} else {
0
}
} else {
0
};
(tree.id, skip_prefix, &tree.file_paths[..])
})
.collect::<Vec<_>>()[..],
query,
include_ignored,
smart_case,
max_results,
pool,
)
}
#[cfg(test)]
mod test {
use super::*;
use crate::editor::Buffer;
use crate::test::*;
use anyhow::Result;
use gpui::App;
use serde_json::json;
use std::os::unix;
#[test]
fn test_populate_and_search() {
App::test_async((), |mut app| async move {
let dir = temp_tree(json!({
"root": {
"apple": "",
"banana": {
"carrot": {
"date": "",
"endive": "",
}
},
"fennel": {
"grape": "",
}
}
}));
let root_link_path = dir.path().join("root_link");
unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
let tree = app.add_model(|ctx| Worktree::new(1, root_link_path, Some(ctx)));
app.finish_pending_tasks().await;
app.read(|ctx| {
let tree = tree.read(ctx);
assert_eq!(tree.file_count(), 4);
let results = match_paths(&[tree.clone()], "bna", false, false, 10, ctx.scoped_pool().clone())
.iter()
.map(|result| tree.entry_path(result.entry_id))
.collect::<Result<Vec<PathBuf>, _>>()
.unwrap();
assert_eq!(
results,
vec![
PathBuf::from("root_link/banana/carrot/date"),
PathBuf::from("root_link/banana/carrot/endive"),
]
);
})
});
}
#[test]
fn test_save_file() {
App::test_async((), |mut app| async move {
let dir = temp_tree(json!({
"file1": "the old contents",
}));
let tree = app.add_model(|ctx| Worktree::new(1, dir.path(), Some(ctx)));
app.finish_pending_tasks().await;
let buffer = Buffer::new(1, "a line of text.\n".repeat(10 * 1024));
let entry = app.read(|ctx| {
let entry = tree.read(ctx).files().next().unwrap();
assert_eq!(entry.path.file_name().unwrap(), "file1");
entry
});
let file_id = entry.entry_id;
tree.update(&mut app, |tree, ctx| {
smol::block_on(tree.save(file_id, buffer.snapshot(), ctx.as_ref())).unwrap()
});
let history = app
.read(|ctx| tree.read(ctx).load_history(file_id))
.await
.unwrap();
assert_eq!(history.base_text.as_ref(), buffer.text());
});
}
}