tidyup (#2)
Some checks are pending
rust-clippy analyze / Run rust-clippy analyzing (push) Waiting to run
Deploy mdBook site to Pages / build (srclang, x86_64-unknown-linux-gnu, stable) (push) Waiting to run
Deploy mdBook site to Pages / deploy (push) Blocked by required conditions
Rust / build (srclang, x86_64-unknown-linux-gnu, nightly) (push) Waiting to run
Rust / build (srclang, x86_64-unknown-linux-gnu, stable) (push) Waiting to run

Reviewed-on: #2
This commit is contained in:
sevki 2024-07-15 02:46:37 +00:00
parent 9f48827694
commit b90083f9ef
111 changed files with 117186 additions and 8097 deletions

View file

@ -4,5 +4,12 @@ default = "oksoftware"
[registries.oksoftware]
index = "https://ok.software/ok/_cargo-index.git" # Git
[registries.crates-io]
index = "https://github.com/rust-lang/crates.io-index"
[net]
git-fetch-with-cli = true
git-fetch-with-cli = true
[build]
rustflags = "--cfg=web_sys_unstable_apis"

84
.devcontainer/Dockerfile Normal file
View file

@ -0,0 +1,84 @@
# Development container image: Debian + native build deps + Rust + FireDBG.
# NOTE(review): `ARG VARIANT "value"` (space-separated) is not valid Dockerfile
# syntax — ARG takes `name[=default]`. Fixed to use `=`, and the FROM line now
# actually consumes the arg (devcontainer.json passes VARIANT as a build arg).
ARG VARIANT="bookworm-20240701"
FROM debian:${VARIANT}

# Native toolchains and libraries, sorted alphabetically. Duplicate entries
# removed: git, libssl-dev and make each appeared twice in the original list.
RUN apt-get update && apt-get install -y \
    ca-certificates \
    clang \
    cloud-image-utils \
    cmake \
    curl \
    dpkg-dev \
    expect \
    g++ \
    gcc \
    git \
    gzip \
    jq \
    libavcodec-dev \
    libavutil-dev \
    libc++abi1-14 \
    libcap-dev \
    libclang-dev \
    libdbus-1-dev \
    libdrm-dev \
    libepoxy-dev \
    libglib2.0-dev \
    libguestfs-tools \
    libslirp-dev \
    libssl-dev \
    libswscale-dev \
    libunwind-14 \
    libunwind-dev \
    libva-dev \
    libwayland-dev \
    libxext-dev \
    lld \
    lsb-release \
    make \
    meson \
    musl-dev \
    mypy \
    nasm \
    ncat \
    ninja-build \
    npm \
    openssh-client \
    perl \
    pipx \
    pkg-config \
    protobuf-compiler \
    python3 \
    python3-argh \
    python3-pip \
    python3-rich \
    qemu-kvm \
    qemu-system-x86 \
    rsync \
    screen \
    strace \
    sudo \
    tmux \
    wayland-protocols \
    wget \
    && rm -rf /var/lib/apt/lists/*

# Install Rust (stable toolchain) non-interactively.
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- --default-toolchain stable -y
ENV PATH="/root/.cargo/bin:${PATH}"
ENV CARGO_HOME=/root/.cargo
# wasm target for the browser LSP build (src-lsp-browser).
RUN rustup target add wasm32-unknown-unknown
# update rust
RUN rustup update
# FireDBG debugger for Rust.
RUN curl https://raw.githubusercontent.com/SeaQL/FireDBG.for.Rust/main/install.sh -sSf | sh
WORKDIR /workspaces

View file

@ -0,0 +1,32 @@
{
    "name": "ok.dev",
    "build": {
        "dockerfile": "Dockerfile",
        // Forwarded to the Dockerfile's ARG VARIANT (Debian base-image tag).
        "args": {
            "VARIANT": "bookworm-20240701"
        }
    },
    // NOTE(review): requires KVM on the host; container creation will fail
    // where /dev/kvm is absent (e.g. most CI runners, macOS hosts) — confirm
    // that is acceptable for all intended users.
    "runArgs": [
        "--device=/dev/kvm",
        "--group-add=kvm",
        "--pids-limit=4096"
    ],
    // "remoteUser": "root",
    "workspaceMount": "source=${localWorkspaceFolder},target=/workspaces/${localWorkspaceFolderBasename},type=bind,consistency=cached",
    "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
    "updateRemoteUserUID": true,
    "customizations": {
        "vscode": {
            "extensions": [
                "rust-lang.rust-analyzer",
                "tamasfe.even-better-toml",
                "esbenp.prettier-vscode",
                "ms-python.vscode-pylance",
                "foxundermoon.shell-format",
                "timonwong.shellcheck",
                "GitHub.copilot",
                "SeaQL.firedbg-rust"
            ]
        }
    }
}

View file

@ -21,20 +21,16 @@ on:
jobs:
rust-clippy-analyze:
name: Run rust-clippy analyzing
runs-on: self-hosted
runs-on: ubuntu-latest
container: ghcr.io/sevki/devcontainer:main
permissions:
contents: read
security-events: write
actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
steps:
- name: Modify hosts file
run: |
echo "66.241.125.220 ok.software" >> /etc/hosts
- name: Checkout code
uses: actions/checkout@v2
- name: Install Rust toolchain
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af #@v1
with:
@ -42,10 +38,8 @@ jobs:
toolchain: stable
components: clippy
override: true
- name: Install required cargo
run: cargo install clippy-sarif sarif-fmt
- name: Run rust-clippy
run:
cargo clippy
@ -57,4 +51,4 @@ jobs:
uses: github/codeql-action/upload-sarif@v2
with:
sarif_file: rust-clippy-results.sarif
wait-for-processing: true
wait-for-processing: true

View file

@ -8,6 +8,8 @@ on:
# Runs on pushes targeting the default branch
push:
branches: ["main"]
pull_request:
branches: ["main"]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
@ -28,29 +30,41 @@ jobs:
# Build job
build:
runs-on: ubuntu-latest
container: rust:latest
container: ghcr.io/sevki/devcontainer:main
strategy:
matrix:
targets:
- x86_64-unknown-linux-gnu
packages:
- srclang
toolchains:
- stable
env:
MDBOOK_VERSION: 0.4.40
steps:
- name: Modify hosts file
run: |
echo "66.241.125.220 ok.software" >> /etc/hosts
- uses: actions/checkout@v4
- name: rustup update
run: |
rustup toolchain install ${{ matrix.toolchains }}
rustup component add cargo
rustup component add clippy
rustup component add rust-src
rustup target add ${{ matrix.targets }}
rustup update
# NOTE(review): `-y` was previously passed to curl (where it is not a valid
# standalone flag) instead of to the rustup install script; it must follow
# `sh -s --` to reach the installer (same invocation shape as the
# devcontainer Dockerfile uses).
- name: Install mdBook
  run: |
    curl --proto '=https' --tlsv1.2 https://sh.rustup.rs -sSf | sh -s -- -y
    rustup update
    cargo install --version ${MDBOOK_VERSION} mdbook
- name: Setup Pages
id: pages
uses: actions/configure-pages@v5
- name: install deps
run: |
cargo install mdbook-svgbob@0.2.0
cargo install mdbook-alerts
- name: Build with mdBook
run: mdbook build
- name: Build the ide
run: |
cd packages/app
npm install
npm run build
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
@ -62,6 +76,7 @@ jobs:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/main'
needs: build
steps:
- name: Deploy to GitHub Pages

View file

@ -1,5 +1,4 @@
name: Rust
on:
push:
branches: [ "main" ]
@ -8,21 +7,35 @@ on:
env:
CARGO_TERM_COLOR: always
jobs:
build:
env:
MDBOOK_VERSION: 0.4.40
runs-on: ubuntu-latest
container: ghcr.io/sevki/devcontainer:main
container: rust:latest
strategy:
matrix:
targets:
- x86_64-unknown-linux-gnu
packages:
- srclang
toolchains:
- stable
- nightly
steps:
- name: Modify hosts file
run: |
echo "66.241.125.220 ok.software" >> /etc/hosts
- uses: actions/checkout@v4
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose
- name: rustup update
run: |
rustup toolchain install ${{ matrix.toolchains }}
rustup component add cargo
rustup component add clippy
rustup component add rust-src
rustup target add ${{ matrix.targets }}
rustup update
- name: Build ${{ matrix.packages }} for ${{ matrix.targets }}
run: cargo build --verbose --target ${{ matrix.targets }} -p ${{ matrix.packages }}
- name: Test ${{ matrix.packages }} for ${{ matrix.targets }}
run: cargo test --verbose --target ${{ matrix.targets }} -p ${{ matrix.packages }}

5
.gitignore vendored
View file

@ -1,2 +1,7 @@
book
target
node_modules
packages/app/assets/wasm/src_lsp_browser.d.ts
packages/app/assets/wasm/src_lsp_browser.js
packages/app/assets/wasm/src_lsp_browser_bg.wasm
packages/app/assets/wasm/src_lsp_browser_bg.wasm.d.ts

17
.rust-toolchain.toml Normal file
View file

@ -0,0 +1,17 @@
# Toolchain pin for rustup.
# NOTE(review): rustup only honours files named `rust-toolchain` or
# `rust-toolchain.toml`; this file was added as `.rust-toolchain.toml`
# (leading dot), which rustup ignores — confirm the intended filename.
[toolchain]
# Pinned minor release; CI separately installs stable/nightly via rustup.
channel = "1.79"
components = [
    "rust-src",
    "rustc-dev",
    "llvm-tools-preview",
    "clippy",
    "rustfmt",
    "miri",
    "cargo",
]
# wasm32 for the browser LSP bundle; musl/gnu for Linux builds.
targets = [
    "wasm32-unknown-unknown",
    "x86_64-unknown-linux-musl",
    "x86_64-unknown-linux-gnu",
]

1232
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -3,6 +3,15 @@ name = "srclang"
version = "0.1.0"
edition = "2021"
[workspace]
members = [
"crates/src-collections",
"crates/src-derive",
"crates/src-derive-test",
"crates/src-lsp-browser",
"crates/src-lsp-server",
]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = ["lalrpop"]
@ -13,18 +22,20 @@ anyhow = "1.0.45"
phf_codegen = "0.10"
tiny-keccak = { version = "2", features = ["sha3"] }
[dependencies]
salsa = { version = "0.1.0", registry = "oksoftware", package = "salsa-2022" }
salsa-macros = { version = "0.1.0", registry = "oksoftware" , package = "salsa-2022-macros" }
insta = "1.38.0"
lalrpop = "0.20.2"
getrandom = { version = "0.2", features = ["js"] }
salsa = { version = "0.1.0", registry = "oksoftware", package = "salsa-2022" }
salsa-macros = { version = "0.1.0", registry = "oksoftware", package = "salsa-2022-macros" }
lalrpop-util = { version = "0.20.2", features = ["lexer", "unicode"] }
okstd = { version = "0.1.3", features = [], default-features = false, registry = "oksoftware" }
proptest = "1.4.0"
stringzilla = "3.8.1"
okstd = { features = [
"macros",
], default-features = false, registry = "oksoftware", version = "0.1.9" }
syn = "2.0.60"
bitflags = "2.5.0"
ropey = { version = "1.6.1", features = ["small_chunks"] }
hashbrown = "0.14.5"
src-derive = { version = "0.1.0", path = "crates/src-derive", registry = "oksoftware" }
paste = "1.0.15"
[dev-dependencies]
insta = "1.38.0"

47
Makefile.toml Normal file
View file

@ -0,0 +1,47 @@
# cargo-make task definitions for building the browser LSP bundle and web app.
[config]
default_to_workspace = false
skip_core_tasks = true

# One-time tooling + JS dependency install. The pinned wasm-bindgen-cli
# version matches the wasm-bindgen crate version used by src-lsp-browser.
[tasks.deps]
script = '''
cargo install wasm-bindgen-cli --version 0.2.81 --registry crates-io
npm install
'''

[tasks.build-server]
# NOTE(review): fixed two defects here — the original compiled for the host
# target (plain `cargo build --release`) yet pointed wasm-bindgen at
# target/wasm32-unknown-unknown/..., and it referenced a stale artifact name
# `demo_lsp_browser.wasm`. The crate is `src-lsp-browser`, so the cdylib is
# `src_lsp_browser.wasm` (matching the generated files listed in .gitignore).
script = '''
cargo build --release --target wasm32-unknown-unknown -p src-lsp-browser
wasm-bindgen --out-dir ./packages/app/assets/wasm --target web --typescript ./target/wasm32-unknown-unknown/release/src_lsp_browser.wasm
'''

[tasks.build-app]
script = '''
npm run build --workspace=packages/app
'''

[tasks.build]
dependencies = ["build-server", "build-app"]

[tasks.clean-server]
script = '''
cargo clean
'''

[tasks.clean-app]
script = '''
rm -rf packages/app/dist
rm -rf packages/app/assets/wasm
'''

[tasks.clean]
dependencies = ["clean-server", "clean-app"]

# rustfmt's unstable options require the nightly toolchain.
[tasks.format]
script = '''
cargo +nightly fmt --all
'''

[tasks.run]
script = '''
npm run app --workspace=packages/app
'''

View file

@ -6,12 +6,15 @@ src = "docs"
# additional css https://raw.githubusercontent.com/oknotokcomputer/okcss/main/ok.css
[output.html]
additional-css = ["ok.css"]
additional-css = ["ok.css", "skill-tree.css"]
theme = "docs/theme"
default-theme = "dark"
git-repository-url = "https://ok.software/ok/src"
preferred-dark-theme = "rust"
additional-js =["viz.js", "full.render.js", "panzoom.min.js", "skill-tree.js"]
[preprocessor.svgbob]
# [preprocessor.svgbob]
[preprocessor.alerts]
[preprocessor.skill-tree]
command = "mdbook-skill-tree"

View file

@ -0,0 +1,10 @@
[package]
name = "src-collections"
version = "0.1.0"
edition = "2021"
publish = ["oksoftware"]
[dependencies]
rustc-hash = "1.1.0"
indexmap = "1.9.1"
typed-index-collections = "3.0.3"

View file

@ -0,0 +1,10 @@
//! Collection aliases shared across the workspace: FxHash-based maps/sets and
//! typed index collections, so every crate hashes and indexes consistently.
use rustc_hash::FxHasher;
use std::hash::BuildHasherDefault;

/// Fast non-cryptographic hash map (FxHash; not DoS-resistant).
pub use rustc_hash::FxHashMap as Map;
/// Fast non-cryptographic hash set (FxHash; not DoS-resistant).
pub use rustc_hash::FxHashSet as Set;

/// Insertion-order-preserving map using the same Fx hasher.
pub type IndexMap<K, V> = indexmap::IndexMap<K, V, BuildHasherDefault<FxHasher>>;
/// Insertion-order-preserving set using the same Fx hasher.
pub type IndexSet<V> = indexmap::IndexSet<V, BuildHasherDefault<FxHasher>>;
/// Vec indexed by a typed key `K` instead of a raw usize.
pub type IndexVec<K, V> = typed_index_collections::TiVec<K, V>;

View file

@ -0,0 +1,15 @@
[package]
name = "src-derive-test"
version = "0.1.0"
edition = "2021"
publish = ["oksoftware"]
[dependencies]
okstd = { version = "0.1.9", registry = "oksoftware", features = ["macros"] }
src-derive = { version = "0.1.0", path = "../src-derive", registry = "oksoftware" }
srclang = { version = "0.1.0", path = "../..", registry = "oksoftware" }
# [[bin]]
# name = "src-derive-test"
# path = "expanded.rs"

View file

@ -0,0 +1,119 @@
use src_derive::node;
use srclang::lexer::Location;
use srclang::ops;
use srclang::parser::span::Spanned;
use srclang::span;
use std::fmt::Display;
use std::ops::Range;
#[node]
struct Ident {
name: String,
generics: Vec<Ident>,
}
#[node]
struct Field {
vis: Option<Visibility>,
name: String,
ty: Ident,
}
pub enum Literal {
Bool(bool),
Float(f64),
Integer(i64),
String(String),
}
#[derive(Debug)]
pub enum Visibility {
Private,
Public,
}
impl Display for Visibility {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Visibility::Public => write!(f, "pub"),
Visibility::Private => write!(f, "priv"),
}
}
}
pub enum Operator {
Add,
Sub,
Mul,
Div,
Mod,
And,
Or,
Not,
Eq,
Ne,
Lt,
Le,
Gt,
Ge,
}
struct PrettyPrinter;
impl FieldVisitor for PrettyPrinter {
fn visit_vis(&self, vis: &Visibility, range: &Range<Location>) -> ops::traversal::Result {
print!("{} ", vis);
ops::traversal::Result::Continue
}
fn visit_name(&self, name: &String, range: &Range<Location>) -> ops::traversal::Result {
print!("{} :", name);
ops::traversal::Result::Continue
}
fn visit_ty(&self, ty: &Ident, range: &Range<Location>) -> ops::traversal::Result {
ty.accept(self);
ops::traversal::Result::Continue
}
}
impl IdentVisitor for PrettyPrinter {
fn visit_name(&self, name: &String, range: &Range<Location>) -> ops::traversal::Result {
print!("{}", name);
ops::traversal::Result::Continue
}
fn visit_generics(&self, generic: &Ident, range: &Range<Location>) -> ops::traversal::Result {
print!("<");
generic.accept(self);
print!(">");
ops::traversal::Result::Continue
}
}
/// Demo for the `#[node]` macro: builds `pub a : b<c>` (a `Field` whose type
/// is a generic `Ident`) at default locations and pretty-prints it via the
/// generated `accept`/visitor machinery.
fn main() {
    // `b<c>`: an identifier with one generic argument.
    let b = Ident::new(
        span!(Location::default(), "b".to_string(), Location::default()),
        vec![span!(
            Location::default(),
            Ident::new(
                span!(Location::default(), "c".to_string(), Location::default()),
                vec![]
            ),
            Location::default()
        )],
    );
    // `pub a : b<c>`.
    let a = Field::new(
        Some(span!(
            Location::default(),
            Visibility::Public,
            Location::default()
        )),
        span!(Location::default(), "a".to_string(), Location::default()),
        span!(Location::default(), b, Location::default()),
    );
    let pretty_printer = PrettyPrinter;
    a.accept(&pretty_printer);
    // Trailing newline; `println!()` instead of `println!("")` (clippy:
    // println_empty_string).
    println!();
}

View file

@ -0,0 +1,20 @@
[package]
name = "src-derive"
version = "0.1.0"
edition = "2021"
publish = ["oksoftware"]
[lib]
proc-macro = true
[dependencies]
quote = "1.0"
proc-macro2 = "1.0"
syn = { version = "1.0", features = ["full"] }
okstd = { version = "0.1.9", registry = "oksoftware", features = ["macros"] }
[dev-dependencies]
insta = "1.39.0"
pretty_assertions = "1.4.0"

View file

@ -0,0 +1,13 @@
//! Proc-macro crate providing the `#[node]` and `#[walker]` attributes used
//! by srclang's AST definitions.
use proc_macro::TokenStream;
mod node;
mod walker;

/// Attribute macro: expands a struct into a spanned AST node with a visitor
/// trait, constructors and an `accept` method (see `node::define_nodes`).
#[proc_macro_attribute]
pub fn node(_attr: TokenStream, item: TokenStream) -> TokenStream {
    TokenStream::from(node::define_nodes(_attr, item))
}

/// Attribute macro: re-emits the item and generates a `<Type>Walker` driving
/// a `<Type>Visitor` over a `Spanned<Type>` (see `walker`). The attribute
/// arguments are ignored.
#[proc_macro_attribute]
pub fn walker(_attr: TokenStream, item: TokenStream) -> TokenStream {
    TokenStream::from(walker::generate_walker_impl(item))
}

View file

@ -0,0 +1,346 @@
use proc_macro::TokenStream;
use quote::{format_ident, quote, ToTokens};
use syn::{
parse_macro_input, Data, DeriveInput, Fields, GenericArgument, Ident, PathArguments, Type,
TypePath,
};
/// Expands `#[node]` on a struct into a small AST-node toolkit:
/// - the struct itself, with every field type wrapped in `Spanned<...>`
///   (`Option<T>` -> `Option<Spanned<T>>`, `Vec<T>` -> `Vec<Spanned<T>>`,
///   plain `T` -> `Spanned<T>`),
/// - a `<Name>Visitor` trait with one `visit_<field>` stub per field,
/// - `From<(spanned field types...)>` and `new(...)` constructors,
/// - `accept(&self, visitor)` visiting fields in declaration order.
///
/// `_attr` (the attribute's arguments) is currently unused. Non-struct items
/// become a `compile_error!`; non-path field types panic at expansion time.
pub fn define_nodes(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let input = parse_macro_input!(item as DeriveInput);
    let struct_name = &input.ident;
    // Only structs are supported; anything else expands to a compile_error!.
    let fields = match &input.data {
        Data::Struct(data) => &data.fields,
        _ => {
            return quote! {
                compile_error!("define_nodes can only be used on structs");
            }
            .into()
        }
    };
    // Field declarations with each type wrapped in Spanned<...>.
    let spanned_fields = fields.iter().map(|field| {
        let field_name = &field.ident;
        let field_type = &field.ty;
        match field_type {
            Type::Path(path) => {
                let expanded_type = wrap_path_in_spanned(path);
                quote! {
                    #field_name: #expanded_type,
                }
            }
            _ => {
                panic!(
                    "Only named fields are supported which {} is not",
                    field_type.into_token_stream()
                );
            }
        }
    });
    // NOTE(review): `expanded_impl` is never interpolated into `expanded`
    // below, so these `*_node`/`*_span` getters (whose bodies are `todo!()`)
    // are dead code — and `wrap_range_location` is only reachable from here.
    let expanded_impl = fields.iter().map(|field| {
        let field_name = &field.ident;
        let field_span_getter = format_ident!("{}_span", field_name.as_ref().unwrap());
        let field_node_getter = format_ident!("{}_node", field_name.as_ref().unwrap());
        let field_type = &field.ty;
        match field_type {
            Type::Path(path) => {
                let expanded_type = wrap_path_in_spanned(path);
                let expanded_range_type = wrap_range_location(path);
                quote! {
                    fn #field_node_getter(&self) -> &#expanded_type {
                        todo!()
                    }
                    fn #field_span_getter(&self) -> &#expanded_range_type {
                        todo!()
                    }
                }
            }
            _ => {
                panic!(
                    "Only named fields are supported which {} is not",
                    field_type.into_token_stream()
                );
            }
        }
    });
    let visitor_name = format_ident!("{}Visitor", struct_name);
    // One `visit_<field>` stub per field, receiving the unwrapped payload
    // type plus the span covering it.
    let visitor_trait_stub = fields.iter().map(|field| {
        let field_name = &field.ident;
        let field_visit = format_ident!("visit_{}", field_name.as_ref().unwrap());
        let field_type = &field.ty;
        match field_type {
            Type::Path(path) => {
                let unwrapped_type = unwrap_path(path);
                quote! {
                    fn #field_visit(&self, node: &#unwrapped_type, span: &Range<Location>) -> ops::traversal::Result;
                }
            }
            _ => {
                panic!(
                    "Only named fields are supported which {} is not",
                    field_type.into_token_stream()
                );
            }
        }
    });
    // Per-field visit fragments for the generated `accept`.
    // NOTE(review): every `if let cont = ...` below is an IRREFUTABLE pattern
    // — it always matches, so each generated `if let` body runs and `accept`
    // returns right after the FIRST field is visited. Presumably the intent
    // was to stop only when the visitor returns a "break"-like variant of
    // `ops::traversal::Result`; confirm the variant and match on it instead.
    let accept_impl = fields.iter().map(|field| {
        let field_name = &field.ident;
        let field_type = &field.ty;
        match field_type {
            Type::Path(path) => {
                let visit_fn = format_ident!("visit_{}", field_name.as_ref().unwrap());
                match path.path.segments.last() {
                    Some(syn::PathSegment { ident, arguments }) => match arguments {
                        // Plain `T`: visit the Spanned tuple directly.
                        PathArguments::None => {
                            quote! {
                                if let cont = visitor.#visit_fn(
                                    &self.#field_name.1,
                                    &(self.#field_name.0..self.#field_name.2)
                                ) {
                                    return;
                                }
                            }
                        }
                        PathArguments::AngleBracketed(args) => match args.args.first() {
                            Some(GenericArgument::Type(_)) => match ident.to_string().as_str() {
                                // `Option<T>`: visit only when present.
                                "Option" => {
                                    quote! {
                                        if let Some(inner) = &self.#field_name {
                                            if let cont = visitor.#visit_fn(
                                                &inner.1,
                                                &(inner.0..inner.2)
                                            ) {
                                                return;
                                            }
                                        }
                                    }
                                }
                                // `Vec<T>`: visit each element in order.
                                "Vec" => {
                                    quote! {
                                        for inner in self.#field_name.iter() {
                                            if let cont = visitor.#visit_fn(
                                                &inner.1,
                                                &(inner.0..inner.2)
                                            ) {
                                                return;
                                            }
                                        }
                                    }
                                }
                                // Other single-generic wrappers: treated like
                                // a plain Spanned field.
                                _ => {
                                    quote! {
                                        if let cont = visitor.#visit_fn(
                                            &self.#field_name.1,
                                            &(self.#field_name.0..self.#field_name.2)
                                        ) {
                                            return;
                                        }
                                    }
                                }
                            },
                            // NOTE(review): this arm emits `self.1`/`self.0`
                            // (no field access, no `&`) — looks inconsistent
                            // with the arms above; confirm it is ever reached.
                            _ => {
                                quote! {
                                    if let cont = visitor.#visit_fn(
                                        self.1,
                                        self.0..self.2
                                    ) {
                                        return;
                                    }
                                }
                            }
                        },
                        PathArguments::Parenthesized(_) => todo!(),
                    },
                    None => {
                        quote! {
                            compile_error!("No path segments found");
                        }
                    }
                }
            }
            _ => {
                panic!(
                    "Only named fields are supported which {} is not",
                    field_type.into_token_stream()
                );
            }
        }
    });
    let field_names = fields.iter().map(|field| {
        let field_name = &field.ident;
        quote! {
            #field_name
        }
    });
    // NOTE(review): name is misspelled and misleading — this is simply a
    // clone of `spanned_fields` (the declarations ARE spanned), reused for
    // `new`'s parameter list because the first iterator is consumed below.
    let not_spanned_fileds = spanned_fields.clone();
    let field_types = fields.iter().map(|field| {
        let field_type = &field.ty;
        match field_type {
            Type::Path(path) => {
                let expanded_type = wrap_path_in_spanned(path);
                quote! {
                    #expanded_type
                }
            }
            _ => {
                panic!(
                    "Only named fields are supported which {} is not",
                    field_type.into_token_stream()
                );
            }
        }
    });
    let field_types_clone = field_types.clone();
    let struct_name_lower = format_ident!("{}", struct_name.to_string().to_lowercase());
    // Tuple indices (0, 1, ...) used to destructure the From<(...)> tuple.
    let field_ids = fields.iter().enumerate().map(|field| {
        let field_name = syn::Index::from(field.0);
        quote! {
            #struct_name_lower.#field_name
        }
    });
    let vis = &input.vis;
    let expanded = quote! {
        #[derive(Debug)]
        #vis struct #struct_name {
            #(#spanned_fields)*
        }
        #vis trait #visitor_name {
            #(#visitor_trait_stub)*
        }
        impl From<
            (#(#field_types),*)
        > for #struct_name {
            fn from(
                #struct_name_lower: (
                    #(#field_types_clone),*
                )
            ) -> Self {
                Self::new(
                    #(#field_ids),*
                )
            }
        }
        impl #struct_name {
            fn new(#(#not_spanned_fileds)*) -> Self {
                Self {
                    #(#field_names,)*
                }
            }
        }
        impl #struct_name {
            fn accept(&self, visitor: &impl #visitor_name) {
                #(#accept_impl)*
            }
        }
    };
    expanded.into()
}
/// Maps a field's type path to the type of its span accessor.
///
/// A path whose last segment carries one generic type argument (e.g.
/// `Option<T>`, `Vec<T>`) keeps its wrapper: `Option<Range<Location>>`,
/// `Vec<Range<Location>>`. Every other shape collapses to a bare
/// `Range<Location>`.
///
/// Fix over the original: the unused `inner_ty` binding (an unused-variable
/// warning) is replaced by a wildcard guard; behavior is unchanged.
fn wrap_range_location(path: &TypePath) -> impl ToTokens {
    match path.path.segments.last() {
        // Single generic argument: keep the wrapper, swap the payload.
        Some(syn::PathSegment {
            ident,
            arguments: syn::PathArguments::AngleBracketed(args),
        }) if matches!(args.args.first(), Some(GenericArgument::Type(_))) => {
            quote! {
                #ident<Range<Location>>
            }
        }
        // No usable generics (or an empty path): the span is a plain range.
        _ => {
            quote! {
                Range<Location>
            }
        }
    }
}
/// Rewrites a field type so its payload is wrapped in `Spanned<...>`:
/// `Option<T>` -> `Option<Spanned<T>>`, `Vec<T>` -> `Vec<Spanned<T>>`, and a
/// plain `T` -> `Spanned<T>`. A degenerate empty path wraps the whole path.
fn wrap_path_in_spanned(path: &TypePath) -> impl ToTokens {
    match path.path.segments.last() {
        Some(syn::PathSegment {
            ident,
            arguments: syn::PathArguments::AngleBracketed(args),
        }) => match args.args.first() {
            // Wrapper with a generic payload: push Spanned inside the wrapper.
            Some(GenericArgument::Type(inner_ty)) => quote! {
                #ident<Spanned<#inner_ty>>
            },
            // Angle brackets but no usable type argument: wrap the ident.
            _ => quote! {
                Spanned<#ident>
            },
        },
        // Plain (non-generic) segment: wrap it directly.
        Some(syn::PathSegment { ident, .. }) => quote! {
            Spanned<#ident>
        },
        // No segments at all: fall back to wrapping the full path.
        None => quote! {
            Spanned<#path>
        },
    }
}
/// Strips one layer of generics from a type path: `Option<T>` / `Vec<T>`
/// yield `T`, a plain `Ident` yields `Ident`, and an empty path is passed
/// through unchanged. Used to type the visitor's `visit_*` parameters.
fn unwrap_path(path: &TypePath) -> impl ToTokens {
    match path.path.segments.last() {
        Some(syn::PathSegment {
            ident,
            arguments: syn::PathArguments::AngleBracketed(args),
        }) => match args.args.first() {
            // Generic wrapper: the payload type is what visitors receive.
            Some(GenericArgument::Type(inner_ty)) => quote! {
                #inner_ty
            },
            // Angle brackets without a usable type argument: keep the ident.
            _ => quote! {
                #ident
            },
        },
        // Plain (non-generic) segment: keep the ident as-is.
        Some(syn::PathSegment { ident, .. }) => quote! {
            #ident
        },
        // No segments at all: pass the whole path through.
        None => quote! {
            #path
        },
    }
}
/// Wraps a (possibly named) field type in `Spanned<...>`, emitting either a
/// `name: Spanned<T>,` declaration or a bare `Spanned<T>,` tuple entry.
/// Non-path types expand to a `compile_error!`.
/// NOTE(review): not referenced anywhere in this module's visible code —
/// looks like dead code; confirm before removing.
fn wrap_type_in_spanned(ty: &Type, field_name: &Option<Ident>) -> impl ToTokens {
    match (ty, field_name) {
        // Named field: keep the name, wrap the type.
        (Type::Path(path), Some(field_name)) => {
            let ty = wrap_path_in_spanned(path);
            quote! {
                #field_name: #ty,
            }
        }
        // Unnamed (tuple) field: emit just the wrapped type.
        (Type::Path(path), None) => {
            let ty = wrap_path_in_spanned(path);
            quote! {
                #ty,
            }
        }
        _ => {
            quote! {
                compile_error!("Only named fields are supported");
            }
        }
    }
}

View file

@ -0,0 +1,5 @@
---
source: crates/src-derive/src/lib.rs
expression: expected.to_string()
---
pub trait BinaryOperationVisitor : BoxVisitor + SpannedVisitor + NodeVisitor + OperatorVisitor { fn visit (& mut self , node : & BinaryOperation) { self . visit_lhs (& node . lhs) ; self . visit_op (& node . op) ; self . visit_rhs (& node . rhs) ; } fn visit_lhs (& mut self , value : & Box < Spanned < Node >>) { } fn visit_op (& mut self , value : & Operator) { } fn visit_rhs (& mut self , value : & Box < Spanned < Node >>) { } }

View file

@ -0,0 +1,5 @@
---
source: crates/src-derive/src/lib.rs
expression: expected.to_string()
---
pub trait BinaryOperationVisitor : BoxVisitor + SpannedVisitor + NodeVisitor + OperatorVisitor { fn visit (& mut self , node : & BinaryOperation) { self . visit_lhs (& node . lhs) ; self . visit_op (& node . op) ; self . visit_rhs (& node . rhs) ; } fn visit_lhs (& mut self , value : & Box < Spanned < Node >>) { } fn visit_op (& mut self , value : & Operator) { } fn visit_rhs (& mut self , value : & Box < Spanned < Node >>) { } }

View file

@ -0,0 +1,179 @@
use proc_macro2::{Ident, TokenStream};
use quote::{format_ident, quote, ToTokens};
use syn::{
parse_macro_input, Data, DeriveInput, Fields, GenericArgument, PathArguments, Type, TypePath,
};
/// Attribute body for `#[walker]`: re-emits the annotated item unchanged and
/// additionally defines a `<Type>Walker` unit struct whose `walk` function
/// drives a `<Type>Visitor` over a `Spanned<Type>` tree.
pub fn generate_walker_impl(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input = parse_macro_input!(item as DeriveInput);
    let type_name = &input.ident;
    // Companion type names are derived by convention: FooWalker / FooVisitor.
    let walker_name = format_ident!("{}Walker", type_name);
    let visitor_name = format_ident!("{}Visitor", type_name);
    let walk_impl = generate_walk_impl(type_name, &input.data);
    let expanded = quote! {
        #input
        pub struct #walker_name;
        impl #walker_name {
            pub fn walk<V: #visitor_name>(node: &Spanned<#type_name>, visitor: &mut V) {
                #walk_impl
            }
        }
    };
    proc_macro::TokenStream::from(expanded)
}
/// Dispatches walk-body generation on the shape of the annotated item:
/// structs walk each field, enums match on the active variant, and unions
/// are rejected with a panic at expansion time.
fn generate_walk_impl(type_name: &Ident, data: &Data) -> TokenStream {
    match data {
        Data::Struct(data_struct) => generate_struct_walk_impl(data_struct),
        Data::Enum(data_enum) => generate_enum_walk_impl(type_name, data_enum),
        Data::Union(_) => panic!("Unions are not supported"),
    }
}
/// Emits the code that visits one value of type `ty`, where `field_access`
/// is the expression reaching that value.
///
/// Recurses through `Box` (deref), `Option` (visit when `Some`) and `Vec`
/// (visit each item); a `Spanned<T>` payload produces a
/// `visitor.visit_t(...)` call. Any other type yields an empty fragment —
/// i.e. it is silently not walked.
///
/// NOTE(review): the visit method name is built by lowercasing the payload's
/// whole token stream. This assumes the payload of `Spanned<...>` is a simple
/// identifier; a generic payload (e.g. `Spanned<Box<Node>>`) would yield a
/// string containing spaces/angle brackets and make `format_ident!` panic —
/// confirm payloads are always plain idents.
fn generate_field_visit(ty: &Type, field_access: TokenStream) -> TokenStream {
    match ty {
        Type::Path(TypePath { path, .. }) => {
            if let Some(segment) = path.segments.last() {
                match segment.ident.to_string().as_str() {
                    "Spanned" => {
                        if let PathArguments::AngleBracketed(args) = &segment.arguments {
                            if let Some(GenericArgument::Type(inner_type)) = args.args.first() {
                                let visit_method = format_ident!(
                                    "visit_{}",
                                    inner_type.to_token_stream().to_string().to_lowercase()
                                );
                                // `.1` is the Spanned payload; `.span()` its range.
                                return quote! {
                                    visitor.#visit_method(&(#field_access).1, (#field_access).span());
                                };
                            }
                        }
                    }
                    "Box" => {
                        if let PathArguments::AngleBracketed(args) = &segment.arguments {
                            if let Some(GenericArgument::Type(inner_type)) = args.args.first() {
                                // Recurse through the deref of the box.
                                let inner_visit =
                                    generate_field_visit(inner_type, quote! { (*#field_access) });
                                return quote! {
                                    #inner_visit
                                };
                            }
                        }
                    }
                    "Option" => {
                        if let PathArguments::AngleBracketed(args) = &segment.arguments {
                            if let Some(GenericArgument::Type(inner_type)) = args.args.first() {
                                // The recursive fragment reads from `inner`,
                                // bound by the generated `if let` below.
                                let inner_visit =
                                    generate_field_visit(inner_type, quote! { inner });
                                return quote! {
                                    if let Some(inner) = #field_access.as_ref() {
                                        #inner_visit
                                    }
                                };
                            }
                        }
                    }
                    "Vec" => {
                        if let PathArguments::AngleBracketed(args) = &segment.arguments {
                            if let Some(GenericArgument::Type(inner_type)) = args.args.first() {
                                // The recursive fragment reads from `item`,
                                // bound by the generated `for` loop below.
                                let inner_visit = generate_field_visit(inner_type, quote! { item });
                                return quote! {
                                    for item in #field_access.iter() {
                                        #inner_visit
                                    }
                                };
                            }
                        }
                    }
                    _ => {}
                }
            }
        }
        _ => {}
    }
    // Unrecognized types produce no visit code at all.
    quote! {}
}
fn generate_struct_walk_impl(data_struct: &syn::DataStruct) -> TokenStream {
let field_visits = data_struct
.fields
.iter()
.map(|field| {
let field_name = &field.ident;
generate_field_visit(&field.ty, quote!(node.1.#field_name))
})
.collect::<Vec<_>>();
quote! {
#(#field_visits)*
}
}
/// Emits the walk body for an enum: a `match` over `node.1` that recurses
/// into single-field tuple variants and ignores every other variant shape.
/// NOTE(review): multi-field tuple variants, struct variants and unit
/// variants all fall into the empty `{ .. } => {}` arm, so their contents
/// are never walked — confirm that is intended.
fn generate_enum_walk_impl(enum_name: &Ident, data_enum: &syn::DataEnum) -> TokenStream {
    let variant_matches = data_enum
        .variants
        .iter()
        .map(|variant| {
            let variant_name = &variant.ident;
            match &variant.fields {
                // Exactly one unnamed field: bind it as `value` and recurse.
                Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {
                    let field_visit = generate_field_visit(&fields.unnamed[0].ty, quote!(value));
                    quote! {
                        #enum_name::#variant_name(value) => {
                            #field_visit
                        }
                    }
                }
                // Everything else: matched but not walked.
                _ => quote! {
                    #enum_name::#variant_name { .. } => {}
                },
            }
        })
        .collect::<Vec<_>>();
    quote! {
        match &node.1 {
            #(#variant_matches)*
        }
    }
}
/// True when `ty` is a path type whose final segment's identifier equals
/// `name`. Shared implementation for the wrapper-type predicates below.
/// NOTE(review): none of these predicates are referenced in the visible
/// module — possibly dead code; confirm before removing.
fn last_segment_is(ty: &Type, name: &str) -> bool {
    match ty {
        Type::Path(TypePath { path, .. }) => path
            .segments
            .last()
            .map_or(false, |segment| segment.ident == name),
        _ => false,
    }
}

/// True for `Spanned<...>` (judged by the final path segment only).
fn is_spanned_type(ty: &Type) -> bool {
    last_segment_is(ty, "Spanned")
}

/// True for `Box<...>` (judged by the final path segment only).
fn is_box_type(ty: &Type) -> bool {
    last_segment_is(ty, "Box")
}

/// True for `Option<...>` (judged by the final path segment only).
fn is_option_type(ty: &Type) -> bool {
    last_segment_is(ty, "Option")
}

/// True for `Vec<...>` (judged by the final path segment only).
fn is_vec_type(ty: &Type) -> bool {
    last_segment_is(ty, "Vec")
}

View file

@ -0,0 +1,30 @@
[package]
publish = false
edition = "2021"
name = "src-lsp-browser"
version = "0.0.0"
[features]
default = ["tower-lsp/runtime-agnostic"]
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
console_error_panic_hook = "0.1.7"
src-lsp-server = { version = "0.0", path = "../src-lsp-server", default-features = false }
futures = "0.3.21"
js-sys = "0.3.57"
tower-lsp = { version = "0.17.0", default-features = false }
wasm-bindgen = "0.2.81"
wasm-bindgen-futures = { version = "0.4.30", features = [
"futures-core-03-stream",
] }
wasm-streams = "0.2.3"
srclang = { version = "0.1.0", path = "../..", registry = "oksoftware" }
web-sys = { version = "0.3.69", features = [
"console",
"HtmlTextAreaElement",
"ReadableStream",
"WritableStream",
] }

View file

@ -0,0 +1,65 @@
#![deny(clippy::all)]
#![deny(unsafe_code)]
use futures::stream::TryStreamExt;
use tower_lsp::{LspService, Server};
use wasm_bindgen::{prelude::*, JsCast};
use wasm_bindgen_futures::stream::JsStream;
/// Transport configuration handed over from JavaScript: the two byte streams
/// the LSP server reads from and writes to.
#[wasm_bindgen]
pub struct ServerConfig {
    // Client -> server messages; must yield Uint8Array chunks (see `serve`).
    into_server: js_sys::AsyncIterator,
    // Server -> client sink.
    from_server: web_sys::WritableStream,
}
#[wasm_bindgen]
impl ServerConfig {
    /// JS-visible constructor: `new ServerConfig(intoServer, fromServer)`.
    #[wasm_bindgen(constructor)]
    pub fn new(into_server: js_sys::AsyncIterator, from_server: web_sys::WritableStream) -> Self {
        Self {
            into_server,
            from_server,
        }
    }
}
// NOTE: we don't use web_sys::ReadableStream for input here because on the
// browser side we need to use a ReadableByteStreamController to construct it
// and so far only Chromium-based browsers support that functionality.
// NOTE: input needs to be an AsyncIterator<Uint8Array, never, void> specifically
/// Entry point called from JS: adapts the two JS streams to AsyncRead /
/// AsyncWrite and runs the tower-lsp server over them until the input
/// stream ends.
#[wasm_bindgen]
pub async fn serve(config: ServerConfig) -> Result<(), JsValue> {
    console_error_panic_hook::set_once();
    web_sys::console::log_1(&"server::serve".into());
    let ServerConfig {
        into_server,
        from_server,
    } = config;
    let input = JsStream::from(into_server);
    let input = input
        // Each stream item must be a Uint8Array chunk; anything else is a
        // caller bug and panics via the expect below.
        .map_ok(|value| {
            value
                .dyn_into::<js_sys::Uint8Array>()
                .expect("could not cast stream item to Uint8Array")
                .to_vec()
        })
        // Stream errors are collapsed to a generic I/O error so the
        // AsyncRead adapter has a std error type to surface.
        .map_err(|_err| std::io::Error::from(std::io::ErrorKind::Other))
        .into_async_read();
    let output = JsCast::unchecked_into::<wasm_streams::writable::sys::WritableStream>(from_server);
    let output = wasm_streams::WritableStream::from_raw(output);
    let output = output.try_into_async_write().map_err(|err| err.0)?;
    let (service, messages) = LspService::new(|client| src_lsp_server::Server::new(client));
    // Runs until the input stream is exhausted, then returns Ok.
    Server::new(input, output, messages).serve(service).await;
    Ok(())
}
mod tokenizer;
pub use tokenizer::{token_type_as_js_string, tokenize};

View file

@ -0,0 +1,152 @@
use js_sys::JsString;
use srclang::lexer::{self};
use wasm_bindgen::prelude::*;
/// A lexed token exported to JavaScript: a byte range in the input plus its
/// highlighting classification.
#[wasm_bindgen]
pub struct TokenSpan {
    /// Start offset of the token in the input.
    pub start: usize,
    /// End offset of the token — assumed exclusive (half-open range);
    /// confirm against the lexer's `start`/`end` convention.
    pub end: usize,
    /// Token class used as the highlighting scope.
    pub scope: TokenType,
}
#[wasm_bindgen]
#[derive(Clone, Copy)]
pub enum TokenType {
Pipe,
Ampersand,
Semicolon,
Equals,
LessThan,
GreaterThan,
Variable,
Word,
String,
Comment,
Integer,
Float,
Eof,
NewLine,
LeftParen,
RightParen,
LeftBrace,
RightBrace,
LeftBracket,
RightBracket,
Comma,
Dot,
Colon,
Underscore,
Minus,
Plus,
Arrow,
FatArrow,
Divide,
Multiply,
Percent,
Dollar,
Exclamation,
Question,
Tilde,
At,
Caret,
Shebang,
}
#[wasm_bindgen]
pub fn token_type_as_js_string(token_type: TokenType) -> JsString {
match token_type {
TokenType::Pipe => "Pipe",
TokenType::Ampersand => "Ampersand",
TokenType::Semicolon => "Semicolon",
TokenType::Equals => "Equals",
TokenType::LessThan => "LessThan",
TokenType::GreaterThan => "GreaterThan",
TokenType::Variable => "Variable",
TokenType::Word => "Word",
TokenType::String => "String",
TokenType::Comment => "Comment",
TokenType::Integer => "Integer",
TokenType::Float => "Float",
TokenType::Eof => "Eof",
TokenType::NewLine => "NewLine",
TokenType::LeftParen => "LeftParen",
TokenType::RightParen => "RightParen",
TokenType::LeftBrace => "LeftBrace",
TokenType::RightBrace => "RightBrace",
TokenType::LeftBracket => "LeftBracket",
TokenType::RightBracket => "RightBracket",
TokenType::Comma => "Comma",
TokenType::Dot => "Dot",
TokenType::Colon => "Colon",
TokenType::Underscore => "Underscore",
TokenType::Minus => "Minus",
TokenType::Plus => "Plus",
TokenType::Arrow => "Arrow",
TokenType::FatArrow => "FatArrow",
TokenType::Divide => "Divide",
TokenType::Multiply => "Multiply",
TokenType::Percent => "Percent",
TokenType::Dollar => "Dollar",
TokenType::Exclamation => "Exclamation",
TokenType::Question => "Question",
TokenType::Tilde => "Tilde",
TokenType::At => "At",
TokenType::Caret => "Caret",
TokenType::Shebang => "Shebang",
}
.into()
}
#[wasm_bindgen]
pub fn tokenize(input: &str) -> Result<Vec<TokenSpan>, JsValue> {
let lexer = srclang::lexer::Lexer::new(input, 0);
let tokens: Vec<TokenSpan> = lexer
.map(|token| TokenSpan {
start: token.start,
end: token.end,
scope: match token.node {
lexer::Token::Pipe => TokenType::Pipe,
lexer::Token::Ampersand => TokenType::Ampersand,
lexer::Token::Semicolon => TokenType::Semicolon,
lexer::Token::Equals => TokenType::Equals,
lexer::Token::LessThan => TokenType::LessThan,
lexer::Token::GreaterThan => TokenType::GreaterThan,
lexer::Token::Variable(_) => TokenType::Variable,
lexer::Token::Word(_) => TokenType::Word,
lexer::Token::String(_) => TokenType::String,
lexer::Token::Comment(_) => TokenType::Comment,
lexer::Token::Integer(_) => TokenType::Integer,
lexer::Token::Float(_) => TokenType::Float,
lexer::Token::Eof => TokenType::Eof,
lexer::Token::NewLine => TokenType::NewLine,
lexer::Token::LeftParen => TokenType::LeftParen,
lexer::Token::RightParen => TokenType::RightParen,
lexer::Token::LeftBrace => TokenType::LeftBrace,
lexer::Token::RightBrace => TokenType::RightBrace,
lexer::Token::LeftBracket => TokenType::LeftBracket,
lexer::Token::RightBracket => TokenType::RightBracket,
lexer::Token::Comma => TokenType::Comma,
lexer::Token::Dot => TokenType::Dot,
lexer::Token::Colon => TokenType::Colon,
lexer::Token::Underscore => TokenType::Underscore,
lexer::Token::Minus => TokenType::Minus,
lexer::Token::Plus => TokenType::Plus,
lexer::Token::Arrow => TokenType::Arrow,
lexer::Token::FatArrow => TokenType::FatArrow,
lexer::Token::Divide => TokenType::Divide,
lexer::Token::Multiply => TokenType::Multiply,
lexer::Token::Percent => TokenType::Percent,
lexer::Token::Dollar => TokenType::Dollar,
lexer::Token::Exclamation => TokenType::Exclamation,
lexer::Token::Question => TokenType::Question,
lexer::Token::Tilde => TokenType::Tilde,
lexer::Token::At => TokenType::At,
lexer::Token::Caret => TokenType::Caret,
lexer::Token::Shebang => TokenType::Shebang,
},
})
.collect();
Ok(tokens)
}

View file

@ -0,0 +1,46 @@
[package]
publish = false
edition = "2021"
name = "src-lsp-server"
version = "0.0.0"
[features]
default = ["tower-lsp/runtime-agnostic"]
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
anyhow = "1.0.57"
async-lock = "2.5.0"
console_error_panic_hook = "0.1.7"
dashmap = "5.3.4"
futures = "0.3.21"
indoc = "1.0"
js-sys = "0.3.57"
log = "0.4"
lsp = { version = "0.93", package = "lsp-types" }
lsp-text = "0.9"
ropey = "1.6.1"
serde_json = "1.0"
srclang = { version = "0.1.0", path = "../..", registry = "oksoftware" }
salsa = { version = "0.1.0", registry = "oksoftware", package = "salsa-2022" }
salsa-macros = { version = "0.1.0", registry = "oksoftware", package = "salsa-2022-macros" }
thiserror = "1.0"
tower-lsp = { version = "0.17.0", default-features = false }
wasm-bindgen = "0.2.81"
wasm-bindgen-futures = { version = "0.4.30", features = ["futures-core-03-stream"] }
wasm-streams = "0.2.3"
src-collections = { version = "0.1.0", path = "../src-collections", registry = "oksoftware" }
lazy_static = "1.4.0"
[dependencies.web-sys]
version = "0.3.57"
features = [
"console",
"CssStyleDeclaration",
"Document",
"ReadableStream",
"Window",
"WritableStream",
]

View file

@ -0,0 +1,158 @@
use lsp::{InitializeParams, InitializeResult, Url};
use lsp_text::RopeExt;
use salsa::function::DynDb;
use src_collections::Map;
use srclang::{
analyzer::{self, span_text},
compiler::text::{self, Document, SourceProgram},
parser::{
self,
span::{ByteOrLineColOrCoord, ByteOrLineColOrCoordInterned},
},
Db,
};
use std::{borrow::BorrowMut, sync::Mutex};
use tower_lsp::{jsonrpc, LanguageServer};
use crate::update_channel;
/// Language-server back end: owns the salsa analysis database used by every
/// request handler.
pub struct LspServerDatabase {
    // Mutex because tower-lsp handlers can run concurrently and the salsa
    // database here is accessed behind an exclusive lock.
    db: Mutex<srclang::analyzer::db::Database>,
}
impl LspServerDatabase {
pub fn new() -> Self {
Self {
db: Mutex::new(srclang::analyzer::db::Database::default()),
}
}
}
// Global singleton mapping each document URL to its `Document`, so the
// analyzer can look open files up from any handler. The map must be mutable
// (new files are added as they are opened), so a `Mutex` makes it thread safe.
lazy_static::lazy_static! {
static ref FILES: Mutex<Map<Url, Document>> = Mutex::new(Map::default());
}
#[tower_lsp::async_trait]
impl LanguageServer for LspServerDatabase {
    /// Minimal initialize; capabilities are advertised by the outer `Server`.
    async fn initialize(&self, _params: InitializeParams) -> jsonrpc::Result<InitializeResult> {
        web_sys::console::log_1(&"server::initialize".into());
        Ok(InitializeResult {
            ..InitializeResult::default()
        })
    }

    async fn shutdown(&self) -> jsonrpc::Result<()> {
        web_sys::console::log_1(&"server::shutdown".into());
        Ok(())
    }

    async fn initialized(&self, _: lsp::InitializedParams) {
        web_sys::console::log_1(&"server::initialized".into());
    }

    /// Register the newly opened document in the global `FILES` map and push
    /// its text to the syntax channel.
    async fn did_open(&self, params: lsp::DidOpenTextDocumentParams) {
        let url = params.text_document.uri;
        let text = params.text_document.text;
        let db = &*self.db.lock().unwrap();
        let document = text::Document::new(db, url.to_string(), ropey::Rope::from_str(&text));
        FILES.lock().unwrap().insert(url, document);
        update_channel(&text);
    }

    /// Apply the first content change to the stored rope and re-publish the
    /// document. Unknown documents and empty change lists are ignored instead
    /// of panicking (the previous `unwrap`s could crash the server).
    async fn did_change(&self, params: lsp::DidChangeTextDocumentParams) {
        let url = params.text_document.uri;
        let db = &*self.db.lock().unwrap();
        let mut files = FILES.lock().unwrap();
        // Change notification for a document we never saw opened: ignore.
        let file = match files.get(&url) {
            Some(file) => file,
            None => return,
        };
        let change = match params.content_changes.first() {
            Some(change) => change,
            None => return,
        };
        let mut rope = file.text(db);
        let text_edit = match rope.build_edit(change) {
            Ok(edit) => edit,
            None_or_err @ Err(_) => {
                // Malformed edit range: drop the change rather than panic.
                let _ = None_or_err;
                return;
            }
        };
        rope.apply_edit(&text_edit);
        // `Rope` implements `Display`; no clone needed just to stringify.
        let text = rope.to_string();
        files.insert(url.clone(), text::Document::new(db, url.to_string(), rope));
        update_channel(&text);
    }

    async fn document_symbol(
        &self,
        _params: lsp::DocumentSymbolParams,
    ) -> jsonrpc::Result<Option<lsp::DocumentSymbolResponse>> {
        web_sys::console::log_1(&"server::document_symbol".into());
        // TODO(review): not implemented yet; always reports "no symbols".
        Ok(None)
    }

    /// Show a debug rendering of the symbol under the cursor, if the analyzer
    /// finds one at the requested line/column.
    async fn hover(&self, params: lsp::HoverParams) -> jsonrpc::Result<Option<lsp::Hover>> {
        web_sys::console::log_1(&"server::hover".into());
        let db = &*self.db.lock().unwrap();
        let url = params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;
        let files = FILES.lock().unwrap();
        // Hover for an unknown document: report "no hover" rather than panic.
        let document = match files.get(&url) {
            Some(document) => document,
            None => return Ok(None),
        };
        let text = document.text(db);
        let source = text::SourceProgram::new(db, url.to_string(), text.to_string());
        let line: usize = position.line.try_into().unwrap();
        let character: usize = position.character.try_into().unwrap();
        web_sys::console::log_1(&format!("line: {}, character: {}", line, character).into());
        let spanned = analyzer::get_symbol(
            db,
            source,
            ByteOrLineColOrCoordInterned::new(db, ByteOrLineColOrCoord::LineCol(line, character)),
        );
        web_sys::console::log_1(&format!("{:#?}", spanned).into());
        // Render the symbol as a fenced code block in the hover popup.
        let hover = spanned.map(|span| {
            // let text = span_text(db, span);
            lsp::Hover {
                contents: lsp::HoverContents::Markup(lsp::MarkupContent {
                    kind: lsp::MarkupKind::Markdown,
                    // "analyzer" was previously misspelled "anaylzer".
                    value: format!("```src\n{:#?}\n```src analyzer", span),
                }),
                range: None,
            }
        });
        Ok(hover)
    }

    /// Placeholder: returns a single zero-length highlight at the origin when
    /// the analyzer finds a symbol at the cursor.
    async fn document_highlight(
        &self,
        params: lsp::DocumentHighlightParams,
    ) -> jsonrpc::Result<Option<Vec<lsp::DocumentHighlight>>> {
        web_sys::console::log_1(&"server::document_highlight".into());
        let db = &*self.db.lock().unwrap();
        let url = params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;
        let files = FILES.lock().unwrap();
        let document = match files.get(&url) {
            Some(document) => document,
            None => return Ok(None),
        };
        let text = document.text(db);
        let source = text::SourceProgram::new(db, url.to_string(), text.to_string());
        let symbol = analyzer::get_symbol(
            db,
            source,
            ByteOrLineColOrCoordInterned::new(
                db,
                ByteOrLineColOrCoord::LineCol(position.line as usize, position.character as usize),
            ),
        );
        let highlight_kind = lsp::DocumentHighlightKind::TEXT;
        let highlights = symbol.map(|_| {
            vec![lsp::DocumentHighlight {
                range: lsp::Range {
                    start: lsp::Position {
                        line: 0,
                        character: 0,
                    },
                    end: lsp::Position {
                        line: 0,
                        character: 0,
                    },
                },
                kind: Some(highlight_kind),
            }]
        });
        Ok(highlights)
    }
}

View file

@ -0,0 +1,14 @@
#![deny(clippy::all)]
#![deny(unsafe_code)]
// mod core;
// pub mod handler;
mod server;
mod db;
use srclang::compiler;
pub use server::*;
pub use srclang::Jar;

View file

@ -0,0 +1,200 @@
use anyhow::anyhow;
use srclang::parser;
use std::result::Result::Ok;
use std::sync::Arc;
use tower_lsp::{jsonrpc, lsp_types::*, LanguageServer};
use wasm_bindgen::JsCast;
use web_sys::HtmlTextAreaElement;
/// Build the set of LSP capabilities this server advertises to clients.
pub fn capabilities() -> lsp::ServerCapabilities {
    // Full-document sync on open/close and on every change.
    let text_document_sync = Some(lsp::TextDocumentSyncCapability::Options(
        lsp::TextDocumentSyncOptions {
            open_close: Some(true),
            change: Some(lsp::TextDocumentSyncKind::FULL),
            ..Default::default()
        },
    ));
    let document_symbol_provider = Some(lsp::OneOf::Left(true));
    let hover_provider = Some(true.into());
    // Semantic tokens with an empty legend for now; range and delta requests
    // are both accepted.
    let semantic_tokens_provider = Some(
        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(lsp::SemanticTokensOptions {
            legend: lsp::SemanticTokensLegend {
                token_types: vec![],
                token_modifiers: vec![],
            },
            range: Some(true),
            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
            ..Default::default()
        }),
    );
    // Computed but intentionally not advertised yet (see the commented field
    // in the struct literal below).
    let _document_highlight_provider: Option<OneOf<bool, DocumentHighlightOptions>> =
        Some(lsp::OneOf::Left(true));
    let code_lens_provider = Some(lsp::CodeLensOptions {
        resolve_provider: Some(true),
    });
    let completion_provider = Some(CompletionOptions {
        resolve_provider: Some(true),
        trigger_characters: Some(vec![".".into()]),
        work_done_progress_options: WorkDoneProgressOptions {
            work_done_progress: None,
        },
        all_commit_characters: None,
    });
    lsp::ServerCapabilities {
        text_document_sync,
        document_symbol_provider,
        hover_provider,
        semantic_tokens_provider,
        // document_highlight_provider,
        code_lens_provider,
        completion_provider,
        ..Default::default()
    }
}
/// Front-end LSP server: logs each request to the browser console and
/// delegates the actual work to the database layer in `crate::db`.
pub struct Server {
    // Handle for sending notifications/messages back to the editor client.
    pub client: tower_lsp::Client,
    // Shared analysis back end; `Arc` so handlers can hold it across awaits.
    pub db: Arc<crate::db::LspServerDatabase>,
}
impl Server {
    /// Build a server for `client` with a freshly initialized database layer.
    pub fn new(client: tower_lsp::Client) -> Self {
        let db = Arc::new(crate::db::LspServerDatabase::new());
        Server { client, db }
    }
}
#[tower_lsp::async_trait]
impl LanguageServer for Server {
    /// Advertise this server's identity and capabilities to the client.
    async fn initialize(&self, params: InitializeParams) -> jsonrpc::Result<InitializeResult> {
        web_sys::console::log_1(&"server::initialize".into());
        let capabilities = capabilities();
        Ok(InitializeResult {
            server_info: Some(
                ServerInfo {
                    name: "src language server".to_string(),
                    version: Some(env!("CARGO_PKG_VERSION").to_string()),
                }
                .into(),
            ),
            capabilities,
        })
    }
    /// Post a greeting to the client's log window once the handshake is done.
    async fn initialized(&self, _: lsp::InitializedParams) {
        web_sys::console::log_1(&"server::initialized".into());
        let typ = lsp::MessageType::INFO;
        let message = "src language server initialized!";
        self.client.log_message(typ, message).await;
    }
    async fn shutdown(&self) -> jsonrpc::Result<()> {
        web_sys::console::log_1(&"server::shutdown".into());
        Ok(())
    }
    // FIXME: for some reason this doesn't trigger
    /// Delegate open notifications to the database layer.
    async fn did_open(&self, params: lsp::DidOpenTextDocumentParams) {
        web_sys::console::log_1(&"server::did_open".into());
        self.db.did_open(params).await;
    }
    /// Delegate change notifications to the database layer.
    async fn did_change(&self, params: lsp::DidChangeTextDocumentParams) {
        web_sys::console::log_1(&"server::did_change".into());
        self.db.did_change(params).await;
    }
    /// Delegate document-symbol requests to the database layer.
    async fn document_symbol(
        &self,
        params: lsp::DocumentSymbolParams,
    ) -> jsonrpc::Result<Option<lsp::DocumentSymbolResponse>> {
        web_sys::console::log_1(&"server::document_symbol".into());
        self.db.document_symbol(params).await
    }
    /// Delegate hover requests to the database layer.
    async fn hover(&self, params: lsp::HoverParams) -> jsonrpc::Result<Option<lsp::Hover>> {
        web_sys::console::log_1(&"server::hover".into());
        self.db.hover(params).await
    }
    /// Delegate document-highlight requests to the database layer.
    async fn document_highlight(
        &self,
        params: lsp::DocumentHighlightParams,
    ) -> jsonrpc::Result<Option<Vec<lsp::DocumentHighlight>>> {
        web_sys::console::log_1(&"server::document_highlight".into());
        self.db.document_highlight(params).await
    }
}
pub fn get_channel_syntax() -> anyhow::Result<web_sys::HtmlTextAreaElement> {
use wasm_bindgen::JsCast;
let element_id = "channel-syntax";
let channel_syntax = web_sys::window()
.ok_or_else(|| anyhow!("failed to get window"))?
.document()
.ok_or_else(|| anyhow!("failed to get document"))?
.get_element_by_id(element_id)
.ok_or_else(|| anyhow!("failed to get channel-syntax element"))?
.unchecked_into();
Ok(channel_syntax)
}
/// Publish the latest buffer contents to the `#channel-syntax` panel.
///
/// The real implementation (parse the buffer, render the tree, and color the
/// textarea border green/red for success/failure) is currently disabled while
/// the parser API settles; the commented code below documents the intent.
/// The stub is kept so callers compile unchanged. The parameter is prefixed
/// with `_` to silence the unused-variable warning in the meantime.
pub fn update_channel(_input: &str) {
    // let tree = parser::parse(input);
    // // assume errors; use red
    // let element_id = "channel-syntax";
    // match tree {
    //     Ok(module) => {
    //         // use green
    //         web_sys::window()
    //             .unwrap()
    //             .document()
    //             .unwrap()
    //             .get_element_by_id(element_id)
    //             .unwrap()
    //             .set_inner_html(&format!("{:#?}", module));
    //         // set the border of the textarea to green
    //         web_sys::window()
    //             .unwrap()
    //             .document()
    //             .unwrap()
    //             .get_element_by_id(element_id)
    //             .unwrap()
    //             .unchecked_into::<HtmlTextAreaElement>()
    //             .style()
    //             .set_property("border", "1px solid green")
    //             .unwrap();
    //     }
    //     Err(errs) => {
    //         // use red
    //         web_sys::window()
    //             .unwrap()
    //             .document()
    //             .unwrap()
    //             .get_element_by_id(element_id)
    //             .unwrap()
    //             .set_inner_html(&format!("{:#?}", errs));
    //         // set the border of the textarea to red
    //         web_sys::window()
    //             .unwrap()
    //             .document()
    //             .unwrap()
    //             .get_element_by_id(element_id)
    //             .unwrap()
    //             .unchecked_into::<HtmlTextAreaElement>()
    //             .style()
    //             .set_property("border", "1px solid red")
    //             .unwrap();
    //     }
    // };
}

View file

@ -41,3 +41,14 @@ impl Make for Local {
}
}
```
## Acknowledgements
Building upon the incredible work of the Rust community and many others, src would not be possible without the following projects:
- [salsa-rs](https://github.com/salsa-rs/salsa)
- [lalrpop](https://github.com/lalrpop/lalrpop)
- [tower-lsp-web-demo](https://github.com/silvanshade/tower-lsp-web-demo)
- [tower-lsp-boilerplate](https://github.com/IWANABETHATGUY/tower-lsp-boilerplate)
- [tower-lsp](https://github.com/tower-rs/tower-lsp)
- [gluon-lang](https://gluon-lang.org/)

View file

@ -1,5 +1,21 @@
# Summary
- [Intro](0intro.md)
# Language
- [Specification](language/0intro.md)
- [Examples](examples.md)
- [Language](language/0intro.md)
# Playground
* [Compiler Explorer](playground/index.md)
# Skills
- [Skills](skill-tree.md)
# Research Notes
- [Research](research.md)

View file

@ -1,5 +1,6 @@
# Examples
## innitguv
```src
use { native_fs, native_exec } from host
use { fs } from std
@ -27,4 +28,4 @@ impl Actor for Innitguv {
self.exec(msg.path, msg.args)
}
}
```
```

22
docs/playground/index.md Normal file
View file

@ -0,0 +1,22 @@
<div id="container">
<div id="cell-editor">
<label for="editor">editor</label>
<div id="editor"></div>
</div>
<div id="cell-syntax">
<label for="channel-syntax">syntax</label>
<textarea id="channel-syntax" autocomplete="off" spellcheck="off" wrap="off" readonly></textarea>
</div>
<div id="cell-client">
<label for="channel-client">message trace (client ↗ server)</label>
<textarea id="channel-client" autocomplete="off" spellcheck="off" wrap="off" readonly rows="4"></textarea>
</div>
<div id="cell-server">
<label for="channel-server">message trace (client ↙ server)</label>
<textarea id="channel-server" autocomplete="off" spellcheck="off" wrap="off" readonly rows="4"></textarea>
</div>
<div id="cell-console">
<label for="channel-console">console</label>
<textarea id="channel-console" autocomplete="off" spellcheck="off" wrap="off" readonly rows="3"></textarea>
</div>
</div>

BIN
docs/playground/taocp.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 822 KiB

3
docs/research.md Normal file
View file

@ -0,0 +1,3 @@
# Research
- [Hindley-Milner Type Inference](https://en.wikipedia.org/wiki/Hindley%E2%80%93Milner_type_system)

16
docs/skill-tree.md Normal file
View file

@ -0,0 +1,16 @@
<style type="text/css">
.content main {
/* margin: 0px auto; */
/* max-width: 800px; */
margin-inline-start: revert !important;
margin-inline-end: revert !important;
}
</style>
# Skills
```skill-tree
{{#include src-skill-tree.toml}}
```

95
docs/src-skill-tree.toml Normal file
View file

@ -0,0 +1,95 @@
[graphviz]
rankdir = "TD"
[doc]
columns = ["status", "assigned"]
[doc.defaults]
status = "tbd"
assigned = "no"
[doc.emoji.status]
"tbd" = "░️░️░️░️░️"
"exploration" = "█░️░️░️░️"
"design" = "██░️░️░️"
"implementation" = "███░️░️"
"stabilization" = "████░️"
"done" = "█████"
[doc.emoji.assigned]
"no" = "🫥"
"yes" = "👍"
"blocked" = "🤐"
[[group]]
name = "srclang"
label = "src language"
description = ["src programming language"]
items = [
{ label = "lexer", status = "stabilization" },
{ label = "parser", status = "stabilization" },
{ label = "lsp-server", status = "implementation" },
{ label = "type checker", status = "exploration" },
{ label = "codegen", status = "exploration" },
]
requires = ["lexer", "parser", "type checker", "codegen", "lsp-server", "vm"]
[[group]]
name = "lexer"
label = "lexer"
description = ["lexer for src programming language"]
requires = []
items = [
{ label = "lexer", status = "stabilization", assigned = "yes" },
{ label = "forbid unsafe", status = "exploration", assigned = "yes" },
{ label = "better diagnostics", status = "implementation", assigned = "yes" },
]
[[group]]
name = "parser"
label = "parser"
description = ["parser for src programming language"]
requires = ["lexer"]
items = [
{ label = "implement parser", status = "stabilization", assigned = "yes" },
]
[[group]]
name = "type checker"
label = "type checker"
description = ["type checker for src programming language"]
requires = ["lexer", "parser"]
items = [
{ label = "type inference", status = "exploration" },
{ label = "type checking", status = "exploration" },
{ label = "type error reporting", status = "exploration" },
{ label = "type checker", status = "exploration" },
]
[[group]]
name = "codegen"
label = "Code Generator"
description = ["Code Generator for src programming language"]
requires = ["lexer", "parser", "type checker"]
items = [{ label = "codegen", status = "exploration" }]
[[group]]
name = "lsp-server"
label = "LSP Server"
description = ["Language Server Protocol Server for src programming language"]
requires = ["lexer", "parser", "type checker"]
items = [
{ label = "apply edits to ropey on documentDidChange", status = "done" },
]
[[group]]
name = "vm"
label = "VM"
description = ["VM for src programming language"]
requires = ["lexer", "parser", "type checker", "codegen"]
items = [
{ label = "vm", status = "exploration" },
{ label = "vm tests", status = "exploration" },
]

View file

@ -24,7 +24,6 @@
<link rel="stylesheet" href="{{ path_to_root }}css/variables.css">
<link rel="stylesheet" href="{{ path_to_root }}css/general.css">
<link rel="stylesheet" href="{{ path_to_root }}css/chrome.css">
<link rel="stylesheet" href="{{ path_to_root }}ok.css"
{{#if print_enable}}
<link rel="stylesheet" href="{{ path_to_root }}css/print.css" media="print">
{{/if}}
@ -150,7 +149,7 @@
🍔
</label>
<button id="theme-toggle" class="nes-btn" type="button" title="Change theme" aria-label="Change theme" aria-haspopup="true" aria-expanded="false" aria-controls="theme-list">
🎨
💅
</button>
<ul id="theme-list" class="is-primary theme-popup" aria-label="Themes" role="menu">
<li role="none"><button role="menuitem" class="theme" id="light">Light</button></li>

72390
expanded.ast.rs Normal file

File diff suppressed because it is too large Load diff

90
full.render.js Normal file

File diff suppressed because one or more lines are too long

9
ok.css
View file

@ -2940,6 +2940,10 @@ pre {
padding: 16px;
overflow-x: auto;
}
label {
font-family: "Press Start 2P", cursive !important;
font-size: x-small !important;
}
nav,
pre
code {
@ -2947,11 +2951,6 @@ code {
font-size: x-small !important;
}
main {
margin: 0px auto;
max-width: 800px;
}
.nes-btn{
/* font-size: 1.1em; */
}

6502
package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

10
package.json Normal file
View file

@ -0,0 +1,10 @@
{
"private": true,
"workspaces": [
"packages/app"
],
"dependencies": {
"tree-sitter-javascript": "^0.19.0",
"web-tree-sitter-wasm-bindgen": "silvanshade/web-tree-sitter-wasm-bindgen"
}
}

View file

@ -0,0 +1 @@
dist

View file

@ -0,0 +1,51 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>
<%= htmlWebpackPlugin.options.title %>
</title>
</head>
<body>
<div id="container">
<h1 id="title">browser-hosted editor and language server</h1>
<p id="synopsis">
This app demos an editor with language smarts (for JavaScript) by hosting the <a
href="https://microsoft.github.io/monaco-editor/">Monaco</a> widget with an <a
href="https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/">LSP</a>
server implemented via <a href="https://github.com/ebkalderon/tower-lsp">tower-lsp</a> and <a
href="https://github.com/tree-sitter/tree-sitter">tree-sitter</a>. Everything is compiled to WASM and run
client-side directly in the browser; there are no separate server-side processes or web workers used by the app
code. (Monaco itself does use web workers, however).
</p>
<p id="features">
<strong>features</strong>: ⇧⌘O (macos) or ⇧⌃O (windows) opens symbol view; the <strong>syntax</strong> area shows
the JavaScript syntax tree (green for valid; red for errors) parsed from <strong>editor</strong>
</p>
<div id="cell-editor">
<label for="editor">editor</label>
<div id="editor"></div>
</div>
<div id="cell-syntax">
<label for="channel-syntax">syntax</label>
<textarea id="channel-syntax" autocomplete="off" spellcheck="off" wrap="off" readonly></textarea>
</div>
<div id="cell-console">
<label for="channel-console">console</label>
<textarea id="channel-console" autocomplete="off" spellcheck="off" wrap="off" readonly rows="3"></textarea>
</div>
<div id="cell-client">
<label for="channel-client">message trace (client ⇒ server)</label>
<textarea id="channel-client" autocomplete="off" spellcheck="off" wrap="off" readonly rows="4"></textarea>
</div>
<div id="cell-server">
<label for="channel-server">message trace (client ⇐ server)</label>
<textarea id="channel-server" autocomplete="off" spellcheck="off" wrap="off" readonly rows="4"></textarea>
</div>
</div>
</body>
</html>

View file

@ -0,0 +1,116 @@
a {
color: mediumslateblue;
}
a:visited {
color: silver;
}
/*
body {
display: flex;
height: 100vh;
background-color: black;
color: white;
} */
div[id=container] {
display: grid;
height: 90%;
min-height: 600px;
grid-template-rows: auto auto auto minmax(0, 1fr) auto auto auto;
grid-template-columns: repeat(2, minmax(0, 1fr));
margin: auto;
}
/*
h1[id=title] {
grid-column: 1 / 3;
grid-row: 1;
font-family: monospace;
font-size: 2em;
}
p[id=synopsis] {
color: lightgrey;
grid-column: 1 / 3;
grid-row: 2;
margin: 0;
font-family: sans-serif;
font-size: 10pt;
line-height: 1.5em;
}
p[id=features] {
grid-column: 1 / 3;
grid-row: 3;
text-align: center;
font-family: sans-serif;
font-style: italic;
font-size: 10pt;
line-height: 1.5em;
color: lightgrey;
}
div[id=cell-editor] {
grid-column: 1 / 2;
grid-row: 4;
} */
[id=cell-editor] div[id=editor] {
border: 1px solid black;
height: calc(100vh - 500px);
padding-right: 1.5em;
}
/*
div[id=cell-syntax] {
grid-column: 2 / 3;
grid-row: 4;
} */
div[id=cell-syntax] textarea {
height: calc(100vh - 500px);
}
/*
[id=container] label {
display: block;
font-family: monospace;
font-size: 14pt;
}
div[id=cell-console] {
grid-column: 1 / 3;
grid-row: 5;
margin-top: 2em;
overflow: hidden;
}
div[id=cell-client] {
grid-column: 1 / 3;
grid-row: 6;
overflow: hidden;
}
div[id=cell-server] {
grid-column: 1 / 3;
grid-row: 7;
overflow: hidden;
}
div[id=container] textarea {
display: block;
box-sizing: border-box;
width: 100%;
resize: none;
overflow-y: auto;
font-family: monospace;
font-family: 10pt;
} */
main {
max-width: 100%;
}
.content main {
margin-inline-start: auto;
margin-inline-end: auto;
max-width: 100%;
}

View file

@ -0,0 +1 @@
export function bytes_literal() { return "bytes"; }

View file

@ -0,0 +1,242 @@
/* tslint:disable */
/* eslint-disable */
/**
* @param {ServerConfig} config
* @returns {Promise<void>}
*/
export function serve(config: ServerConfig): Promise<void>;
/**
* @param {TokenType} token_type
* @returns {string}
*/
export function token_type_as_js_string(token_type: TokenType): string;
/**
* @param {string} input
* @returns {(TokenSpan)[]}
*/
export function tokenize(input: string): (TokenSpan)[];
/**
*/
export enum TokenType {
Pipe = 0,
Ampersand = 1,
Semicolon = 2,
Equals = 3,
LessThan = 4,
GreaterThan = 5,
Variable = 6,
Word = 7,
String = 8,
Comment = 9,
Integer = 10,
Float = 11,
Eof = 12,
NewLine = 13,
LeftParen = 14,
RightParen = 15,
LeftBrace = 16,
RightBrace = 17,
LeftBracket = 18,
RightBracket = 19,
Comma = 20,
Dot = 21,
Colon = 22,
Underscore = 23,
Minus = 24,
Plus = 25,
Arrow = 26,
FatArrow = 27,
Divide = 28,
Multiply = 29,
Percent = 30,
Dollar = 31,
Exclamation = 32,
Question = 33,
Tilde = 34,
At = 35,
Caret = 36,
Shebang = 37,
}
/**
*/
export class IntoUnderlyingByteSource {
free(): void;
/**
* @param {any} controller
*/
start(controller: any): void;
/**
* @param {any} controller
* @returns {Promise<any>}
*/
pull(controller: any): Promise<any>;
/**
*/
cancel(): void;
/**
*/
readonly autoAllocateChunkSize: number;
/**
*/
readonly type: any;
}
/**
*/
export class IntoUnderlyingSink {
free(): void;
/**
* @param {any} chunk
* @returns {Promise<any>}
*/
write(chunk: any): Promise<any>;
/**
* @returns {Promise<any>}
*/
close(): Promise<any>;
/**
* @param {any} reason
* @returns {Promise<any>}
*/
abort(reason: any): Promise<any>;
}
/**
*/
export class IntoUnderlyingSource {
free(): void;
/**
* @param {any} controller
* @returns {Promise<any>}
*/
pull(controller: any): Promise<any>;
/**
*/
cancel(): void;
}
/**
* Raw options for [`pipeTo()`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/pipeTo).
*/
export class PipeOptions {
free(): void;
/**
*/
readonly preventAbort: boolean;
/**
*/
readonly preventCancel: boolean;
/**
*/
readonly preventClose: boolean;
/**
*/
readonly signal: AbortSignal | undefined;
}
/**
*/
export class QueuingStrategy {
free(): void;
/**
*/
readonly highWaterMark: number;
}
/**
* Raw options for [`getReader()`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/getReader).
*/
export class ReadableStreamGetReaderOptions {
free(): void;
/**
*/
readonly mode: any;
}
/**
*/
export class ServerConfig {
free(): void;
/**
* @param {AsyncIterator<any>} into_server
* @param {WritableStream} from_server
*/
constructor(into_server: AsyncIterator<any>, from_server: WritableStream);
}
/**
*/
export class TokenSpan {
free(): void;
/**
*/
end: number;
/**
*/
scope: TokenType;
/**
*/
start: number;
}
export type InitInput = RequestInfo | URL | Response | BufferSource | WebAssembly.Module;
export interface InitOutput {
readonly memory: WebAssembly.Memory;
readonly __wbg_serverconfig_free: (a: number) => void;
readonly serverconfig_new: (a: number, b: number) => number;
readonly serve: (a: number) => number;
readonly __wbg_tokenspan_free: (a: number) => void;
readonly __wbg_get_tokenspan_start: (a: number) => number;
readonly __wbg_set_tokenspan_start: (a: number, b: number) => void;
readonly __wbg_get_tokenspan_end: (a: number) => number;
readonly __wbg_set_tokenspan_end: (a: number, b: number) => void;
readonly __wbg_get_tokenspan_scope: (a: number) => number;
readonly __wbg_set_tokenspan_scope: (a: number, b: number) => void;
readonly token_type_as_js_string: (a: number) => number;
readonly tokenize: (a: number, b: number, c: number) => void;
readonly __wbg_intounderlyingsink_free: (a: number) => void;
readonly intounderlyingsink_write: (a: number, b: number) => number;
readonly intounderlyingsink_close: (a: number) => number;
readonly intounderlyingsink_abort: (a: number, b: number) => number;
readonly __wbg_intounderlyingsource_free: (a: number) => void;
readonly intounderlyingsource_pull: (a: number, b: number) => number;
readonly intounderlyingsource_cancel: (a: number) => void;
readonly __wbg_readablestreamgetreaderoptions_free: (a: number) => void;
readonly readablestreamgetreaderoptions_mode: (a: number) => number;
readonly __wbg_pipeoptions_free: (a: number) => void;
readonly pipeoptions_preventClose: (a: number) => number;
readonly pipeoptions_preventCancel: (a: number) => number;
readonly pipeoptions_preventAbort: (a: number) => number;
readonly pipeoptions_signal: (a: number) => number;
readonly __wbg_queuingstrategy_free: (a: number) => void;
readonly queuingstrategy_highWaterMark: (a: number) => number;
readonly __wbg_intounderlyingbytesource_free: (a: number) => void;
readonly intounderlyingbytesource_type: (a: number) => number;
readonly intounderlyingbytesource_autoAllocateChunkSize: (a: number) => number;
readonly intounderlyingbytesource_start: (a: number, b: number) => void;
readonly intounderlyingbytesource_pull: (a: number, b: number) => number;
readonly intounderlyingbytesource_cancel: (a: number) => void;
readonly __wbindgen_malloc: (a: number, b: number) => number;
readonly __wbindgen_realloc: (a: number, b: number, c: number, d: number) => number;
readonly __wbindgen_export_2: WebAssembly.Table;
readonly _dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hef4ca354c23a4fc7: (a: number, b: number, c: number) => void;
readonly __wbindgen_add_to_stack_pointer: (a: number) => number;
readonly __wbindgen_free: (a: number, b: number, c: number) => void;
readonly __wbindgen_exn_store: (a: number) => void;
readonly wasm_bindgen__convert__closures__invoke2_mut__hd246805203d2bb11: (a: number, b: number, c: number, d: number) => void;
}
export type SyncInitInput = BufferSource | WebAssembly.Module;
/**
* Instantiates the given `module`, which can either be bytes or
* a precompiled `WebAssembly.Module`.
*
* @param {SyncInitInput} module
*
* @returns {InitOutput}
*/
export function initSync(module: SyncInitInput): InitOutput;
/**
* If `module_or_path` is {RequestInfo} or {URL}, makes a request and
* for everything else, calls `WebAssembly.instantiate` directly.
*
* @param {InitInput | Promise<InitInput>} module_or_path
*
* @returns {Promise<InitOutput>}
*/
export default function __wbg_init (module_or_path?: InitInput | Promise<InitInput>): Promise<InitOutput>;

View file

@ -0,0 +1,945 @@
import { bytes_literal } from './snippets/wasm-streams-42e57edbcd526312/inline0.js';
// Handle to the instantiated wasm module's exports; assigned during init.
let wasm;

// Table of JS objects shared with wasm. Slots are addressed by integer index
// from the wasm side; the first pushed entries are common constants.
const heap = new Array(128).fill(undefined);

heap.push(undefined, null, true, false);

// Resolve a heap index to the JS object stored there.
function getObject(idx) { return heap[idx]; }

// Head of the free-slot list: free slots store the index of the next free
// slot, forming an intrusive linked list inside `heap`.
let heap_next = heap.length;

// Return a heap slot to the free list. Indices below 132 are reserved
// (the pre-filled constants region) and are never released.
function dropObject(idx) {
    if (idx < 132) return;
    heap[idx] = heap_next;
    heap_next = idx;
}

// Fetch the object at `idx` and release its slot in one step.
function takeObject(idx) {
    const ret = getObject(idx);
    dropObject(idx);
    return ret;
}
// UTF-8 decoder for reading strings out of wasm memory; falls back to a
// throwing stub in environments without TextDecoder.
const cachedTextDecoder = (typeof TextDecoder !== 'undefined' ? new TextDecoder('utf-8', { ignoreBOM: true, fatal: true }) : { decode: () => { throw Error('TextDecoder not available') } } );

// Eagerly exercise the decoder once — presumably to surface unavailability
// early rather than on first real use (wasm-bindgen boilerplate).
if (typeof TextDecoder !== 'undefined') { cachedTextDecoder.decode(); };

// Cached view over wasm linear memory; invalidated when memory grows
// (growth detaches the old buffer, making byteLength 0).
let cachedUint8Memory0 = null;

function getUint8Memory0() {
    if (cachedUint8Memory0 === null || cachedUint8Memory0.byteLength === 0) {
        cachedUint8Memory0 = new Uint8Array(wasm.memory.buffer);
    }
    return cachedUint8Memory0;
}

// Decode `len` UTF-8 bytes at `ptr` in wasm memory into a JS string.
function getStringFromWasm0(ptr, len) {
    ptr = ptr >>> 0;
    return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len));
}

// Store `obj` in the heap table and return its index, growing the table if
// the free list is exhausted.
function addHeapObject(obj) {
    if (heap_next === heap.length) heap.push(heap.length + 1);
    const idx = heap_next;
    heap_next = heap[idx];
    heap[idx] = obj;
    return idx;
}
// Byte length of the most recently passed string/vector; read by callers of
// passStringToWasm0 right after the call.
let WASM_VECTOR_LEN = 0;

// UTF-8 encoder for writing JS strings into wasm memory; throwing stub where
// TextEncoder is unavailable.
const cachedTextEncoder = (typeof TextEncoder !== 'undefined' ? new TextEncoder('utf-8') : { encode: () => { throw Error('TextEncoder not available') } } );

// Prefer encodeInto (writes directly into the target view) when the platform
// supports it; otherwise encode to a temp buffer and copy.
const encodeString = (typeof cachedTextEncoder.encodeInto === 'function'
    ? function (arg, view) {
    return cachedTextEncoder.encodeInto(arg, view);
}
    : function (arg, view) {
    const buf = cachedTextEncoder.encode(arg);
    view.set(buf);
    return {
        read: arg.length,
        written: buf.length
    };
});

// Copy the JS string `arg` into wasm memory and return the pointer; the byte
// length is left in WASM_VECTOR_LEN. With no `realloc`, encodes in one shot;
// otherwise uses a fast ASCII loop first, then reallocs assuming up to
// 3 bytes per remaining (non-ASCII) char and encodes the tail.
function passStringToWasm0(arg, malloc, realloc) {
    if (realloc === undefined) {
        const buf = cachedTextEncoder.encode(arg);
        const ptr = malloc(buf.length, 1) >>> 0;
        getUint8Memory0().subarray(ptr, ptr + buf.length).set(buf);
        WASM_VECTOR_LEN = buf.length;
        return ptr;
    }
    let len = arg.length;
    let ptr = malloc(len, 1) >>> 0;
    const mem = getUint8Memory0();
    let offset = 0;
    // Fast path: copy code points while they stay in the ASCII range.
    for (; offset < len; offset++) {
        const code = arg.charCodeAt(offset);
        if (code > 0x7F) break;
        mem[ptr + offset] = code;
    }
    if (offset !== len) {
        if (offset !== 0) {
            arg = arg.slice(offset);
        }
        // Worst case for UTF-8 from UTF-16 units is 3 bytes per unit.
        ptr = realloc(ptr, len, len = offset + arg.length * 3, 1) >>> 0;
        const view = getUint8Memory0().subarray(ptr + offset, ptr + len);
        const ret = encodeString(arg, view);
        offset += ret.written;
        // Shrink the allocation to the bytes actually written.
        ptr = realloc(ptr, len, offset, 1) >>> 0;
    }
    WASM_VECTOR_LEN = offset;
    return ptr;
}
// True for both `undefined` and `null` (wasm-bindgen's notion of "none").
function isLikeNone(x) {
    return x === undefined || x === null;
}

// Cached Int32 view over wasm memory; refreshed after memory growth detaches
// the previous buffer.
let cachedInt32Memory0 = null;

function getInt32Memory0() {
    if (cachedInt32Memory0 === null || cachedInt32Memory0.byteLength === 0) {
        cachedInt32Memory0 = new Int32Array(wasm.memory.buffer);
    }
    return cachedInt32Memory0;
}
// Produce a Rust-Debug-like rendering of an arbitrary JS value, used when
// formatting values for error messages and console output.
function debugString(val) {
    // primitive types
    const type = typeof val;
    if (type == 'number' || type == 'boolean' || val == null) {
        return `${val}`;
    }
    if (type == 'string') {
        return `"${val}"`;
    }
    if (type == 'symbol') {
        const description = val.description;
        if (description == null) {
            return 'Symbol';
        } else {
            return `Symbol(${description})`;
        }
    }
    if (type == 'function') {
        const name = val.name;
        if (typeof name == 'string' && name.length > 0) {
            return `Function(${name})`;
        } else {
            return 'Function';
        }
    }
    // objects
    if (Array.isArray(val)) {
        const length = val.length;
        let debug = '[';
        if (length > 0) {
            debug += debugString(val[0]);
        }
        for(let i = 1; i < length; i++) {
            debug += ', ' + debugString(val[i]);
        }
        debug += ']';
        return debug;
    }
    // Test for built-in: extract "ClassName" from "[object ClassName]".
    // NOTE(review): bare `toString` resolves via the global object to
    // Object.prototype.toString — wasm-bindgen boilerplate.
    const builtInMatches = /\[object ([^\]]+)\]/.exec(toString.call(val));
    let className;
    if (builtInMatches.length > 1) {
        className = builtInMatches[1];
    } else {
        // Failed to match the standard '[object ClassName]'
        return toString.call(val);
    }
    if (className == 'Object') {
        // we're a user defined class or Object
        // JSON.stringify avoids problems with cycles, and is generally much
        // easier than looping through ownProperties of `val`.
        try {
            return 'Object(' + JSON.stringify(val) + ')';
        } catch (_) {
            return 'Object';
        }
    }
    // errors
    if (val instanceof Error) {
        return `${val.name}: ${val.message}\n${val.stack}`;
    }
    // TODO we could test for more things here, like `Set`s and `Map`s.
    return className;
}
// Runs the Rust-side destructor for a closure's captured state once the JS
// wrapper function is garbage-collected. No-op shim when FinalizationRegistry
// is unavailable (older runtimes) — closures then leak on the Rust side.
const CLOSURE_DTORS = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(state => {
        wasm.__wbindgen_export_2.get(state.dtor)(state.a, state.b)
    });
// Wrap a Rust FnMut closure — identified by its two data words (arg0, arg1)
// and a destructor-table index `dtor` — as a callable JS function. `f` is the
// trampoline that actually enters wasm.
function makeMutClosure(arg0, arg1, dtor, f) {
    const state = { a: arg0, b: arg1, cnt: 1, dtor };
    const real = (...args) => {
        // First up with a closure we increment the internal reference
        // count. This ensures that the Rust closure environment won't
        // be deallocated while we're invoking it.
        state.cnt++;
        // Zeroing `state.a` during the call marks the closure as "borrowed"
        // so reentrant drops can detect it.
        const a = state.a;
        state.a = 0;
        try {
            return f(a, state.b, ...args);
        } finally {
            if (--state.cnt === 0) {
                // Last reference released while we were running: destroy now.
                wasm.__wbindgen_export_2.get(state.dtor)(a, state.b);
                CLOSURE_DTORS.unregister(state);
            } else {
                state.a = a;
            }
        }
    };
    real.original = state;
    CLOSURE_DTORS.register(real, state, state);
    return real;
}
// Trampoline for a Rust FnMut(A) closure: boxes the JS argument into the
// heap-object table and calls the (hash-named) wasm invoke shim.
function __wbg_adapter_20(arg0, arg1, arg2) {
    wasm._dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hef4ca354c23a4fc7(arg0, arg1, addHeapObject(arg2));
}
function _assertClass(instance, klass) {
    // Guard used by exported functions: reject any value that is not an
    // instance of the expected wrapper class, otherwise hand back its
    // (legacy-named) `ptr` field.
    if (instance instanceof klass) {
        return instance.ptr;
    }
    throw new Error(`expected instance of ${klass.name}`);
}
/**
 * Start the LSP server. Consumes `config`: its wasm pointer is taken via
 * `__destroy_into_raw`, so the ServerConfig instance must not be used again.
 * @param {ServerConfig} config
 * @returns {Promise<void>}
 */
export function serve(config) {
    _assertClass(config, ServerConfig);
    var ptr0 = config.__destroy_into_raw();
    const ret = wasm.serve(ptr0);
    return takeObject(ret);
}
/**
 * Map a TokenType enum value to its display string (computed in Rust).
 * @param {TokenType} token_type
 * @returns {string}
 */
export function token_type_as_js_string(token_type) {
    const ret = wasm.token_type_as_js_string(token_type);
    return takeObject(ret);
}
let cachedUint32Memory0 = null;
function getUint32Memory0() {
    // Lazily (re)build a u32 view over wasm linear memory; rebuilt whenever
    // the previous view has detached (byteLength 0 after memory growth).
    const invalid = cachedUint32Memory0 === null || cachedUint32Memory0.byteLength === 0;
    if (invalid) {
        cachedUint32Memory0 = new Uint32Array(wasm.memory.buffer);
    }
    return cachedUint32Memory0;
}
function getArrayJsValueFromWasm0(ptr, len) {
    // Read `len` heap-object indices (u32 each) starting at byte offset `ptr`
    // in wasm memory, draining each referenced object out of the heap table.
    ptr = ptr >>> 0;
    const start = ptr / 4;
    const slice = getUint32Memory0().subarray(start, start + len);
    const out = new Array(slice.length);
    for (let i = 0; i < slice.length; i++) {
        out[i] = takeObject(slice[i]);
    }
    return out;
}
/**
 * Tokenize `input` and return the resulting spans.
 * Uses the wasm shadow-stack return area: four i32 slots at `retptr` carry
 * (vec ptr, vec len, error heap index, error flag).
 * @param {string} input
 * @returns {(TokenSpan)[]}
 */
export function tokenize(input) {
    try {
        const retptr = wasm.__wbindgen_add_to_stack_pointer(-16);
        const ptr0 = passStringToWasm0(input, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
        const len0 = WASM_VECTOR_LEN;
        wasm.tokenize(retptr, ptr0, len0);
        var r0 = getInt32Memory0()[retptr / 4 + 0];
        var r1 = getInt32Memory0()[retptr / 4 + 1];
        var r2 = getInt32Memory0()[retptr / 4 + 2];
        var r3 = getInt32Memory0()[retptr / 4 + 3];
        if (r3) {
            // Error flag set: r2 is the heap index of the thrown value.
            throw takeObject(r2);
        }
        var v2 = getArrayJsValueFromWasm0(r0, r1).slice();
        // Free the Rust-side Vec backing store (len * 4 bytes, align 4).
        wasm.__wbindgen_free(r0, r1 * 4, 4);
        return v2;
    } finally {
        wasm.__wbindgen_add_to_stack_pointer(16);
    }
}
// Invoke an imported JS function, forwarding any JS exception to Rust as a
// stored exception instead of unwinding through wasm. NOTE(review): returns
// undefined on error by design — wasm checks the stored exception afterwards.
function handleError(f, args) {
    try {
        return f.apply(this, args);
    } catch (e) {
        wasm.__wbindgen_exn_store(addHeapObject(e));
    }
}
// Trampoline for a Rust FnMut(A, B) closure (used as a Promise executor).
function __wbg_adapter_99(arg0, arg1, arg2, arg3) {
    wasm.wasm_bindgen__convert__closures__invoke2_mut__hd246805203d2bb11(arg0, arg1, addHeapObject(arg2), addHeapObject(arg3));
}
/**
 * Token kinds produced by the tokenizer. Frozen two-way map: variant name →
 * discriminant number, and stringified number → variant name.
 */
export const TokenType = Object.freeze({ Pipe:0,"0":"Pipe",Ampersand:1,"1":"Ampersand",Semicolon:2,"2":"Semicolon",Equals:3,"3":"Equals",LessThan:4,"4":"LessThan",GreaterThan:5,"5":"GreaterThan",Variable:6,"6":"Variable",Word:7,"7":"Word",String:8,"8":"String",Comment:9,"9":"Comment",Integer:10,"10":"Integer",Float:11,"11":"Float",Eof:12,"12":"Eof",NewLine:13,"13":"NewLine",LeftParen:14,"14":"LeftParen",RightParen:15,"15":"RightParen",LeftBrace:16,"16":"LeftBrace",RightBrace:17,"17":"RightBrace",LeftBracket:18,"18":"LeftBracket",RightBracket:19,"19":"RightBracket",Comma:20,"20":"Comma",Dot:21,"21":"Dot",Colon:22,"22":"Colon",Underscore:23,"23":"Underscore",Minus:24,"24":"Minus",Plus:25,"25":"Plus",Arrow:26,"26":"Arrow",FatArrow:27,"27":"FatArrow",Divide:28,"28":"Divide",Multiply:29,"29":"Multiply",Percent:30,"30":"Percent",Dollar:31,"31":"Dollar",Exclamation:32,"32":"Exclamation",Question:33,"33":"Question",Tilde:34,"34":"Tilde",At:35,"35":"At",Caret:36,"36":"Caret",Shebang:37,"37":"Shebang", });
// Frees the Rust-side struct if the JS wrapper is GC'd without an explicit
// free(); no-op shim when FinalizationRegistry is unavailable.
const IntoUnderlyingByteSourceFinalization = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(ptr => wasm.__wbg_intounderlyingbytesource_free(ptr >>> 0));
/**
 * Rust-backed underlying byte source for constructing a JS ReadableStream.
 * Wraps a pointer (`__wbg_ptr`) into wasm memory.
 */
export class IntoUnderlyingByteSource {
    // Take ownership of the wasm pointer, leaving this wrapper empty (ptr 0)
    // and deregistered from the finalizer.
    __destroy_into_raw() {
        const ptr = this.__wbg_ptr;
        this.__wbg_ptr = 0;
        IntoUnderlyingByteSourceFinalization.unregister(this);
        return ptr;
    }
    // Explicitly release the Rust-side struct.
    free() {
        const ptr = this.__destroy_into_raw();
        wasm.__wbg_intounderlyingbytesource_free(ptr);
    }
    /**
     * Stream type marker read by ReadableStream (presumably the string
     * 'bytes' for a byte source — value computed in Rust, verify there).
     * @returns {any}
     */
    get type() {
        const ret = wasm.intounderlyingbytesource_type(this.__wbg_ptr);
        return takeObject(ret);
    }
    /**
     * Chunk size the stream should auto-allocate for reads (u32).
     * @returns {number}
     */
    get autoAllocateChunkSize() {
        const ret = wasm.intounderlyingbytesource_autoAllocateChunkSize(this.__wbg_ptr);
        return ret >>> 0;
    }
    /**
     * @param {any} controller
     */
    start(controller) {
        wasm.intounderlyingbytesource_start(this.__wbg_ptr, addHeapObject(controller));
    }
    /**
     * @param {any} controller
     * @returns {Promise<any>}
     */
    pull(controller) {
        const ret = wasm.intounderlyingbytesource_pull(this.__wbg_ptr, addHeapObject(controller));
        return takeObject(ret);
    }
    /**
     * Cancel consumes the wrapper: the pointer is taken and freed in Rust.
     */
    cancel() {
        const ptr = this.__destroy_into_raw();
        wasm.intounderlyingbytesource_cancel(ptr);
    }
}
// GC fallback that frees the Rust struct when the wrapper is collected.
const IntoUnderlyingSinkFinalization = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(ptr => wasm.__wbg_intounderlyingsink_free(ptr >>> 0));
/**
 * Rust-backed underlying sink for constructing a JS WritableStream.
 * close() and abort() consume the wrapper; write() does not.
 */
export class IntoUnderlyingSink {
    // Take ownership of the wasm pointer, leaving this wrapper empty.
    __destroy_into_raw() {
        const ptr = this.__wbg_ptr;
        this.__wbg_ptr = 0;
        IntoUnderlyingSinkFinalization.unregister(this);
        return ptr;
    }
    // Explicitly release the Rust-side struct.
    free() {
        const ptr = this.__destroy_into_raw();
        wasm.__wbg_intounderlyingsink_free(ptr);
    }
    /**
     * @param {any} chunk
     * @returns {Promise<any>}
     */
    write(chunk) {
        const ret = wasm.intounderlyingsink_write(this.__wbg_ptr, addHeapObject(chunk));
        return takeObject(ret);
    }
    /**
     * Consumes the wrapper (pointer taken before the call).
     * @returns {Promise<any>}
     */
    close() {
        const ptr = this.__destroy_into_raw();
        const ret = wasm.intounderlyingsink_close(ptr);
        return takeObject(ret);
    }
    /**
     * Consumes the wrapper (pointer taken before the call).
     * @param {any} reason
     * @returns {Promise<any>}
     */
    abort(reason) {
        const ptr = this.__destroy_into_raw();
        const ret = wasm.intounderlyingsink_abort(ptr, addHeapObject(reason));
        return takeObject(ret);
    }
}
// GC fallback that frees the Rust struct when the wrapper is collected.
const IntoUnderlyingSourceFinalization = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(ptr => wasm.__wbg_intounderlyingsource_free(ptr >>> 0));
/**
 * Rust-backed underlying (default, non-byte) source for a JS ReadableStream.
 */
export class IntoUnderlyingSource {
    // Take ownership of the wasm pointer, leaving this wrapper empty.
    __destroy_into_raw() {
        const ptr = this.__wbg_ptr;
        this.__wbg_ptr = 0;
        IntoUnderlyingSourceFinalization.unregister(this);
        return ptr;
    }
    // Explicitly release the Rust-side struct.
    free() {
        const ptr = this.__destroy_into_raw();
        wasm.__wbg_intounderlyingsource_free(ptr);
    }
    /**
     * @param {any} controller
     * @returns {Promise<any>}
     */
    pull(controller) {
        const ret = wasm.intounderlyingsource_pull(this.__wbg_ptr, addHeapObject(controller));
        return takeObject(ret);
    }
    /**
     * Cancel consumes the wrapper: the pointer is taken and freed in Rust.
     */
    cancel() {
        const ptr = this.__destroy_into_raw();
        wasm.intounderlyingsource_cancel(ptr);
    }
}
// GC fallback that frees the Rust struct when the wrapper is collected.
const PipeOptionsFinalization = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(ptr => wasm.__wbg_pipeoptions_free(ptr >>> 0));
/**
 * Raw options for [`pipeTo()`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/pipeTo).
 * Boolean getters convert the wasm i32 (0/1) to a JS boolean.
 */
export class PipeOptions {
    // Take ownership of the wasm pointer, leaving this wrapper empty.
    __destroy_into_raw() {
        const ptr = this.__wbg_ptr;
        this.__wbg_ptr = 0;
        PipeOptionsFinalization.unregister(this);
        return ptr;
    }
    // Explicitly release the Rust-side struct.
    free() {
        const ptr = this.__destroy_into_raw();
        wasm.__wbg_pipeoptions_free(ptr);
    }
    /**
     * @returns {boolean}
     */
    get preventClose() {
        const ret = wasm.pipeoptions_preventClose(this.__wbg_ptr);
        return ret !== 0;
    }
    /**
     * @returns {boolean}
     */
    get preventCancel() {
        const ret = wasm.pipeoptions_preventCancel(this.__wbg_ptr);
        return ret !== 0;
    }
    /**
     * @returns {boolean}
     */
    get preventAbort() {
        const ret = wasm.pipeoptions_preventAbort(this.__wbg_ptr);
        return ret !== 0;
    }
    /**
     * @returns {AbortSignal | undefined}
     */
    get signal() {
        const ret = wasm.pipeoptions_signal(this.__wbg_ptr);
        return takeObject(ret);
    }
}
// GC fallback that frees the Rust struct when the wrapper is collected.
const QueuingStrategyFinalization = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(ptr => wasm.__wbg_queuingstrategy_free(ptr >>> 0));
/**
 * Raw queuing strategy handed to stream constructors; exposes only the
 * high-water mark (an f64 read straight from Rust).
 */
export class QueuingStrategy {
    // Take ownership of the wasm pointer, leaving this wrapper empty.
    __destroy_into_raw() {
        const ptr = this.__wbg_ptr;
        this.__wbg_ptr = 0;
        QueuingStrategyFinalization.unregister(this);
        return ptr;
    }
    // Explicitly release the Rust-side struct.
    free() {
        const ptr = this.__destroy_into_raw();
        wasm.__wbg_queuingstrategy_free(ptr);
    }
    /**
     * @returns {number}
     */
    get highWaterMark() {
        const ret = wasm.queuingstrategy_highWaterMark(this.__wbg_ptr);
        return ret;
    }
}
// GC fallback that frees the Rust struct when the wrapper is collected.
const ReadableStreamGetReaderOptionsFinalization = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(ptr => wasm.__wbg_readablestreamgetreaderoptions_free(ptr >>> 0));
/**
 * Raw options for [`getReader()`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream/getReader).
 */
export class ReadableStreamGetReaderOptions {
    // Take ownership of the wasm pointer, leaving this wrapper empty.
    __destroy_into_raw() {
        const ptr = this.__wbg_ptr;
        this.__wbg_ptr = 0;
        ReadableStreamGetReaderOptionsFinalization.unregister(this);
        return ptr;
    }
    // Explicitly release the Rust-side struct.
    free() {
        const ptr = this.__destroy_into_raw();
        wasm.__wbg_readablestreamgetreaderoptions_free(ptr);
    }
    /**
     * Reader mode option (value computed in Rust — presumably 'byob' or
     * undefined; verify against the Rust definition).
     * @returns {any}
     */
    get mode() {
        const ret = wasm.readablestreamgetreaderoptions_mode(this.__wbg_ptr);
        return takeObject(ret);
    }
}
// GC fallback that frees the Rust struct when the wrapper is collected.
const ServerConfigFinalization = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(ptr => wasm.__wbg_serverconfig_free(ptr >>> 0));
/**
 * Channel endpoints for the LSP server: an async iterator of incoming
 * messages and a WritableStream for outgoing ones. Consumed by serve().
 */
export class ServerConfig {
    // Take ownership of the wasm pointer, leaving this wrapper empty.
    __destroy_into_raw() {
        const ptr = this.__wbg_ptr;
        this.__wbg_ptr = 0;
        ServerConfigFinalization.unregister(this);
        return ptr;
    }
    // Explicitly release the Rust-side struct.
    free() {
        const ptr = this.__destroy_into_raw();
        wasm.__wbg_serverconfig_free(ptr);
    }
    /**
     * @param {AsyncIterator<any>} into_server
     * @param {WritableStream} from_server
     */
    constructor(into_server, from_server) {
        const ret = wasm.serverconfig_new(addHeapObject(into_server), addHeapObject(from_server));
        this.__wbg_ptr = ret >>> 0;
        // NOTE(review): unlike TokenSpan.__wrap, the new instance is never
        // registered with ServerConfigFinalization, so a constructed-but-unused
        // config that gets GC'd leaks the Rust struct — confirm intended.
        return this;
    }
}
// GC fallback that frees the Rust struct when the wrapper is collected.
const TokenSpanFinalization = (typeof FinalizationRegistry === 'undefined')
    ? { register: () => {}, unregister: () => {} }
    : new FinalizationRegistry(ptr => wasm.__wbg_tokenspan_free(ptr >>> 0));
/**
 * A tokenizer result: [start, end) offsets plus a TokenType discriminant
 * (`scope`). Instances are produced from Rust via __wrap.
 */
export class TokenSpan {
    // Adopt a Rust-owned pointer: build an instance without running the JS
    // constructor and register it for GC-time cleanup.
    static __wrap(ptr) {
        ptr = ptr >>> 0;
        const obj = Object.create(TokenSpan.prototype);
        obj.__wbg_ptr = ptr;
        TokenSpanFinalization.register(obj, obj.__wbg_ptr, obj);
        return obj;
    }
    // Take ownership of the wasm pointer, leaving this wrapper empty.
    __destroy_into_raw() {
        const ptr = this.__wbg_ptr;
        this.__wbg_ptr = 0;
        TokenSpanFinalization.unregister(this);
        return ptr;
    }
    // Explicitly release the Rust-side struct.
    free() {
        const ptr = this.__destroy_into_raw();
        wasm.__wbg_tokenspan_free(ptr);
    }
    /**
     * Start offset (u32; `>>> 0` reinterprets the wasm i32 as unsigned).
     * @returns {number}
     */
    get start() {
        const ret = wasm.__wbg_get_tokenspan_start(this.__wbg_ptr);
        return ret >>> 0;
    }
    /**
     * @param {number} arg0
     */
    set start(arg0) {
        wasm.__wbg_set_tokenspan_start(this.__wbg_ptr, arg0);
    }
    /**
     * End offset (u32).
     * @returns {number}
     */
    get end() {
        const ret = wasm.__wbg_get_tokenspan_end(this.__wbg_ptr);
        return ret >>> 0;
    }
    /**
     * @param {number} arg0
     */
    set end(arg0) {
        wasm.__wbg_set_tokenspan_end(this.__wbg_ptr, arg0);
    }
    /**
     * Token kind as a raw TokenType discriminant.
     * @returns {TokenType}
     */
    get scope() {
        const ret = wasm.__wbg_get_tokenspan_scope(this.__wbg_ptr);
        return ret;
    }
    /**
     * @param {TokenType} arg0
     */
    set scope(arg0) {
        wasm.__wbg_set_tokenspan_scope(this.__wbg_ptr, arg0);
    }
}
// Instantiate the wasm module from whatever `module` is: a fetch Response
// (preferring streaming compilation), raw bytes, or a precompiled
// WebAssembly.Module. Returns { instance, module } in every case.
async function __wbg_load(module, imports) {
    if (typeof Response === 'function' && module instanceof Response) {
        if (typeof WebAssembly.instantiateStreaming === 'function') {
            try {
                return await WebAssembly.instantiateStreaming(module, imports);
            } catch (e) {
                // Wrong MIME type is recoverable (fall through to ArrayBuffer
                // instantiation); any other failure is a real error.
                if (module.headers.get('Content-Type') != 'application/wasm') {
                    console.warn("`WebAssembly.instantiateStreaming` failed because your server does not serve wasm with `application/wasm` MIME type. Falling back to `WebAssembly.instantiate` which is slower. Original error:\n", e);
                } else {
                    throw e;
                }
            }
        }
        const bytes = await module.arrayBuffer();
        return await WebAssembly.instantiate(bytes, imports);
    } else {
        const instance = await WebAssembly.instantiate(module, imports);
        if (instance instanceof WebAssembly.Instance) {
            // Input was already a WebAssembly.Module: normalize the shape.
            return { instance, module };
        } else {
            return instance;
        }
    }
}
// Build the import object the wasm module links against. Every entry under
// `imports.wbg` is a JS shim the Rust code calls; hashed suffixes are
// generated by wasm-bindgen and must match the module byte-for-byte.
function __wbg_get_imports() {
    const imports = {};
    imports.wbg = {};
    // --- heap-object lifetime and string marshalling ---
    imports.wbg.__wbindgen_object_drop_ref = function(arg0) {
        takeObject(arg0);
    };
    imports.wbg.__wbindgen_cb_drop = function(arg0) {
        const obj = takeObject(arg0).original;
        if (obj.cnt-- == 1) {
            // Last reference: disarm the closure so future calls fail fast.
            obj.a = 0;
            return true;
        }
        const ret = false;
        return ret;
    };
    imports.wbg.__wbg_tokenspan_new = function(arg0) {
        const ret = TokenSpan.__wrap(arg0);
        return addHeapObject(ret);
    };
    imports.wbg.__wbindgen_string_new = function(arg0, arg1) {
        const ret = getStringFromWasm0(arg0, arg1);
        return addHeapObject(ret);
    };
    imports.wbg.__wbindgen_string_get = function(arg0, arg1) {
        // Writes (ptr, len) of the UTF-8 copy into the two i32 slots at arg0;
        // ptr 0 signals "not a string".
        const obj = getObject(arg1);
        const ret = typeof(obj) === 'string' ? obj : undefined;
        var ptr1 = isLikeNone(ret) ? 0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
        var len1 = WASM_VECTOR_LEN;
        getInt32Memory0()[arg0 / 4 + 1] = len1;
        getInt32Memory0()[arg0 / 4 + 0] = ptr1;
    };
    // --- WritableStream writer plumbing ---
    imports.wbg.__wbg_write_f6d68f54a7b41f92 = function(arg0, arg1) {
        const ret = getObject(arg0).write(takeObject(arg1));
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_getWriter_6cdcca9cf3b715dc = function() { return handleError(function (arg0) {
        const ret = getObject(arg0).getWriter();
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_ready_8f8565755a0b5d1f = function(arg0) {
        const ret = getObject(arg0).ready;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_close_ae487688d07f03b0 = function(arg0) {
        const ret = getObject(arg0).close();
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_releaseLock_0d3bce87e07d43f6 = function(arg0) {
        getObject(arg0).releaseLock();
    };
    // --- ReadableStream controller / BYOB request plumbing ---
    imports.wbg.__wbg_respond_f4778bef04e912a6 = function(arg0, arg1) {
        getObject(arg0).respond(arg1 >>> 0);
    };
    imports.wbg.__wbg_close_a41954830b65c455 = function(arg0) {
        getObject(arg0).close();
    };
    imports.wbg.__wbg_enqueue_3a8a8e67e44d2567 = function(arg0, arg1) {
        getObject(arg0).enqueue(getObject(arg1));
    };
    imports.wbg.__wbg_byobRequest_a3c74c3694777d1b = function(arg0) {
        const ret = getObject(arg0).byobRequest;
        return isLikeNone(ret) ? 0 : addHeapObject(ret);
    };
    imports.wbg.__wbg_close_045ed342139beb7d = function(arg0) {
        getObject(arg0).close();
    };
    imports.wbg.__wbg_view_d1a31268af734e5d = function(arg0) {
        const ret = getObject(arg0).view;
        return isLikeNone(ret) ? 0 : addHeapObject(ret);
    };
    imports.wbg.__wbg_buffer_610b70c8fd30da2d = function(arg0) {
        const ret = getObject(arg0).buffer;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_byteOffset_ede786cfcf88d3dd = function(arg0) {
        const ret = getObject(arg0).byteOffset;
        return ret;
    };
    imports.wbg.__wbg_byteLength_1fef7842ca4200fa = function(arg0) {
        const ret = getObject(arg0).byteLength;
        return ret;
    };
    imports.wbg.__wbindgen_object_clone_ref = function(arg0) {
        const ret = getObject(arg0);
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_bytesliteral_efe7d360639bf32b = function() {
        const ret = bytes_literal();
        return addHeapObject(ret);
    };
    // --- console / error reporting ---
    imports.wbg.__wbg_new_abda76e883ba8a5f = function() {
        const ret = new Error();
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_stack_658279fe44541cf6 = function(arg0, arg1) {
        const ret = getObject(arg1).stack;
        const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
        const len1 = WASM_VECTOR_LEN;
        getInt32Memory0()[arg0 / 4 + 1] = len1;
        getInt32Memory0()[arg0 / 4 + 0] = ptr1;
    };
    imports.wbg.__wbg_error_f851667af71bcfc6 = function(arg0, arg1) {
        let deferred0_0;
        let deferred0_1;
        try {
            deferred0_0 = arg0;
            deferred0_1 = arg1;
            console.error(getStringFromWasm0(arg0, arg1));
        } finally {
            // The string buffer is owned by this shim; free it even on throw.
            wasm.__wbindgen_free(deferred0_0, deferred0_1, 1);
        }
    };
    imports.wbg.__wbg_log_5bb5f88f245d7762 = function(arg0) {
        console.log(getObject(arg0));
    };
    // --- iterator protocol and Promise plumbing ---
    imports.wbg.__wbindgen_is_object = function(arg0) {
        const val = getObject(arg0);
        const ret = typeof(val) === 'object' && val !== null;
        return ret;
    };
    imports.wbg.__wbg_done_298b57d23c0fc80c = function(arg0) {
        const ret = getObject(arg0).done;
        return ret;
    };
    imports.wbg.__wbg_value_d93c65011f51a456 = function(arg0) {
        const ret = getObject(arg0).value;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_new_28c511d9baebfa89 = function(arg0, arg1) {
        const ret = new Error(getStringFromWasm0(arg0, arg1));
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_call_b3ca7c6051f9bec1 = function() { return handleError(function (arg0, arg1, arg2) {
        const ret = getObject(arg0).call(getObject(arg1), getObject(arg2));
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_next_a1c35169a4db2ac1 = function() { return handleError(function (arg0) {
        const ret = getObject(arg0).next();
        return addHeapObject(ret);
    }, arguments) };
    imports.wbg.__wbg_toString_c816a20ab859d0c1 = function(arg0) {
        const ret = getObject(arg0).toString();
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_new_81740750da40724f = function(arg0, arg1) {
        // Promise constructor shim: the executor trampolines resolve/reject
        // back into the Rust closure, disarming it during each call.
        try {
            var state0 = {a: arg0, b: arg1};
            var cb0 = (arg0, arg1) => {
                const a = state0.a;
                state0.a = 0;
                try {
                    return __wbg_adapter_99(a, state0.b, arg0, arg1);
                } finally {
                    state0.a = a;
                }
            };
            const ret = new Promise(cb0);
            return addHeapObject(ret);
        } finally {
            state0.a = state0.b = 0;
        }
    };
    imports.wbg.__wbg_resolve_b0083a7967828ec8 = function(arg0) {
        const ret = Promise.resolve(getObject(arg0));
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_then_0c86a60e8fcfe9f6 = function(arg0, arg1) {
        const ret = getObject(arg0).then(getObject(arg1));
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_then_a73caa9a87991566 = function(arg0, arg1, arg2) {
        const ret = getObject(arg0).then(getObject(arg1), getObject(arg2));
        return addHeapObject(ret);
    };
    // --- typed-array helpers ---
    imports.wbg.__wbg_buffer_12d079cc21e14bdb = function(arg0) {
        const ret = getObject(arg0).buffer;
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_newwithbyteoffsetandlength_aa4a17c33a06e5cb = function(arg0, arg1, arg2) {
        const ret = new Uint8Array(getObject(arg0), arg1 >>> 0, arg2 >>> 0);
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_new_63b92bc8671ed464 = function(arg0) {
        const ret = new Uint8Array(getObject(arg0));
        return addHeapObject(ret);
    };
    imports.wbg.__wbg_set_a47bac70306a19a7 = function(arg0, arg1, arg2) {
        getObject(arg0).set(getObject(arg1), arg2 >>> 0);
    };
    imports.wbg.__wbg_length_c20a40f15020d68a = function(arg0) {
        const ret = getObject(arg0).length;
        return ret;
    };
    imports.wbg.__wbg_instanceof_Uint8Array_2b3bbecd033d19f6 = function(arg0) {
        let result;
        try {
            result = getObject(arg0) instanceof Uint8Array;
        } catch (_) {
            result = false;
        }
        const ret = result;
        return ret;
    };
    // --- intrinsics: debug formatting, panics, memory, closures ---
    imports.wbg.__wbindgen_debug_string = function(arg0, arg1) {
        const ret = debugString(getObject(arg1));
        const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
        const len1 = WASM_VECTOR_LEN;
        getInt32Memory0()[arg0 / 4 + 1] = len1;
        getInt32Memory0()[arg0 / 4 + 0] = ptr1;
    };
    imports.wbg.__wbindgen_throw = function(arg0, arg1) {
        throw new Error(getStringFromWasm0(arg0, arg1));
    };
    imports.wbg.__wbindgen_memory = function() {
        const ret = wasm.memory;
        return addHeapObject(ret);
    };
    imports.wbg.__wbindgen_closure_wrapper4516 = function(arg0, arg1, arg2) {
        const ret = makeMutClosure(arg0, arg1, 1550, __wbg_adapter_20);
        return addHeapObject(ret);
    };
    return imports;
}
// Hook for supplying a caller-provided WebAssembly.Memory; intentionally a
// no-op in this build.
function __wbg_init_memory(imports, maybe_memory) {
}
// Record the instantiated exports/module and invalidate all cached typed-array
// views — they would alias a stale (possibly detached) buffer.
function __wbg_finalize_init(instance, module) {
    wasm = instance.exports;
    __wbg_init.__wbindgen_wasm_module = module;
    cachedInt32Memory0 = null;
    cachedUint32Memory0 = null;
    cachedUint8Memory0 = null;
    return wasm;
}
// Synchronous initialization from raw bytes or a precompiled
// WebAssembly.Module. Idempotent: returns existing exports if already loaded.
function initSync(module) {
    if (wasm !== undefined) return wasm;
    const imports = __wbg_get_imports();
    __wbg_init_memory(imports);
    if (!(module instanceof WebAssembly.Module)) {
        module = new WebAssembly.Module(module);
    }
    const instance = new WebAssembly.Instance(module, imports);
    return __wbg_finalize_init(instance, module);
}
// Async initialization (default export). `input` may be a string/URL/Request
// (fetched), a Response, raw bytes, or a WebAssembly.Module; defaults to the
// .wasm file that sits next to this module. Idempotent.
async function __wbg_init(input) {
    if (wasm !== undefined) return wasm;
    if (typeof input === 'undefined') {
        input = new URL('src_lsp_browser_bg.wasm', import.meta.url);
    }
    const imports = __wbg_get_imports();
    if (typeof input === 'string' || (typeof Request === 'function' && input instanceof Request) || (typeof URL === 'function' && input instanceof URL)) {
        input = fetch(input);
    }
    __wbg_init_memory(imports);
    const { instance, module } = await __wbg_load(await input, imports);
    return __wbg_finalize_init(instance, module);
}
export { initSync }
export default __wbg_init;

Binary file not shown.

View file

@ -0,0 +1,45 @@
/* tslint:disable */
/* eslint-disable */
// Raw wasm export surface generated by wasm-bindgen for src_lsp_browser.
// All `number` parameters/returns are pointers, lengths, or handles into
// wasm linear memory and tables; use the JS glue module instead of calling
// these directly.
export const memory: WebAssembly.Memory;
export function __wbg_serverconfig_free(a: number): void;
export function serverconfig_new(a: number, b: number): number;
export function serve(a: number): number;
export function __wbg_tokenspan_free(a: number): void;
export function __wbg_get_tokenspan_start(a: number): number;
export function __wbg_set_tokenspan_start(a: number, b: number): void;
export function __wbg_get_tokenspan_end(a: number): number;
export function __wbg_set_tokenspan_end(a: number, b: number): void;
export function __wbg_get_tokenspan_scope(a: number): number;
export function __wbg_set_tokenspan_scope(a: number, b: number): void;
export function token_type_as_js_string(a: number): number;
export function tokenize(a: number, b: number, c: number): void;
export function __wbg_intounderlyingsink_free(a: number): void;
export function intounderlyingsink_write(a: number, b: number): number;
export function intounderlyingsink_close(a: number): number;
export function intounderlyingsink_abort(a: number, b: number): number;
export function __wbg_intounderlyingsource_free(a: number): void;
export function intounderlyingsource_pull(a: number, b: number): number;
export function intounderlyingsource_cancel(a: number): void;
export function __wbg_readablestreamgetreaderoptions_free(a: number): void;
export function readablestreamgetreaderoptions_mode(a: number): number;
export function __wbg_pipeoptions_free(a: number): void;
export function pipeoptions_preventClose(a: number): number;
export function pipeoptions_preventCancel(a: number): number;
export function pipeoptions_preventAbort(a: number): number;
export function pipeoptions_signal(a: number): number;
export function __wbg_queuingstrategy_free(a: number): void;
export function queuingstrategy_highWaterMark(a: number): number;
export function __wbg_intounderlyingbytesource_free(a: number): void;
export function intounderlyingbytesource_type(a: number): number;
export function intounderlyingbytesource_autoAllocateChunkSize(a: number): number;
export function intounderlyingbytesource_start(a: number, b: number): void;
export function intounderlyingbytesource_pull(a: number, b: number): number;
export function intounderlyingbytesource_cancel(a: number): void;
export function __wbindgen_malloc(a: number, b: number): number;
export function __wbindgen_realloc(a: number, b: number, c: number, d: number): number;
export const __wbindgen_export_2: WebAssembly.Table;
export function _dyn_core__ops__function__FnMut__A____Output___R_as_wasm_bindgen__closure__WasmClosure___describe__invoke__hef4ca354c23a4fc7(a: number, b: number, c: number): void;
export function __wbindgen_add_to_stack_pointer(a: number): number;
export function __wbindgen_free(a: number, b: number, c: number): void;
export function __wbindgen_exn_store(a: number): void;
export function wasm_bindgen__convert__closures__invoke2_mut__hd246805203d2bb11(a: number, b: number, c: number, d: number): void;

17
packages/app/build.sh Executable file
View file

@ -0,0 +1,17 @@
#!/bin/sh
# FILEPATH: /workspaces/libsrc/packages/app/build.sh
# Build the browser LSP wasm bundle and stage the playground assets into the
# mdBook output directory.
#
# Fix: abort on the first failing step — previously a failed cargo or
# wasm-bindgen invocation cascaded into webpack/mv steps shipping stale
# artifacts.
set -e

cargo build --release -p src-lsp-browser --target wasm32-unknown-unknown

# If we're in a docker container the output directory is /scratch/cargo_target;
# otherwise (local checkout) it is ../../target.
OUT_DIR="/scratch/cargo_target"
if [ ! -d "$OUT_DIR" ]; then
    OUT_DIR="../../target"
fi

# Quote the path so an OUT_DIR containing spaces cannot split the argument.
wasm-bindgen --out-dir assets/wasm --target web --typescript "${OUT_DIR}/wasm32-unknown-unknown/release/src_lsp_browser.wasm"
webpack

# Stage built artifacts where the book expects them.
mv ../../book/playground/*.wasm ../../book
mv ../../book/playground/*.ttf ../../book
cp ../../book/taocp.png ../../book/playground

1
packages/app/declarations.d.ts vendored Normal file
View file

@ -0,0 +1 @@
declare module "*.module.css";

Binary file not shown.

Binary file not shown.

2
packages/app/dist/app.bundle.js vendored Normal file

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1 @@
/*! @license DOMPurify 2.3.1 | (c) Cure53 and other contributors | Released under the Apache license 2.0 and Mozilla Public License 2.0 | github.com/cure53/DOMPurify/blob/2.3.1/LICENSE */

File diff suppressed because one or more lines are too long

383
packages/app/dist/index.html vendored Normal file
View file

@ -0,0 +1,383 @@
<!DOCTYPE html>
<html lang="en" class="dark" dir="">
<head>
<meta charset="UTF-8" />
<title>Playground</title>
<meta name="description" content="" />
<meta name="viewport" content="width=device-width,initial-scale=1" />
<meta name="theme-color" content="#ffffff" />
<link rel="shortcut icon" href="taocp.png" />
<link rel="stylesheet" href="css/variables.css" />
<link rel="stylesheet" href="css/general.css" />
<link rel="stylesheet" href="css/chrome.css" />
<link rel="stylesheet" href="css/print.css" media="print" />
<link rel="stylesheet" href="FontAwesome/css/font-awesome.css" />
<link rel="stylesheet" href="fonts/fonts.css" />
<link rel="stylesheet" href="highlight.css" />
<link rel="stylesheet" href="tomorrow-night.css" />
<link rel="stylesheet" href="ayu-highlight.css" />
<link rel="stylesheet" href="ok.css" />
<link rel="preconnect" href="https://fonts.googleapis.com" />
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
<link
href="https://fonts.googleapis.com/css2?family=Allura&family=Inclusive+Sans:ital@0;1&display=swap"
rel="stylesheet"
/>
<script type="module" src="app.bundle.js"></script>
<script type="module" src="editor.worker.bundle.js"></script>
</head>
<body class="sidebar-visible no-js">
<div id="body-container">
<script>
var path_to_root = "";
var default_theme = window.matchMedia("(prefers-color-scheme: dark)")
.matches
? "rust"
: "dark";
</script>
<script>
try {
var theme = localStorage.getItem("mdbook-theme");
var sidebar = localStorage.getItem("mdbook-sidebar");
if (theme.startsWith('"') && theme.endsWith('"')) {
localStorage.setItem(
"mdbook-theme",
theme.slice(1, theme.length - 1)
);
}
if (sidebar.startsWith('"') && sidebar.endsWith('"')) {
localStorage.setItem(
"mdbook-sidebar",
sidebar.slice(1, sidebar.length - 1)
);
}
} catch (e) {}
</script>
<script>
var theme;
try {
theme = localStorage.getItem("mdbook-theme");
} catch (e) {}
if (theme === null || theme === undefined) {
theme = default_theme;
}
var html = document.querySelector("html");
html.classList.remove("dark");
html.classList.add(theme);
var body = document.querySelector("body");
body.classList.remove("no-js");
body.classList.add("js");
</script>
<input type="checkbox" id="sidebar-toggle-anchor" class="hidden" />
<script>
var body = document.querySelector("body");
var sidebar = null;
var sidebar_toggle = document.getElementById("sidebar-toggle-anchor");
if (document.body.clientWidth >= 1080) {
try {
sidebar = localStorage.getItem("mdbook-sidebar");
} catch (e) {}
sidebar = sidebar || "visible";
} else {
sidebar = "hidden";
}
sidebar_toggle.checked = sidebar === "visible";
body.classList.remove("sidebar-visible");
body.classList.add("sidebar-" + sidebar);
</script>
<nav id="sidebar" class="sidebar" aria-label="Table of contents">
<div class="sidebar-scrollbox">
<ol class="chapter">
<li class="chapter-item expanded">
<a href="0intro.html"
><strong aria-hidden="true">1.</strong> Intro</a
>
</li>
<li class="chapter-item expanded affix"></li>
<li class="part-title">Language</li>
<li class="chapter-item expanded">
<a href="language/0intro.html"
><strong aria-hidden="true">2.</strong> Language</a
>
</li>
<li>
<ol class="section">
<li class="chapter-item expanded">
<a href="examples.html"
><strong aria-hidden="true">2.1.</strong> Examples</a
>
</li>
</ol>
</li>
<li class="chapter-item expanded"></li>
<li class="part-title">Playground</li>
<li class="chapter-item expanded">
<a href="playground.html" class="active"
><strong aria-hidden="true">3.</strong> Playground</a
>
</li>
</ol>
</div>
<div id="sidebar-resize-handle" class="sidebar-resize-handle">
<div class="sidebar-resize-indicator"></div>
</div>
</nav>
<script>
var sidebarScrollbox = document.querySelector(
"#sidebar .sidebar-scrollbox"
);
sidebarScrollbox.addEventListener(
"click",
function (e) {
if (e.target.tagName === "A") {
sessionStorage.setItem(
"sidebar-scroll",
sidebarScrollbox.scrollTop
);
}
},
{ passive: true }
);
var sidebarScrollTop = sessionStorage.getItem("sidebar-scroll");
sessionStorage.removeItem("sidebar-scroll");
if (sidebarScrollTop) {
// preserve sidebar scroll position when navigating via links within sidebar
sidebarScrollbox.scrollTop = sidebarScrollTop;
} else {
// scroll sidebar to current active section when navigating via "next/previous chapter" buttons
var activeSection = document.querySelector("#sidebar .active");
if (activeSection) {
activeSection.scrollIntoView({ block: "center" });
}
}
</script>
<div id="page-wrapper" class="page-wrapper">
<div class="page">
<div id="menu-bar-hover-placeholder"></div>
<div id="menu-bar" class="menu-bar sticky">
<div class="left-buttons">
<label
id="sidebar-toggle"
class="nes-btn"
for="sidebar-toggle-anchor"
title="Toggle Table of Contents"
aria-label="Toggle Table of Contents"
aria-controls="sidebar"
>🍔</label
>
<button
id="theme-toggle"
class="nes-btn"
type="button"
title="Change theme"
aria-label="Change theme"
aria-haspopup="true"
aria-expanded="false"
aria-controls="theme-list"
>
💅
</button>
<ul
id="theme-list"
class="is-primary theme-popup"
aria-label="Themes"
role="menu"
>
<li role="none">
<button role="menuitem" class="theme" id="light">
Light
</button>
</li>
<li role="none">
<button role="menuitem" class="theme" id="rust">Rust</button>
</li>
<li role="none">
<button role="menuitem" class="theme" id="coal">Coal</button>
</li>
<li role="none">
<button role="menuitem" class="theme" id="navy">Navy</button>
</li>
<li role="none">
<button role="menuitem" class="theme" id="ayu">Ayu</button>
</li>
</ul>
<button
id="search-toggle"
class="nes-btn is-primary"
type="button"
title="Search. (Shortkey: s)"
aria-label="Toggle Searchbar"
aria-expanded="false"
aria-keyshortcuts="S"
aria-controls="searchbar"
>
🔎
</button>
</div>
<h1 class="menu-title"></h1>
<div class="right-buttons">
<a
href="print.html"
title="Print this book"
aria-label="Print this book"
class="nes-btn"
>🖨️ </a
><a
class="nes-btn"
href="https://ok.software/ok/src"
title="branch"
aria-label="branch"
>🌿</a
>
</div>
</div>
<div id="search-wrapper" class="hidden">
<form id="searchbar-outer" class="searchbar-outer">
<input
type="search"
id="searchbar"
class="nes-field"
name="searchbar"
placeholder="Search this book ..."
aria-controls="searchresults-outer"
aria-describedby="searchresults-header"
/>
</form>
<div id="searchresults-outer" class="searchresults-outer hidden">
<div id="searchresults-header" class="searchresults-header"></div>
<ul id="searchresults"></ul>
</div>
</div>
<script>
document
.getElementById("sidebar-toggle")
.setAttribute("aria-expanded", sidebar === "visible");
document
.getElementById("sidebar")
.setAttribute("aria-hidden", sidebar !== "visible");
Array.from(document.querySelectorAll("#sidebar a")).forEach(
function (link) {
link.setAttribute("tabIndex", sidebar === "visible" ? 0 : -1);
}
);
</script>
<div id="content" class="content">
<main>
<div id="container">
<div id="cell-editor">
<label for="editor">editor</label>
<div id="editor"></div>
</div>
<div id="cell-syntax">
<label for="channel-syntax">syntax</label>
<textarea
id="channel-syntax"
autocomplete="off"
spellcheck="off"
wrap="off"
readonly="readonly"
></textarea>
</div>
<div id="cell-console">
<label for="channel-console">console</label>
<textarea
id="channel-console"
autocomplete="off"
spellcheck="off"
wrap="off"
readonly="readonly"
rows="3"
></textarea>
</div>
<div id="cell-client">
<label for="channel-client"
>message trace (client ⇒ server)</label
>
<textarea
id="channel-client"
autocomplete="off"
spellcheck="off"
wrap="off"
readonly="readonly"
rows="4"
></textarea>
</div>
<div id="cell-server">
<label for="channel-server"
>message trace (client ⇐ server)</label
>
<textarea
id="channel-server"
autocomplete="off"
spellcheck="off"
wrap="off"
readonly="readonly"
rows="4"
></textarea>
</div>
</div>
</main>
<nav class="nav-wrapper" aria-label="Page navigation">
<a
rel="prev"
href="examples.html"
class="mobile-nav-chapters previous"
title="Previous chapter"
aria-label="Previous chapter"
aria-keyshortcuts="Left"
><img
src="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAzElEQVRYR+2X0Q6AIAhF5f8/2jYXZkwEjNSVvVUjDpcrGgT7FUkI2D9xRfQETwNIiWO85wfINfQUEyxBG2ArsLwC0jioGt5zFcwF4OYDPi/mBYKm4t0U8ATgRm3ThFoAqkhNgWkA0jJLvaOVSs7j3qMnSgXWBMiWPXe94QqMBMBc1VZIvaTu5u5pQewq0EqNZvIEMCmxAawK0DNkay9QmfFNAJUXfgGgUkLaE7j/h8fnASkxHTz0DGIBMCnBeeM7AArpUd3mz2x3C7wADglA8BcWMZhZAAAAAElFTkSuQmCC"
style="rotate: 270deg; width: 94px"
/></a>
<div style="clear: both"></div>
</nav>
</div>
</div>
<nav class="nav-wide-wrapper" aria-label="Page navigation">
<a
rel="next prefetch"
href="examples.html"
class="nav-chapters previous"
title="Next chapter"
aria-label="Next chapter"
aria-keyshortcuts="Right"
style="
display: flex;
justify-content: center;
align-items: center;
width: 50%;
"
>&lt;</a
>
</nav>
</div>
<script>
const wsProtocol = location.protocol === "https:" ? "wss:" : "ws:";
const wsAddress =
wsProtocol + "//" + location.host + "/" + "__livereload";
const socket = new WebSocket(wsAddress);
socket.onmessage = function (event) {
if (event.data === "reload") {
socket.close();
location.reload();
}
};
window.onbeforeunload = function () {
socket.close();
};
</script>
<script>
window.playground_copyable = true;
</script>
<script src="elasticlunr.min.js"></script>
<script src="mark.min.js"></script>
<script src="searcher.js"></script>
<script src="clipboard.min.js"></script>
<script src="highlight.js"></script>
<script src="book.js"></script>
</div>
</body>
</html>

58
packages/app/package.json Normal file
View file

@ -0,0 +1,58 @@
{
"private": true,
"name": "monaco-lsp-streams",
"description": "",
"version": "0.0.0",
"license": "Apache-2.0 WITH LLVM-exception",
"author": {
"name": "silvanshade",
"email": "silvanshade@users.noreply.github.com",
"url": "https://github.com/silvanshade"
},
"homepage": "https://github.com/silvanshade/monaco-lsp-streams#readme",
"repository": {
"type": "git",
"url": "git+https://github.com/silvanshade/monaco-lsp-streams.git"
},
"bugs": {
"url": "https://github.com/silvanshade/monaco-lsp-streams/issues"
},
"main": "index.js",
"scripts": {
"build": "./build.sh",
"format": "prettier --write '**/*.{js,json,ts,tsx,yml,yaml}'",
"lint": "eslint 'src/**/*.{js,ts,tsx}' && prettier --check '**/*.{json,yml,yaml}'",
"app": "webpack serve --open"
},
"devDependencies": {
"@types/debounce": "^1.2.1",
"@typescript-eslint/eslint-plugin": "^5.27.0",
"@typescript-eslint/parser": "^5.27.0",
"clean-webpack-plugin": "^4.0.0",
"copy-webpack-plugin": "^11.0.0",
"css-loader": "^6.7.1",
"esbuild-loader": "^2.19.0",
"eslint": "^8.17.0",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-prettier": "^4.0.0",
"file-loader": "^6.2.0",
"html-webpack-plugin": "^5.5.0",
"path-browserify": "^1.0.1",
"prettier": "^2.6.2",
"source-map-loader": "^4.0.0",
"style-loader": "^3.3.1",
"terser-webpack-plugin": "^5.3.3",
"ts-node": "^10.8.1",
"typescript": "^4.7.3",
"webpack": "^5.73.0",
"webpack-cli": "^4.9.2",
"webpack-dev-server": "^4.9.1"
},
"dependencies": {
"debounce": "^1.2.1",
"json-rpc-2.0": "^1.3.0",
"monaco-editor-core": "^0.33.0",
"monaco-languageclient": "^1.0.1",
"vscode-languageserver-protocol": "^3.17.1"
}
}

218
packages/app/src/app.ts Normal file
View file

@ -0,0 +1,218 @@
import debounce from "debounce";
import * as monaco from "monaco-editor-core";
import { MonacoToProtocolConverter } from "monaco-languageclient";
import * as proto from "vscode-languageserver-protocol";
import Client from "./client";
import { FromServer, IntoServer } from "./codec";
import Language from "./language";
import Server from "./server";
// Supplies worker bundle URLs to the Monaco loader.
class Environment implements monaco.Environment {
  // Resolve the bundle URL for a requested worker; only the editor
  // worker service is shipped with this playground build.
  getWorkerUrl(moduleId: string, label: string) {
    const isEditorWorker = label === "editorWorkerService";
    if (!isEditorWorker) {
      throw new Error(
        `getWorkerUrl: unexpected ${JSON.stringify({ moduleId, label })}`
      );
    }
    return "./editor.worker.bundle.js";
  }
}
const monacoToProtocol = new MonacoToProtocolConverter(monaco);
/**
 * Shared Monaco theme options for the playground.
 *
 * Returns tokenizer colorization rules plus editor UI colors tuned to the
 * given variant; the result is spread into `monaco.editor.defineTheme`
 * payloads. Token names must match the scopes emitted by the tokenizer.
 */
function commonOptions(theme: "light" | "dark") {
  const isLight = theme === "light";
  // Vibrant Color Palette (tailored for light/dark)
  const vibrantColors = {
    red: isLight ? "#FF2400" : "#FF5C33",
    green: isLight ? "#1AFF00" : "#33FF33",
    blue: isLight ? "#0040FF" : "#3399FF",
    yellow: isLight ? "#FFFF00" : "#FFFF66",
    purple: isLight ? "#9933FF" : "#CC99FF",
    orange: isLight ? "#FF9900" : "#FFCC66",
  };
  return {
    rules: [
      { token: "Pipe", foreground: vibrantColors.red },
      { token: "Ampersand", foreground: vibrantColors.green },
      { token: "Semicolon", foreground: vibrantColors.blue },
      { token: "Equals", foreground: vibrantColors.yellow },
      { token: "LessThan", foreground: vibrantColors.purple },
      { token: "GreaterThan", foreground: vibrantColors.orange },
      { token: "Variable", foreground: vibrantColors.orange },
      { token: "Word", foreground: isLight ? "#000000" : "#FFFFFF" },
      { token: "String", foreground: vibrantColors.red, fontStyle: "italic" },
      { token: "Comment", foreground: vibrantColors.green, fontStyle: "italic" },
      { token: "Integer", foreground: vibrantColors.blue },
      { token: "Float", foreground: vibrantColors.yellow },
      { token: "Eof", foreground: vibrantColors.purple },
      { token: "NewLine", foreground: vibrantColors.orange },
      // Bracket Color Pairs (with darker right counterparts)
      { token: "LeftParen", foreground: vibrantColors.red },
      { token: "RightParen", foreground: "#8B0000" }, // Maroon
      { token: "LeftBrace", foreground: vibrantColors.green },
      { token: "RightBrace", foreground: "#006400" }, // Dark green
      { token: "LeftBracket", foreground: vibrantColors.blue },
      { token: "RightBracket", foreground: "#00008B" }, // Dark blue
      { token: "Comma", foreground: vibrantColors.purple },
      { token: "Dot", foreground: vibrantColors.orange },
      { token: "Colon", foreground: vibrantColors.orange },
      { token: "Underscore", foreground: "#888888" },
      { token: "Minus", foreground: vibrantColors.red },
      { token: "Plus", foreground: vibrantColors.green },
      { token: "Arrow", foreground: vibrantColors.blue },
      { token: "FatArrow", foreground: vibrantColors.yellow },
      { token: "Divide", foreground: vibrantColors.purple },
      { token: "Multiply", foreground: vibrantColors.orange },
      { token: "Percent", foreground: vibrantColors.orange },
      { token: "Dollar", foreground: "#888888" },
      { token: "Exclamation", foreground: vibrantColors.red },
      { token: "Question", foreground: vibrantColors.green },
      { token: "Tilde", foreground: vibrantColors.blue },
      { token: "At", foreground: vibrantColors.yellow },
      { token: "Caret", foreground: vibrantColors.purple },
      { token: "Shebang", foreground: vibrantColors.orange },
      { token: "Other", foreground: "#888888", background: "#FFFFFF" },
    ],
    // Editor chrome colors (background, cursor, selection, gutter).
    colors: {
      "editor.foreground": isLight ? "#000000" : "#FFFFFF",
      "editor.background": isLight ? "#F2F2F2" : "#282828",
      "editorCursor.foreground": isLight ? "#000000" : "#A7A7A7",
      "editor.lineHighlightBackground": isLight ? "#EFEFEF" : "#333333",
      "editorLineNumber.foreground": isLight ? "#AAAAAA" : "#858585",
      "editor.selectionBackground": isLight ? "#D0D0D0" : "#264F78",
      "editor.inactiveSelectionBackground": isLight ? "#E0E0E0" : "#3A3D41",
    },
  };
}
// Register the playground's light and dark Monaco themes.
function defineThemes() {
  const variants: ["src" | "src-dark", "vs" | "vs-dark", "light" | "dark"][] = [
    ["src", "vs", "light"],      // Light theme
    ["src-dark", "vs-dark", "dark"], // Dark theme
  ];
  for (const [name, base, variant] of variants) {
    monaco.editor.defineTheme(name, {
      base,
      inherit: true,
      ...commonOptions(variant),
    });
  }
}
/**
 * Playground entry point: wires the Monaco editor, the JSON-RPC client and
 * the in-browser (WASM) language server together.
 */
export default class App {
  readonly #window: Window & monaco.Window & typeof globalThis = self;
  // Byte queues shared between the client and the WASM server.
  readonly #intoServer: IntoServer = new IntoServer();
  readonly #fromServer: FromServer = FromServer.create();
  // Install the worker-URL resolver before any editor is created.
  initializeMonaco(): void {
    this.#window.MonacoEnvironment = new Environment();
  }
  /**
   * Create the editor model pre-loaded with a sample `src` program, apply
   * the custom theme, and forward edits/opens to the server as LSP
   * didChange/didOpen notifications.
   * NOTE(review): the sample below contains "trhows" — possibly intentional
   * to demo diagnostics; confirm before "fixing".
   */
  createModel(client: Client): monaco.editor.ITextModel {
    const language = Language.initialize(client);
    const value = `use { host } from std
effect Make: async + throws + execs + reads + writes {
catch() [throws]
await<T>(f: Future<T>) [async, throws] -> T
exec(arg0: string, args: stringvec) [Make] -> i32
}
struct Local {
host: host
}
impl Make for Local {
fn catch(self) [throws] {
}
fn await<T>(f: Future<T>) [async, trhows] -> T {
yield()
}
fn exec(self, arg0: string, args: vec<string>) [Vm] -> i32 {
self.host.read("jobserver")
if self.host.exec(arg0, args) {
raise(1)
}
}
}`;
    const id = language.id;
    const uri = monaco.Uri.parse("inmemory://exec.src");
    const model = monaco.editor.createModel(value, id, uri);
    // Define and set the custom themes
    defineThemes();
    const theme = "src";
    monaco.editor.setTheme(theme);
    // Debounced full-document sync on every edit.
    // NOTE(review): version is always 0 — confirm the server ignores
    // textDocument versions.
    model.onDidChangeContent(
      debounce(() => {
        const text = model.getValue();
        client.notify(proto.DidChangeTextDocumentNotification.type.method, {
          textDocument: {
            version: 0,
            uri: model.uri.toString(),
          },
          contentChanges: [
            {
              range: monacoToProtocol.asRange(model.getFullModelRange()),
              text,
            },
          ],
        } as proto.DidChangeTextDocumentParams);
      }, 200)
    );
    // didOpen must wait until the initialize handshake completes.
    client.pushAfterInitializeHook(async () => {
      client.notify(proto.DidOpenTextDocumentNotification.type.method, {
        textDocument: {
          uri: model.uri.toString(),
          languageId: language.id,
          version: 0,
          text: model.getValue(),
        },
      } as proto.DidOpenTextDocumentParams);
    });
    return model;
  }
  // Tweak the surrounding mdBook page layout and mount the editor.
  createEditor(client: Client): void {
    document
      .querySelector(".content")
      ?.querySelector("main")
      ?.setAttribute("style", "max-width: 100%;");
    document
      .querySelector(".nav-chapters")
      ?.setAttribute("style", "display: none;");
    const container = document.getElementById("editor")!;
    this.initializeMonaco();
    const model = this.createModel(client);
    monaco.editor.create(container, {
      model,
      automaticLayout: true,
    });
  }
  // Boot sequence: construct the client, load the WASM server, mount the
  // editor, then run the server and client loops concurrently.
  async run(): Promise<void> {
    const client = new Client(this.#fromServer, this.#intoServer);
    const server = await Server.initialize(this.#intoServer, this.#fromServer);
    this.createEditor(client);
    await Promise.all([server.start(), client.start()]);
  }
}

117
packages/app/src/client.ts Normal file
View file

@ -0,0 +1,117 @@
import * as jsrpc from "json-rpc-2.0";
import * as proto from "vscode-languageserver-protocol";
import { Codec, FromServer, IntoServer } from "./codec";
const consoleChannel = document.getElementById(
"channel-console"
) as HTMLTextAreaElement;
/**
 * JSON-RPC client bridging the editor to the in-browser language server:
 * outgoing messages are LSP-framed and pushed into `intoServer`; responses,
 * notifications and server requests are consumed from `fromServer`.
 */
export default class Client extends jsrpc.JSONRPCServerAndClient {
  // Callbacks deferred until the "initialize" handshake has completed.
  afterInitializedHooks: (() => Promise<void>)[] = [];
  #fromServer: FromServer;
  constructor(fromServer: FromServer, intoServer: IntoServer) {
    super(
      new jsrpc.JSONRPCServer(),
      new jsrpc.JSONRPCClient(async (json: jsrpc.JSONRPCRequest) => {
        // Frame with LSP headers and hand off to the server's input queue.
        const encoded = Codec.encode(json);
        intoServer.enqueue(encoded);
        // Requests (id != null) await their matching response; notifications
        // are fire-and-forget.
        if (null != json.id) {
          // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
          const response = await fromServer.responses.get(json.id)!;
          this.client.receive(response as jsrpc.JSONRPCResponse);
        }
      })
    );
    this.#fromServer = fromServer;
  }
  /**
   * Perform the LSP lifecycle (logMessage handler, initialize/initialized,
   * post-init hooks) and then pump server traffic forever.
   */
  async start(): Promise<void> {
    // process "window/logMessage": client <- server
    this.addMethod(proto.LogMessageNotification.type.method, (params) => {
      const { type, message } = params as {
        type: proto.MessageType;
        message: string;
      };
      // Prefix the console line with the LSP severity tag.
      switch (type) {
        case proto.MessageType.Error: {
          consoleChannel.value += "[error] ";
          break;
        }
        case proto.MessageType.Warning: {
          consoleChannel.value += " [warn] ";
          break;
        }
        case proto.MessageType.Info: {
          consoleChannel.value += " [info] ";
          break;
        }
        case proto.MessageType.Log: {
          consoleChannel.value += " [log] ";
          break;
        }
      }
      consoleChannel.value += message;
      consoleChannel.value += "\n";
      return;
    });
    // request "initialize": client <-> server
    await (this.request(proto.InitializeRequest.type.method, {
      processId: null,
      clientInfo: {
        name: "src-language-client",
      },
      trace: proto.TraceValues.Messages,
      capabilities: {
        general: {
        },
        window : {
          showMessage: {
            dynamicRegistration: true,
          },
        },
        textDocument: {
          definition: {
            dynamicRegistration: true,
            symbolKind: [proto.SymbolKind.String],
          },
          documentHighlight: {
            dynamicRegistration: true,
          },
          documentSymbol: {
            dynamicRegistration: true,
          },
          hover: {},
        },
      },
      rootUri: null,
    } as proto.InitializeParams) as Promise<jsrpc.JSONRPCResponse>);
    // notify "initialized": client --> server
    this.notify(proto.InitializedNotification.type.method, {});
    await Promise.all(
      this.afterInitializedHooks.map((f: () => Promise<void>) => f())
    );
    // Pump both server-originated streams; these loops never terminate.
    await Promise.all([this.processNotifications(), this.processRequests()]);
  }
  // Forward each server notification into the JSON-RPC dispatcher.
  async processNotifications(): Promise<void> {
    for await (const notification of this.#fromServer.notifications) {
      await this.receiveAndSend(notification);
    }
  }
  // Forward each server-initiated request into the JSON-RPC dispatcher.
  async processRequests(): Promise<void> {
    for await (const request of this.#fromServer.requests) {
      await this.receiveAndSend(request);
    }
  }
  // Register work to run once the initialize handshake completes.
  pushAfterInitializeHook(...hooks: (() => Promise<void>)[]): void {
    this.afterInitializedHooks.push(...hooks);
  }
}

46
packages/app/src/codec.ts Normal file
View file

@ -0,0 +1,46 @@
import * as jsrpc from "json-rpc-2.0";
import * as vsrpc from "vscode-jsonrpc";
import Bytes from "./codec/bytes";
import StreamDemuxer from "./codec/demuxer";
import Headers from "./codec/headers";
import Queue from "./codec/queue";
import Tracer from "./tracer";
export const encoder = new TextEncoder();
export const decoder = new TextDecoder();
// Converts JSON-RPC payloads to and from LSP's length-prefixed wire bytes.
export class Codec {
  // Serialize a request/response, prefix the LSP headers, emit UTF-8 bytes.
  static encode(json: jsrpc.JSONRPCRequest | jsrpc.JSONRPCResponse): Uint8Array {
    return Bytes.encode(Headers.add(JSON.stringify(json)));
  }
  // Strip the LSP headers from the bytes and parse the JSON body.
  static decode<T>(data: Uint8Array): T {
    return JSON.parse(Headers.remove(Bytes.decode(data))) as T;
  }
}
// FIXME: tracing efficiency
// Byte queue feeding the server; each enqueued frame is also echoed (with
// its Content-Length header stripped) to the client trace panel.
export class IntoServer extends Queue<Uint8Array> implements AsyncGenerator<Uint8Array, never, void> {
  enqueue(item: Uint8Array): void {
    Tracer.client(Headers.remove(decoder.decode(item)));
    super.enqueue(item);
  }
}
// Server-to-client side of the transport: a writable byte sink that also
// exposes the demultiplexed responses, notifications and requests.
export interface FromServer extends WritableStream<Uint8Array> {
  // Response lookup by request id (promise resolves when the reply arrives).
  readonly responses: { get(key: number | string): null | Promise<vsrpc.ResponseMessage> };
  readonly notifications: AsyncGenerator<vsrpc.NotificationMessage, never, void>;
  readonly requests: AsyncGenerator<vsrpc.RequestMessage, never, void>;
}
// eslint-disable-next-line @typescript-eslint/no-namespace
export namespace FromServer {
  // Factory: the concrete implementation is the stream demuxer.
  export function create(): FromServer {
    return new StreamDemuxer();
  }
}

View file

@ -0,0 +1,28 @@
import { encoder, decoder } from "../codec";
// Thin helpers over the shared TextEncoder/TextDecoder plus typed-array
// concatenation.
export default class Bytes {
  // UTF-8 encode a string into bytes.
  static encode(input: string): Uint8Array {
    return encoder.encode(input);
  }
  // UTF-8 decode bytes into a string.
  static decode(input: Uint8Array): string {
    return decoder.decode(input);
  }
  // Concatenate any number of typed arrays into one freshly allocated array
  // of the same kind.
  static append<T extends { length: number; set(arr: T, offset: number): void }>(
    constructor: { new (length: number): T },
    ...arrays: T[]
  ) {
    const total = arrays.reduce((sum, arr) => sum + arr.length, 0);
    const joined = new constructor(total);
    arrays.reduce((offset, arr) => {
      joined.set(arr, offset);
      return offset + arr.length;
    }, 0);
    return joined;
  }
}

View file

@ -0,0 +1,73 @@
import * as vsrpc from "vscode-jsonrpc";
import Bytes from "./bytes";
import PromiseMap from "./map";
import Queue from "./queue";
import Tracer from "../tracer";
/**
 * Splits the server's raw byte stream into three channels — responses,
 * notifications and server requests — each exposed as its own awaitable
 * collection.
 */
export default class StreamDemuxer extends Queue<Uint8Array> {
  readonly responses: PromiseMap<number | string, vsrpc.ResponseMessage> = new PromiseMap();
  readonly notifications: Queue<vsrpc.NotificationMessage> = new Queue<vsrpc.NotificationMessage>();
  readonly requests: Queue<vsrpc.RequestMessage> = new Queue<vsrpc.RequestMessage>();
  // Keeps the demux loop running for the lifetime of the instance.
  readonly #start: Promise<void>;
  constructor() {
    super();
    this.#start = this.start();
  }
  // Frame-parsing loop: accumulate bytes, read the Content-Length header,
  // then parse and route each complete JSON-RPC message.
  private async start(): Promise<void> {
    let contentLength: null | number = null;
    let buffer = new Uint8Array();
    for await (const bytes of this) {
      buffer = Bytes.append(Uint8Array, buffer, bytes);
      // check if the content length is known
      if (null == contentLength) {
        // if not, try to match the prefixed headers
        const match = Bytes.decode(buffer).match(/^Content-Length:\s*(\d+)\s*/);
        if (null == match) continue;
        // try to parse the content-length from the headers
        const length = parseInt(match[1]);
        if (isNaN(length)) throw new Error("invalid content length");
        // slice the headers since we now have the content length
        // NOTE(review): match[0].length counts UTF-16 chars while
        // buffer.slice counts bytes; headers are ASCII so they agree here.
        buffer = buffer.slice(match[0].length);
        // set the content length
        contentLength = length;
      }
      // if the buffer doesn't contain a full message; await another iteration
      if (buffer.length < contentLength) continue;
      // decode buffer to a string
      // NOTE(review): decodes the WHOLE remaining buffer; if a second frame
      // has already arrived, JSON.parse below would throw on the trailing
      // text — assumes the server writes one frame per chunk. Confirm.
      const delimited = Bytes.decode(buffer);
      // reset the buffer
      buffer = buffer.slice(contentLength);
      // reset the contentLength
      contentLength = null;
      const message = JSON.parse(delimited) as vsrpc.Message;
      Tracer.server(message);
      // demux the message stream
      if (vsrpc.Message.isResponse(message) && null != message.id) {
        this.responses.set(message.id, message);
        continue;
      }
      if (vsrpc.Message.isNotification(message)) {
        this.notifications.enqueue(message);
        continue;
      }
      if (vsrpc.Message.isRequest(message)) {
        this.requests.enqueue(message);
        continue;
      }
    }
  }
}

View file

@ -0,0 +1,9 @@
// Adds/strips the LSP `Content-Length` framing header.
export default class Headers {
  /**
   * Prefix `message` with its Content-Length header.
   *
   * Per the LSP base protocol, Content-Length is the byte count of the
   * UTF-8 encoded body — not the UTF-16 `string.length` — so encode first
   * and measure the bytes. Using `message.length` mis-frames any message
   * containing non-ASCII characters.
   */
  static add(message: string): string {
    const byteLength = new TextEncoder().encode(message).length;
    return `Content-Length: ${byteLength}\r\n\r\n${message}`;
  }
  // Remove a leading Content-Length header (and the blank line), if present.
  static remove(delimited: string): string {
    return delimited.replace(/^Content-Length:\s*\d+\s*/, "");
  }
}

View file

@ -0,0 +1,68 @@
/**
 * A map whose `get` returns a promise that resolves once `set` supplies a
 * value for that key — used to match JSON-RPC responses to request ids.
 */
export default class PromiseMap<K, V extends { toString(): string }> {
  #map: Map<K, PromiseMap.Entry<V>> = new Map();
  /**
   * Await the value for `key`.
   * NOTE(review): once a key is resolved its entry becomes
   * `{ status: "resolved" }` with no stored value, so a second `get` for
   * the same key returns null — each value can be awaited only once.
   */
  get(key: K & { toString(): string }): null | Promise<V> {
    let initialized: PromiseMap.Entry<V>;
    // if the entry doesn't exist, set it
    if (!this.#map.has(key)) {
      initialized = this.#set(key);
    } else {
      // otherwise return the entry
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      initialized = this.#map.get(key)!;
    }
    // if the entry is a pending promise, return it
    if (initialized.status === "pending") {
      return initialized.promise;
    } else {
      // otherwise return null
      return null;
    }
  }
  // Create (or fetch) the entry for `key`; resolves immediately when a
  // `value` is supplied.
  #set(key: K, value?: V): PromiseMap.Entry<V> {
    if (this.#map.has(key)) {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      return this.#map.get(key)!;
    }
    // placeholder resolver for entry
    let resolve = (item: V) => {
      void item;
    };
    // promise for entry (which assigns the resolver
    const promise = new Promise<V>((resolver) => {
      resolve = resolver;
    });
    // the initialized entry
    const initialized: PromiseMap.Entry<V> = { status: "pending", resolve, promise };
    if (null != value) {
      initialized.resolve(value);
    }
    // set the entry
    this.#map.set(key, initialized);
    return initialized;
  }
  // Publish `value` for `key`, waking any pending `get`.
  set(key: K & { toString(): string }, value: V): this {
    const initialized = this.#set(key, value);
    // if the promise is pending ...
    if (initialized.status === "pending") {
      // ... set the entry status to resolved to free the promise
      this.#map.set(key, { status: "resolved" });
      // ... and resolve the promise with the given value
      initialized.resolve(value);
    }
    return this;
  }
  // Number of keys ever touched (both pending and resolved).
  get size(): number {
    return this.#map.size;
  }
}
// eslint-disable-next-line @typescript-eslint/no-namespace
export namespace PromiseMap {
  // Entry lifecycle: pending (awaitable) -> resolved (value delivered).
  export type Entry<V> =
    | { status: "pending"; resolve: (item: V) => void; promise: Promise<V> }
    | { status: "resolved" };
}

View file

@ -0,0 +1,103 @@
/**
 * Unbounded async FIFO usable three ways at once: imperative
 * enqueue/dequeue, `for await` iteration, and as a WritableStream sink.
 * Items are modelled as a queue of promises; resolvers pile up when
 * consumers outpace producers.
 */
export default class Queue<T> implements WritableStream<T>, AsyncGenerator<T, never, void> {
  readonly #promises: Promise<T>[] = [];
  readonly #resolvers: ((item: T) => void)[] = [];
  readonly #observers: ((item: T) => void)[] = [];
  #closed = false;
  #locked = false;
  readonly #stream: WritableStream<T>;
  // Append one unresolved promise (and its resolver) to the given queues.
  static #__add<X>(promises: Promise<X>[], resolvers: ((item: X) => void)[]): void {
    promises.push(
      new Promise((resolve) => {
        resolvers.push(resolve);
      }),
    );
  }
  // Resolve the oldest waiting promise with `item`, creating one if needed.
  static #__enqueue<X>(closed: boolean, promises: Promise<X>[], resolvers: ((item: X) => void)[], item: X): void {
    if (!closed) {
      if (!resolvers.length) Queue.#__add(promises, resolvers);
      const resolve = resolvers.shift()!; // eslint-disable-line @typescript-eslint/no-non-null-assertion
      resolve(item);
    }
  }
  constructor() {
    // NOTE(review): `closed` is captured BY VALUE here, so the stream's
    // write callback sees the construction-time snapshot (false) even if
    // #closed is later flipped — confirm whether that is intended.
    const closed = this.#closed;
    const promises = this.#promises;
    const resolvers = this.#resolvers;
    this.#stream = new WritableStream({
      write(item: T): void {
        Queue.#__enqueue(closed, promises, resolvers, item);
      },
    });
  }
  #add(): void {
    return Queue.#__add(this.#promises, this.#resolvers);
  }
  // Push an item; a blocked consumer is woken immediately.
  enqueue(item: T): void {
    return Queue.#__enqueue(this.#closed, this.#promises, this.#resolvers, item);
  }
  // Pop the oldest item, waiting for one if the queue is currently empty.
  dequeue(): Promise<T> {
    if (!this.#promises.length) this.#add();
    const item = this.#promises.shift()!; // eslint-disable-line @typescript-eslint/no-non-null-assertion
    return item;
  }
  isEmpty(): boolean {
    return !this.#promises.length;
  }
  // True when consumers are waiting on items that haven't arrived yet.
  isBlocked(): boolean {
    return !!this.#resolvers.length;
  }
  // Net item count: queued promises minus outstanding resolvers.
  get length(): number {
    return this.#promises.length - this.#resolvers.length;
  }
  // Async-iterator step; also fans each value out to registered observers.
  async next(): Promise<IteratorResult<T, never>> {
    const done = false;
    const value = await this.dequeue();
    for (const observer of this.#observers) {
      observer(value);
    }
    return { done, value };
  }
  // Iteration never finishes: return() yields a promise that never settles.
  return(): Promise<IteratorResult<T, never>> {
    return new Promise(() => {
      // empty
    });
  }
  throw(err: Error): Promise<IteratorResult<T, never>> {
    return new Promise((_resolve, reject) => {
      reject(err);
    });
  }
  [Symbol.asyncIterator](): AsyncGenerator<T, never, void> {
    return this;
  }
  // WritableStream facade, delegated to the internal sink.
  get locked(): boolean {
    return this.#stream.locked;
  }
  abort(reason?: Error): Promise<void> {
    return this.#stream.abort(reason);
  }
  close(): Promise<void> {
    return this.#stream.close();
  }
  getWriter(): WritableStreamDefaultWriter<T> {
    return this.#stream.getWriter();
  }
}

View file

@ -0,0 +1,6 @@
import "../assets/index.module.css";
import App from "./app";
// Boot the playground application; surface startup failures on the console.
const app = new App();
app.run().catch(console.error);

View file

@ -0,0 +1,211 @@
// import * as jsrpc from "json-rpc-2.0";
import {
MonacoToProtocolConverter,
ProtocolToMonacoConverter,
} from "monaco-languageclient";
import * as monaco from "monaco-editor-core";
import * as proto from "vscode-languageserver-protocol";
import Client from "./client";
import { tokenizer, as_js_string } from "./server";
export const monacoToProtocol = new MonacoToProtocolConverter(monaco);
export const protocolToMonaco = new ProtocolToMonacoConverter(monaco);
let language: null | Language;
/**
 * Registers the `src` language with Monaco and wires each editor feature
 * (document symbols, highlights, hover, semantic tokens, line tokenizer)
 * to the language server through the shared JSON-RPC client.
 */
export default class Language
  implements monaco.languages.ILanguageExtensionPoint
{
  readonly id: string;
  readonly aliases: string[];
  readonly extensions: string[];
  readonly mimetypes: string[];
  private constructor(client: Client) {
    const { id, aliases, extensions, mimetypes } = Language.extensionPoint();
    this.id = id;
    this.aliases = aliases;
    this.extensions = extensions;
    this.mimetypes = mimetypes;
    this.registerLanguage(client);
  }
  // Static description of the language registration (id, file types).
  static extensionPoint(): monaco.languages.ILanguageExtensionPoint & {
    aliases: string[];
    extensions: string[];
    mimetypes: string[];
  } {
    const id = "src lang";
    const aliases = ["src"];
    const extensions = [".src"];
    const mimetypes = ["text/src"];
    return { id, extensions, aliases, mimetypes };
  }
  // Register the language and all of its feature providers with Monaco.
  private registerLanguage(client: Client): void {
    void client;
    monaco.languages.register(Language.extensionPoint());
    // "textDocument/documentSymbol": outline entries from the server.
    monaco.languages.registerDocumentSymbolProvider(this.id, {
      // eslint-disable-next-line
      async provideDocumentSymbols(
        model,
        token
      ): Promise<monaco.languages.DocumentSymbol[]> {
        void token;
        const response = await (client.request(
          proto.DocumentSymbolRequest.type.method,
          {
            textDocument: monacoToProtocol.asTextDocumentIdentifier(model),
          } as proto.DocumentSymbolParams
        ) as Promise<proto.SymbolInformation[]>);
        const uri = model.uri.toString();
        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
        const result: monaco.languages.DocumentSymbol[] =
          protocolToMonaco.asSymbolInformations(response, uri);
        return result;
      },
    });
    // "textDocument/documentHighlight": same-symbol occurrences.
    monaco.languages.registerDocumentHighlightProvider(this.id, {
      // eslint-disable-next-line
      async provideDocumentHighlights(
        model,
        position,
        token
      ): Promise<monaco.languages.DocumentHighlight[]> {
        void token;
        const response = await (client.request(
          proto.DocumentHighlightRequest.type.method,
          {
            textDocument: monacoToProtocol.asTextDocumentIdentifier(model),
            position: monacoToProtocol.asPosition(
              position.lineNumber,
              position.column
            ),
          } as proto.DocumentHighlightParams
        ) as Promise<proto.DocumentHighlight[]>);
        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
        const result: monaco.languages.DocumentHighlight[] =
          protocolToMonaco.asDocumentHighlights(response);
        return result;
      },
    });
    // "textDocument/hover": tooltip content at the cursor position.
    monaco.languages.registerHoverProvider(this.id, {
      // eslint-disable-next-line
      async provideHover(
        model,
        position,
        token
      ): Promise<monaco.languages.Hover> {
        void token;
        const response = await (client.request(proto.HoverRequest.type.method, {
          textDocument: monacoToProtocol.asTextDocumentIdentifier(model),
          position: monacoToProtocol.asPosition(
            position.lineNumber,
            position.column
          ),
        } as proto.HoverParams) as Promise<proto.Hover>);
        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
        const result: monaco.languages.Hover =
          protocolToMonaco.asHover(response);
        return result;
      },
    });
    // "textDocument/semanticTokens": server-driven token classification.
    // NOTE(review): getLegend returns empty arrays, so decoded semantic
    // tokens have no type names to map to — confirm this is intentional.
    monaco.languages.registerDocumentSemanticTokensProvider(this.id, {
      // eslint-disable-next-line
      async provideDocumentSemanticTokens(
        model: monaco.editor.ITextModel,
        lastResultId: string | null,
        token: monaco.CancellationToken
      ): Promise<
        | monaco.languages.SemanticTokens
        | monaco.languages.SemanticTokensEdits
        | null
        | undefined
      > {
        void lastResultId;
        void token;
        const response = await (client.request(
          proto.SemanticTokensRequest.type.method,
          {
            textDocument: monacoToProtocol.asTextDocumentIdentifier(model),
          } as proto.SemanticTokensParams
        ) as Promise<proto.SemanticTokens>);
        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
        const result: monaco.languages.SemanticTokens =
          protocolToMonaco.asSemanticTokens(response);
        return result;
      },
      getLegend: function (): monaco.languages.SemanticTokensLegend {
        console.log("getLegend");
        return {
          tokenTypes: [],
          tokenModifiers: [],
        };
      },
      releaseDocumentSemanticTokens: function (
        resultId: string | undefined
      ): void {
        console.log("releaseDocumentSemanticTokens");
        void resultId;
      },
    });
    // Line tokenizer backed by the WASM lexer (syntax highlighting).
    monaco.languages.setTokensProvider(this.id, {
      // eslint-disable-next-line
      getInitialState(): monaco.languages.IState {
        // Stateless tokenizer: one shared state that compares equal to all.
        return {
          clone: function (): monaco.languages.IState {
            return this;
          },
          equals: function (other: monaco.languages.IState): boolean {
            return true;
          },
        };
      },
      // eslint-disable-next-line
      tokenize(
        line: string,
        state: monaco.languages.IState
      ): monaco.languages.ILineTokens {
        if (tokenizer) {
          const result = tokenizer(line);
          let tokens = result.map((token) => {
            let scope = as_js_string ? as_js_string(token.scope) : "";
            return {
              startIndex: token.start,
              scopes: scope,
            };
          });
          console.log(tokens);
          return {
            endState: state,
            tokens,
          };
        }
        // WASM tokenizer not loaded yet: emit no tokens for this line.
        return {
          endState: state,
          tokens: [],
        };
      },
    });
  }
  // Create the singleton on first call; later calls return the existing
  // instance with a warning.
  static initialize(client: Client): Language {
    if (null == language) {
      language = new Language(client);
    } else {
      console.warn("Language already initialized; ignoring");
    }
    return language;
  }
}

View file

@ -0,0 +1,39 @@
import init, { InitOutput, serve, ServerConfig, tokenize, token_type_as_js_string } from "../assets/wasm/src_lsp_browser";
import { FromServer, IntoServer } from "./codec";
let server: null | Server;
let tokenizer: null | typeof tokenize;
let as_js_string: null | typeof token_type_as_js_string;
export { tokenizer, as_js_string };
/**
 * Singleton wrapper around the WASM language server: loads the module once
 * and runs `serve` over the shared in/out byte queues.
 */
export default class Server {
  readonly initOutput: InitOutput;
  readonly #intoServer: IntoServer;
  readonly #fromServer: FromServer;
  private constructor(initOutput: InitOutput, intoServer: IntoServer, fromServer: FromServer) {
    this.initOutput = initOutput;
    this.#intoServer = intoServer;
    this.#fromServer = fromServer;
  }
  // Load the WASM module on first call and publish its tokenizer hooks;
  // subsequent calls warn and return the existing instance.
  static async initialize(intoServer: IntoServer, fromServer: FromServer): Promise<Server> {
    if (null == server) {
      const initOutput = await init();
      server = new Server(initOutput, intoServer, fromServer);
      tokenizer = tokenize;
      as_js_string = token_type_as_js_string;
    } else {
      console.warn("Server already initialized; ignoring");
    }
    return server;
  }
  // Run the language-server loop; resolves only when serving stops.
  async start(): Promise<void> {
    const config = new ServerConfig(this.#intoServer, this.#fromServer);
    await serve(config);
  }
}

View file

@ -0,0 +1,17 @@
import * as proto from "vscode-languageserver-protocol";
const clientChannel = document.getElementById("channel-client") as HTMLTextAreaElement;
const serverChannel = document.getElementById("channel-server") as HTMLTextAreaElement;
// Appends protocol traffic to the client/server trace textareas.
export default class Tracer {
  // Log one client -> server message line.
  static client(message: string): void {
    clientChannel.value += `${message}\n`;
  }
  // Log one server -> client message line; objects are JSON-stringified.
  static server(input: string | proto.Message): void {
    let message: string;
    if (typeof input === "string") {
      message = input;
    } else {
      message = JSON.stringify(input);
    }
    serverChannel.value += `${message}\n`;
  }
}

View file

@ -0,0 +1,8 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "dist",
"rootDir": "src"
},
"exclude": []
}

View file

@ -0,0 +1,94 @@
// @ts-check
const { CleanWebpackPlugin } = require("clean-webpack-plugin");
const CopyWebpackPlugin = require("copy-webpack-plugin");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const path = require("path");
const webpack = require("webpack");
/** @type {import("webpack").Configuration & { devServer?: import("webpack-dev-server").Configuration } } */
const config = {
experiments: {
asyncWebAssembly: true,
},
mode: "production",
target: "web",
entry: {
app: "./src/index.ts",
"editor.worker": "monaco-editor-core/esm/vs/editor/editor.worker.js",
},
resolve: {
alias: {
vscode: require.resolve("monaco-languageclient/vscode-compatibility"),
},
extensions: [".ts", ".js", ".json", ".ttf"],
fallback: {
fs: false,
child_process: false,
net: false,
crypto: false,
path: require.resolve("path-browserify"),
},
},
output: {
globalObject: "self",
filename: "[name].bundle.js",
path: path.resolve(__dirname, "../../book/playground"),
},
module: {
rules: [
{
test: /\.ts?$/,
loader: "esbuild-loader",
options: {
loader: "ts",
target: "es2022",
minify: true,
},
},
{
test: /\.css$/,
use: ["style-loader", "css-loader"],
},
{
test: /\.(woff|woff2|eot|ttf|otf)$/i,
type: "asset/resource",
},
],
},
plugins: [
new webpack.ProgressPlugin(),
new CleanWebpackPlugin(),
// new CopyWebpackPlugin({
// patterns: [
// {
// from: "../../node_modules/web-tree-sitter/tree-sitter.wasm",
// },
// ],
// }),
new HtmlWebpackPlugin({
template: "../../book/playground/index.html",
scriptLoading: "module",
title: "src-lsp Playground",
}),
],
optimization: {
minimize: true,
},
performance: {
hints: false,
},
devServer: {
static: {
directory: path.join(__dirname, "dist"),
},
compress: true,
port: 9000,
client: {
progress: true,
reconnect: false,
},
},
};
module.exports = config;

1
panzoom.min.js vendored Normal file

File diff suppressed because one or more lines are too long

21
skill-tree.css Normal file
View file

@ -0,0 +1,21 @@
/* the lines within the edges */
.edge:active path,
.edge:hover path {
stroke: fuchsia;
stroke-width: 3;
stroke-opacity: 1;
}
/* arrows are typically drawn with a polygon */
.edge:active polygon,
.edge:hover polygon {
stroke: fuchsia;
stroke-width: 3;
fill: fuchsia;
stroke-opacity: 1;
fill-opacity: 1;
}
/* If you happen to have text and want to color that as well... */
.edge:active text,
.edge:hover text {
fill: fuchsia;
}

27
skill-tree.js Normal file
View file

@ -0,0 +1,27 @@
// Fetches the Graphviz dot file at `dot_path` as text, renders it to an
// SVG element with Viz.js, and appends the result to the document body.
function loadSkillTree(dot_path) {
    const renderer = new Viz();
    fetch(dot_path)
        .then((response) => response.text())
        .then((dotSource) => {
            renderer.renderSVGElement(dotSource).then((svgElement) => {
                document.body.appendChild(svgElement);
            });
        });
}
// Renders `dotText.dot_text` (Graphviz source) into the div identified by
// `divId`, then enables pan/zoom on the rendered graphic's root child.
function convertDivToSkillTree(divId, dotText) {
    new Viz().renderSVGElement(dotText.dot_text).then((svgElem) => {
        const container = document.getElementById(divId);
        container.appendChild(svgElem);
        const graph = svgElem.children[0];
        panzoom(graph, { bounds: true, boundsPadding: 0.1 });
    });
}
// SKILL_TREES is expected to be defined by the embedding page as a list of
// { id, value } entries — TODO confirm against the page generator.
for (let obj of SKILL_TREES) {
convertDivToSkillTree(obj.id, obj.value);
}

View file

@ -2,13 +2,9 @@ use std::sync::{Arc, Mutex};
use salsa::DebugWithDb;
#[derive(Default)]
#[salsa::db(crate::Jar)]
pub(crate) struct Database {
pub struct Database {
storage: salsa::Storage<Self>,
// The logs are only used for testing and demonstrating reuse:

137
src/analyzer/mod.rs Normal file
View file

@ -0,0 +1,137 @@
pub mod db;
use crate::{
compiler::text::{self, Position, SourceMap, SourceProgram, Span},
lexer::Location,
parser::{
ast::{self, Node},
span::{self, ByteOrLineColOrCoord, Spanned},
},
Db,
};
use okstd::prelude::*;
/// A parsed program: the top-level expressions of one source file,
/// stored as a salsa input so downstream queries can track it.
#[salsa::input]
pub struct SyntaxTree {
exprs: Vec<Spanned<ast::Node>>,
}
impl SyntaxTree {
/// Returns the narrowest top-level expression whose span overlaps `pos`.
///
/// Folds over every expression in the tree: each one that overlaps `pos`
/// becomes a candidate, and when multiple candidates overlap, the one
/// with the smaller `span_size()` wins — i.e. the innermost node.
/// Returns `None` when nothing overlaps `pos`.
fn find_span(self, db: &dyn Db, pos: &ByteOrLineColOrCoord) -> Option<Spanned<Node>> {
self.exprs(db).iter().fold(None, |acc, expr| {
debug!("Checking if {:#?} overlaps with {:?}\n", expr, pos);
if expr.overlap(pos) {
debug!("{:?} overlaps with {:#?}\n", expr, pos);
match acc {
Some(acc) => {
debug!(
"Comparing {:#?} with {:#?}\n",
expr.span_size(),
acc.span_size()
);
// Keep whichever candidate covers the smaller span.
if expr.span_size() < acc.span_size() {
Some(expr.clone())
} else {
Some(acc)
}
}
None => Some(expr.clone()),
}
} else {
acc
}
})
}
}
/// Looks up the symbol at `pos` within `src`.
///
/// NOTE(review): currently a stub — both `src` and `pos` are ignored and
/// the query always returns `None`. Presumably it should build a
/// `SyntaxTree` and delegate to `SyntaxTree::find_span`; TODO confirm
/// (the commented-out tests in `mod tests` below exercise exactly that).
#[salsa::tracked]
pub fn get_symbol(
db: &dyn Db,
src: text::SourceProgram,
pos: span::ByteOrLineColOrCoordInterned,
) -> Option<Spanned<ast::Node>> {
None
}
/// A document URL, stored as a salsa input; `url` is the identity key.
#[salsa::input]
pub struct Url {
#[id]
pub url: String,
}
/// Creates a `SourceProgram` input for `url` with contents `text`.
#[salsa::tracked]
pub fn add_file(db: &dyn Db, url: Url, text: String) -> SourceProgram {
SourceProgram::new(db, url.url(db), text)
}
#[cfg(test)]
mod tests {
use span::{ByteOrLineColOrCoord, ByteOrLineColOrCoordInterned};
use super::db::Database;
use super::*;
use insta::assert_snapshot;
// test_span_text!(test_get_symbol, "fn main()[] {}", "main()", 0,0)
// which expands to:
// ```
// #[okstd::log(debug)]
// #[okstd::test]
// fn test_get_symbol() {
// let db = &Database::default();
//
// let src = text::SourceProgram::new(
// db,
// "inmemory://test".to_string(),
// r#"fn main()[] {}"#.to_string(),
// );
// let symb: Option<Spanned<Expression>> = get_symbol(
// db,
// src,
// ByteOrLineColOrCoordInterned::new(db, ByteOrLineColOrCoord::LineCol(0, 0)),
// );
// assert_snapshot!(symb.unwrap(), @r###"main()"###);
// }```
// Defines a snapshot test asserting that the symbol found at
// ($line, $col) of $src renders as $expected (see expansion above).
macro_rules! test_span_text {
($name:ident, $src:expr, $expected:expr, $line:expr, $col:expr) => {
#[okstd::log(debug)]
#[okstd::test]
fn $name() {
let db = &Database::default();
let src = text::SourceProgram::new(
db,
"inmemory://test".to_string(),
$src.to_string(),
);
let symb: Option<Spanned<Node>> = get_symbol(
db,
src,
ByteOrLineColOrCoordInterned::new(db, ByteOrLineColOrCoord::LineCol($line, $col)),
);
if symb.is_none() {
panic!("Symbol not found");
}
assert_snapshot!(symb.unwrap(), @$expected);
}
};
}
// NOTE(review): every invocation below is commented out, so this module
// currently compiles no tests — presumably pending a real `get_symbol`
// implementation (it is a stub returning `None`).
// test_span_text!(
// test_get_body,
// r#"fn main()[] {
// let a = 1
// }"#,
// "let",
// 1,
// 1
// );
// test_span_text!(test_get_symbol, "fn main()[] {}", "priv fn", 0, 0);
}
/// Extracts the source text covered by `span` by slicing the backing
/// program's text with the span's range.
#[salsa::tracked]
pub fn span_text(db: &dyn Db, span: text::Spanned) -> String {
span.src(db).text(db)[span.span(db).span(db)].to_string()
}

71
src/ast/mod.rs Normal file
View file

@ -0,0 +1,71 @@
use crate::lexer::Location;
use crate::ops;
use crate::parser::span::Spanned;
use src_derive::node;
use std::fmt::Display;
use std::ops::Range;
/// AST node representing an identifier, optionally carrying generic
/// arguments (e.g. `Foo<T, U>`).
/// NOTE(review): `#[node]` is a project-local derive from `src_derive` —
/// presumably generates AST-node boilerplate; confirm against that crate.
#[node]
pub struct Ident {
name: String,
generics: Vec<Ident>,
}
/// AST node representing a field: an optional visibility, a name, and a
/// type identifier.
#[node]
pub struct Field {
vis: Option<Visibility>,
name: String,
ty: Ident,
}
/// An enum representing the different types of literals that can be used
/// in an expression: booleans, floats, integers, and strings.
pub enum Literal {
Bool(bool),
Float(f64),
Integer(i64),
String(String),
}
#[derive(Debug)]
/// An enum representing the visibility of a field or method.
/// Displayed as the source keywords `pub` / `priv` (see the `Display`
/// impl below).
pub enum Visibility {
Private,
Public,
}
impl Display for Visibility {
    /// Writes the visibility as its source keyword: `pub` or `priv`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let keyword = match self {
            Visibility::Private => "priv",
            Visibility::Public => "pub",
        };
        f.write_str(keyword)
    }
}
/// An enum representing the different operators that can be used in an
/// expression: arithmetic (`Add`..`Mod`), logical (`And`, `Or`, `Not`),
/// and comparison (`Eq`..`Ge`).
pub enum Operator {
Add,
Sub,
Mul,
Div,
Mod,
And,
Or,
Not,
Eq,
Ne,
Lt,
Le,
Gt,
Ge,
}
/// The union of all AST node kinds defined in this module.
pub enum Node {
Ident(Ident),
Field(Field),
Literal(Literal),
Visibility(Visibility),
Operator(Operator),
}

View file

@ -1,36 +1,36 @@
use std::ops::Range;
use crate::lexer::Location;
pub struct Errors<'a>(Vec<lalrpop_util::ErrorRecovery<Location, crate::lexer::Token<'a>, &'a str>>);
pub struct Errors<'a>(Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::Token<'a>, &'a str>>);
impl<'a> From<Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::Token<'a>, &'a str>>>
impl<'a> From<Vec<lalrpop_util::ErrorRecovery<Location, crate::lexer::Token<'a>, &'a str>>>
for Errors<'a>
{
fn from(
errors: Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::Token<'a>, &'a str>>,
value: Vec<lalrpop_util::ErrorRecovery<Location, crate::lexer::Token<'a>, &'a str>>,
) -> Self {
Self(errors)
Self(value)
}
}
impl<'a> IntoIterator for Errors<'a> {
type Item = Range<usize>;
type Item = Range<Location>;
type IntoIter = <Vec<std::ops::Range<usize>> as IntoIterator>::IntoIter;
type IntoIter = <Vec<std::ops::Range<Location>> as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self.0
.into_iter()
.map(|error| match error.error {
lalrpop_util::ParseError::InvalidToken { location } => location..location,
lalrpop_util::ParseError::UnrecognizedEof { location, expected: _ } => {
location..location
lalrpop_util::ParseError::UnrecognizedEof {
location,
expected: _,
} => location..location,
lalrpop_util::ParseError::UnrecognizedToken { token, expected: _ } => {
token.0..token.2
}
lalrpop_util::ParseError::UnrecognizedToken { token, expected: _ } => token.0..token.2,
lalrpop_util::ParseError::ExtraToken { token } => token.0..token.2,
lalrpop_util::ParseError::User { error: _ } => todo!(),
})
@ -39,38 +39,45 @@ impl<'a> IntoIterator for Errors<'a> {
}
}
fn handle_errors(errors: Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::Token, &str>>, src: &str) -> String {
fn handle_errors(
errors: Vec<lalrpop_util::ErrorRecovery<Location, crate::lexer::Token, &str>>,
src: &str,
) -> String {
let mut pretty = String::new();
let mut last_end = 0;
let mut last_end = Location::default();
for error in errors {
match error.error {
lalrpop_util::ParseError::InvalidToken { location: _ } => todo!(),
lalrpop_util::ParseError::UnrecognizedEof { location: _, expected: _ } => todo!(),
lalrpop_util::ParseError::UnrecognizedEof {
location: _,
expected: _,
} => todo!(),
lalrpop_util::ParseError::UnrecognizedToken { token, expected } => {
// find the line and column of the start and end tokens,
// and print the line with a caret pointing to the error
let start = token.0;
let end = token.2;
let start_line = src[..start].rfind('\n').map_or(0, |i| i + 1);
let end_line = src[end..].find('\n').map_or(src.len(), |i| end + i);
let start_line = start.line;
let end_line = end.line;
let line = &src[start_line..end_line];
let start_col = start - start_line;
let end_col = end - start_line;
let start_col = start.col;
let end_col = end.col;
// pretty.push_str(&src[last_end..start]);
pretty.push_str(&format!("error: unexpected token {:?}, expected one of {:?}\n", token.1, expected));
pretty.push_str(&format!(
"error: unexpected token {:?}, expected one of {:?}\n",
token.1, expected
));
pretty.push_str(&line);
pretty.push_str("\n");
pretty.push_str(&" ".repeat(start_col));
pretty.push_str(&"^".repeat(end_col - start_col));
last_end = end;
},
}
lalrpop_util::ParseError::ExtraToken { token: _ } => todo!(),
lalrpop_util::ParseError::User { error: _ } => todo!(),
};
}
// pretty.push_str(&src[last_end..]);
pretty
}
}

View file

@ -1,62 +1,57 @@
use std::{
collections::BTreeMap,
};
use std::collections::BTreeMap;
use crate::{
compiler::{errors::Errors},
parser::ast::{self},
Db,
};
use okstd::prelude::debug;
use crate::{compiler::errors::Errors, lexer::Location, parser::ast, Db};
use self::text::SourceProgram;
mod db;
mod errors;
pub mod errors;
pub mod ir;
mod tests;
pub mod text;
#[cfg(test)]
mod tests;
#[salsa::tracked]
pub fn compile(db: &dyn Db, src: SourceProgram) -> ir::Program {
let mut errors: Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::Token, &str>> = vec![];
let mut errors: Vec<lalrpop_util::ErrorRecovery<Location, crate::lexer::Token<'_>, &str>> = vec![];
let wrapper = crate::lexer::TripleIterator::new(src.text(db));
let t = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
let t = crate::parser::src::SourceParser::new().parse(&mut errors, db, wrapper);
// let mut errors_in_positions: Vec<ir::Position> = vec![];
if !errors.is_empty() {
for _error_range in Into::<Errors>::into(errors) {
text::to_spans(db, src);
}
panic!();
let spans = text::to_spans(db, src);
let _tokens = spans.tokens(db);
for _error_range in Into::<Errors>::into(errors) {}
// panic!();
}
let modul = t.unwrap();
let mut symbol_table = BTreeMap::new();
for toplevel in modul.0 {
match *toplevel {
ast::Expression::BinaryExpression(_) => todo!(),
ast::Expression::Bool(_) => todo!(),
ast::Expression::Integer(_) => todo!(),
ast::Expression::Float(_) => todo!(),
ast::Expression::Ident(_) => todo!(),
ast::Expression::Binding(_) => todo!(),
ast::Expression::FnCall(_) => todo!(),
ast::Expression::String(_) => todo!(),
ast::Expression::FnDef(_) => {}
ast::Expression::ShellCommand(_, _) => todo!(),
ast::Expression::EffectDef(_) => todo!(),
ast::Expression::StructDef(_) => todo!(),
ast::Expression::UseDef(usedef) => {
let import =
ir::Import::new(db, usedef.0.into_iter().map(|x| x.0).collect(), usedef.1 .0);
for import in add_imports(db, import) {
symbol_table.insert(import, ir::Symbol::new(db, import));
}
match toplevel.1 {
ast::Node::Visibility(_) => todo!(),
ast::Node::BinaryExpression(_) => todo!(),
ast::Node::Bool(_) => todo!(),
ast::Node::Integer(_) => todo!(),
ast::Node::Float(_) => todo!(),
ast::Node::Ident(_) => todo!(),
ast::Node::Binding(_) => todo!(),
ast::Node::FnCall(_) => todo!(),
ast::Node::String(_) => todo!(),
ast::Node::FnDef(_) => {
debug!("Function definition");
}
ast::Expression::Keyword(_) => todo!(),
ast::Expression::ImplDef(_) => todo!(),
ast::Expression::Branch(_) => todo!(),
ast::Expression::Error => todo!(),
ast::Node::EffectDef(_) => todo!(),
ast::Node::StructDef(_) => todo!(),
ast::Node::UseDef(usedef) => {}
ast::Node::Keyword(_) => todo!(),
ast::Node::ImplDef(_) => todo!(),
ast::Node::Branch(_) => todo!(),
ast::Node::Error => todo!(),
ast::Node::FieldAccess(_) => todo!(),
}
}
let program = ir::Program::new(db, vec![], symbol_table);

View file

@ -1,17 +1,10 @@
#[cfg(test)]
use okstd::prelude::*;
use super::*;
#[okstd::log(off)]
#[okstd::test]
#[okstd::log(debug)]
fn debug() {
use salsa::{database::AsSalsaDatabase, storage::HasJarsDyn};
debug!("hello");
use super::{db, text::SourceProgram};
let src = r#"use { native_fs, native_exec } from host
use { fs } from std
@ -21,9 +14,9 @@ struct Innitguv {
current_pid: i32
}
"#;
let db = &crate::compiler::db::Database::default().enable_logging();
let db = &crate::analyzer::db::Database::default().enable_logging();
let prog = SourceProgram::new(db, src.to_string());
let prog = SourceProgram::new(db, src.to_string(), "test".to_string());
let res = super::compile(db, prog);
println!("{:?}", prog);
println!("{:?}", res.symbols(db));

View file

@ -1,11 +1,24 @@
use std::ops::Range;
use bitflags::bitflags;
use crate::Db;
use bitflags::bitflags;
#[salsa::input]
#[derive(Default)]
pub struct Document {
#[id]
pub url: String,
pub text: ropey::Rope,
}
/// Represents the source program text.
#[salsa::input]
pub struct SourceProgram {
#[id]
pub url: String,
#[return_ref]
pub text: String,
}
@ -30,17 +43,16 @@ pub struct Spanned {
#[salsa::interned]
pub struct Span {
/// The range of the span in the source program text.
pub span: (usize, usize),
pub span: Range<usize>,
}
/// Represents a position in the source code.
#[salsa::interned]
pub struct Position {
/// The line number of the position.
l: usize,
pub line: usize,
/// The column number of the position.
c: usize,
pub column: usize,
}
/// Represents the source map of the program.
@ -69,7 +81,7 @@ bitflags! {
}
#[inline]
fn cmp_range<T: Ord>(a: &Range<T>, b: &Range<T>) -> SpanOverlap {
pub fn cmp_range<T: Ord>(a: &Range<T>, b: &Range<T>) -> SpanOverlap {
let mut overlap = SpanOverlap::NONE;
if a.contains(&b.start) {
overlap |= SpanOverlap::START;
@ -80,23 +92,8 @@ fn cmp_range<T: Ord>(a: &Range<T>, b: &Range<T>) -> SpanOverlap {
overlap
}
/// todo(sevki): split this into two functions
#[salsa::tracked]
pub fn to_spans(db: &dyn Db, src: SourceProgram) -> SourceMap {
let line_lengths: Vec<Range<usize>> = calculate_line_lengths(db, src)
.into_iter()
.scan(0, |acc, x| {
let range = *acc..*acc + x;
*acc += x;
Some(range)
})
.collect();
// reverse the line lengths and make it peekable, essentially
// turning it into a stack
let mut line_lengths = line_lengths.into_iter().enumerate().rev().peekable();
let mut spans = vec![];
let lexer = crate::lexer::Lexer::new(src.text(db), 0);
@ -106,50 +103,65 @@ pub fn to_spans(db: &dyn Db, src: SourceProgram) -> SourceMap {
// Lexer tokens have a start and end position, and we want to map these to the line lengths
// first we iterate over the lexer tokens
for token in lexer {
let _size = token.end - token.start;
// then we peek at the first line
let mut start: Option<(usize, usize)> = None;
loop {
if let Some((line_no, span)) = line_lengths.clone().peek() {
// if the token is within the line
let overlap = cmp_range(&span, &(token.start..token.end));
if overlap == SpanOverlap::NONE && start.is_none() {
// if the token is not within the line
line_lengths.next();
}
if overlap == SpanOverlap::START || overlap == SpanOverlap::BOTH {
// if the token is within the line
start = Some((*line_no, span.start));
// we do not need to iterate more.
break;
}
}
match token.node {
crate::lexer::Token::Eof => break,
crate::lexer::Token::NewLine => continue,
_ => {}
}
if start.is_none() {
// if the token is not within the line
break;
}
let start = start.unwrap();
let leading_chars = src.text(db).get(start.1..token.start);
let column = leading_chars.map(|x| x.chars().count()).unwrap_or(0);
/*
```text
1,1 7
| |
# Intro
8 lorem ipsum dolor sit amet
13 byte start
6th column, 2nd line
```
*/
spans.push(Spanned::new(
db,
Span::new(db, (token.start, token.end)),
Span::new(db, token.start..token.end),
src,
Position::new(db, start.0, column),
Position::new(db, token.pos.line, token.pos.col),
));
}
SourceMap::new(db, spans)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::analyzer::db::Database;
use okstd::prelude::*;
#[okstd::log(off)]
#[okstd::test]
fn test_to_spans() {
let db = Database::default();
let src = SourceProgram::new(
&db,
"inmemory://test".to_string(),
r#"fn main() {}"#.to_string(),
);
let spans = to_spans(&db, src);
let tokens = spans.tokens(&db);
for token in tokens.iter() {
debug!("line {:?}", token.pos(&db).line(&db));
debug!("column {:?}", token.pos(&db).column(&db));
}
assert_eq!(tokens.len(), 6);
assert_eq!(tokens[0].pos(&db).line(&db), 0);
}
#[okstd::log(trace)]
#[okstd::test]
fn test_to_spans_multiline() {
let db = Database::default();
let src = SourceProgram::new(
&db,
"inmemory://test".to_string(),
r#"fn main() {
let x = 1
}"#
.to_string(),
);
let spans = to_spans(&db, src);
let tokens = spans.tokens(&db);
for token in tokens.iter() {
debug!("line {:?}", token.pos(&db).line(&db));
debug!("column {:?}", token.pos(&db).column(&db));
}
assert_eq!(tokens.len(), 10);
assert_eq!(tokens[0].pos(&db).line(&db), 0);
}
}

22
src/core/mod.rs Normal file
View file

@ -0,0 +1,22 @@
// Equality comparison trait; `ne` has a default implementation as the
// negation of `eq`. NOTE(review): this shadows the standard trait of the
// same name — presumably this is the src-language core library defining
// its own prelude; confirm intent.
trait PartialEq {
fn eq(&self, other: &Self) -> bool;
fn ne(&self, other: &Self) -> bool {
!self.eq(other)
}
}
// Ordering comparison trait built on `PartialEq`; `lt`/`le`/`gt`/`ge` are
// all default methods derived from `partial_cmp`.
// NOTE(review): `Ordering` is not imported in this file as shown — either
// it is in a prelude or this is src-lang (not Rust) source; confirm.
trait PartialOrd: PartialEq {
fn partial_cmp(&self, other: &Self) -> Option<Ordering>;
fn lt(&self, other: &Self) -> bool {
matches!(self.partial_cmp(other), Some(Ordering::Less))
}
fn le(&self, other: &Self) -> bool {
matches!(self.partial_cmp(other), Some(Ordering::Less) | Some(Ordering::Equal))
}
fn gt(&self, other: &Self) -> bool {
matches!(self.partial_cmp(other), Some(Ordering::Greater))
}
fn ge(&self, other: &Self) -> bool {
matches!(self.partial_cmp(other), Some(Ordering::Greater) | Some(Ordering::Equal))
}
}

View file

@ -1,9 +1,11 @@
#[cfg(test)]
use proptest::{prelude::*};
#[allow(unused_imports)]
use crate::lexer::{Position, Spanned, Lexer, Token};
use super::Word;
proptest! {
#[test]
@ -12,5 +14,15 @@ proptest! {
let lexer = Lexer::new(&input, 0);
let tokens: Vec<Spanned<Token, Position>> = lexer.collect();
assert_eq!(tokens.len(), 4);
let expected = vec![
Token::Word(Word::Let),
Token::Word(Word::Ident(&rnd.0)),
Token::Equals,
// Token::String(rnd.1), // ignore this as proptest can't handle this expression
];
for (actual, expected) in tokens.iter().zip(expected.iter()) {
}
}
}

View file

@ -1,9 +1,10 @@
#[cfg(test)]
use crate::lexer::{Lexer, TokenStreamDisplay};
use insta::assert_snapshot;
use okstd::prelude::*;
#[cfg(test)]
#[okstd::test]
fn test_empty_lexer() {
let input = " ";
@ -19,9 +20,9 @@ fn test_1_plus_1() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Integer(1), 1:1
- Plus, 1:3
- Integer(1), 1:5
- Integer(1), 0:1
- Plus, 0:3
- Integer(1), 0:5
"###);
}
@ -31,11 +32,11 @@ fn test_1_plus_1_plus_1() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Integer(1), 1:1
- Plus, 1:3
- Integer(1), 1:5
- Plus, 1:7
- Integer(1), 1:9
- Integer(1), 0:1
- Plus, 0:3
- Integer(1), 0:5
- Plus, 0:7
- Integer(1), 0:9
"###);
}
@ -45,13 +46,13 @@ fn test_1_plus_1_plus_1_plus_1() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Integer(1), 1:1
- Plus, 1:3
- Integer(1), 1:5
- Divide, 1:7
- Integer(1), 1:9
- Percent, 1:11
- Integer(1), 1:13
- Integer(1), 0:1
- Plus, 0:3
- Integer(1), 0:5
- Divide, 0:7
- Integer(1), 0:9
- Percent, 0:11
- Integer(1), 0:13
"###);
}
@ -61,10 +62,10 @@ fn test_let_a_equals_1() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Let), 1:3
- Word(Ident("a")), 1:5
- Equals, 1:7
- Integer(1), 1:9
- Word(Let), 0:3
- Word(Ident("a")), 0:5
- Equals, 0:7
- Integer(1), 0:9
"###);
}
@ -74,12 +75,12 @@ fn test_let_a_equals_1_plus_1() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Let), 1:3
- Word(Ident("a")), 1:5
- Equals, 1:7
- Integer(1), 1:9
- Plus, 1:11
- Integer(1), 1:13
- Word(Let), 0:3
- Word(Ident("a")), 0:5
- Equals, 0:7
- Integer(1), 0:9
- Plus, 0:11
- Integer(1), 0:13
"###);
}
@ -89,12 +90,12 @@ fn test_let_a_equals_1_plus_3_point_14() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Let), 1:3
- Word(Ident("a")), 1:5
- Equals, 1:7
- Integer(1), 1:9
- Plus, 1:11
- Float(3.14), 1:16
- Word(Let), 0:3
- Word(Ident("a")), 0:5
- Equals, 0:7
- Integer(1), 0:9
- Plus, 0:11
- Float(3.14), 0:16
"###);
}
@ -104,14 +105,14 @@ fn test_let_a_equals_1_plus_3_point_14_plus_1() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Let), 1:3
- Word(Ident("a")), 1:5
- Equals, 1:7
- Integer(1), 1:9
- Plus, 1:11
- Float(3.14), 1:16
- Plus, 1:18
- Integer(1), 1:20
- Word(Let), 0:3
- Word(Ident("a")), 0:5
- Equals, 0:7
- Integer(1), 0:9
- Plus, 0:11
- Float(3.14), 0:16
- Plus, 0:18
- Integer(1), 0:20
"###);
}
@ -121,12 +122,12 @@ fn test_fn_foo() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Fn), 1:2
- Word(Ident("foo")), 1:6
- LeftParen, 1:7
- RightParen, 1:8
- LeftBrace, 1:10
- RightBrace, 1:11
- Word(Fn), 0:2
- Word(Ident("foo")), 0:6
- LeftParen, 0:7
- RightParen, 0:8
- LeftBrace, 0:10
- RightBrace, 0:11
"###);
}
@ -136,13 +137,13 @@ fn test_fn_foo_bar() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Fn), 1:2
- Word(Ident("foo")), 1:6
- LeftParen, 1:7
- Word(Ident("bar")), 1:10
- RightParen, 1:11
- LeftBrace, 1:13
- RightBrace, 1:14
- Word(Fn), 0:2
- Word(Ident("foo")), 0:6
- LeftParen, 0:7
- Word(Ident("bar")), 0:10
- RightParen, 0:11
- LeftBrace, 0:13
- RightBrace, 0:14
"###);
}
@ -154,17 +155,17 @@ fn test_fn_foo_bar_baz() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Fn), 1:2
- Word(Ident("foo")), 1:6
- LeftParen, 1:7
- Word(Ident("bar")), 1:10
- Comma, 1:11
- Word(Ident("baz")), 1:15
- RightParen, 1:16
- LeftBrace, 1:18
- Word(Fn), 0:2
- Word(Ident("foo")), 0:6
- LeftParen, 0:7
- Word(Ident("bar")), 0:10
- Comma, 0:11
- Word(Ident("baz")), 0:15
- RightParen, 0:16
- LeftBrace, 0:18
- NewLine, 1:0
- NewLine, 2:0
- NewLine, 3:0
- RightBrace, 3:1
- RightBrace, 2:1
"###);
}
@ -176,20 +177,20 @@ fn test_fn_foo_bar_baz_qux() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Fn), 1:2
- Word(Ident("foo")), 1:6
- LeftParen, 1:7
- Word(Ident("bar")), 1:10
- Comma, 1:11
- Word(Ident("baz")), 1:15
- RightParen, 1:16
- LeftBrace, 1:18
- Word(Fn), 0:2
- Word(Ident("foo")), 0:6
- LeftParen, 0:7
- Word(Ident("bar")), 0:10
- Comma, 0:11
- Word(Ident("baz")), 0:15
- RightParen, 0:16
- LeftBrace, 0:18
- NewLine, 1:0
- Word(Ident("qux")), 1:7
- LeftParen, 1:8
- RightParen, 1:9
- NewLine, 2:0
- Word(Ident("qux")), 2:7
- LeftParen, 2:8
- RightParen, 2:9
- NewLine, 3:0
- RightBrace, 3:1
- RightBrace, 2:1
"###);
}
@ -201,21 +202,21 @@ fn test_fn_foo_bar_baz_qux_quux() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Fn), 1:2
- Word(Ident("foo")), 1:6
- LeftParen, 1:7
- Word(Ident("bar")), 1:10
- Comma, 1:11
- Word(Ident("baz")), 1:15
- RightParen, 1:16
- LeftBrace, 1:18
- Word(Fn), 0:2
- Word(Ident("foo")), 0:6
- LeftParen, 0:7
- Word(Ident("bar")), 0:10
- Comma, 0:11
- Word(Ident("baz")), 0:15
- RightParen, 0:16
- LeftBrace, 0:18
- NewLine, 1:0
- Word(Ident("qux")), 1:7
- LeftParen, 1:8
- Word(Ident("quux")), 1:12
- RightParen, 1:13
- NewLine, 2:0
- Word(Ident("qux")), 2:7
- LeftParen, 2:8
- Word(Ident("quux")), 2:12
- RightParen, 2:13
- NewLine, 3:0
- RightBrace, 3:1
- RightBrace, 2:1
"###);
}
@ -227,80 +228,96 @@ fn test_fn_foo_bar_baz_qux_quux_quuz() {
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Fn), 1:2
- Word(Ident("foo")), 1:6
- LeftParen, 1:7
- Word(Ident("bar")), 1:10
- Comma, 1:11
- Word(Ident("baz")), 1:15
- RightParen, 1:16
- LeftBrace, 1:18
- Word(Fn), 0:2
- Word(Ident("foo")), 0:6
- LeftParen, 0:7
- Word(Ident("bar")), 0:10
- Comma, 0:11
- Word(Ident("baz")), 0:15
- RightParen, 0:16
- LeftBrace, 0:18
- NewLine, 1:0
- Word(Ident("qux")), 1:7
- LeftParen, 1:8
- Word(Ident("quux")), 1:12
- Comma, 1:13
- Float(3.14), 1:18
- Comma, 1:19
- Integer(3735928559), 1:29
- RightParen, 1:30
- NewLine, 2:0
- Word(Ident("qux")), 2:7
- LeftParen, 2:8
- Word(Ident("quux")), 2:12
- Comma, 2:13
- Float(3.14), 2:18
- Comma, 2:19
- Integer(3735928559), 2:29
- RightParen, 2:30
- NewLine, 3:0
- RightBrace, 3:1
- RightBrace, 2:1
"###);
}
#[okstd::test]
fn test_func_with_genetics() {
fn test_func_with_generics() {
let input = "fn foo<T>(bar: T)[throws, awaits, execs] {
qux()
}";
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Fn), 1:2
- Word(Ident("foo")), 1:6
- LessThan, 1:7
- Word(Ident("T")), 1:8
- GreaterThan, 1:9
- LeftParen, 1:10
- Word(Ident("bar")), 1:13
- Colon, 1:14
- Word(Ident("T")), 1:16
- RightParen, 1:17
- LeftBracket, 1:18
- Word(Ident("throws")), 1:24
- Comma, 1:25
- Word(Ident("awaits")), 1:32
- Comma, 1:33
- Word(Ident("execs")), 1:39
- RightBracket, 1:40
- LeftBrace, 1:42
- Word(Fn), 0:2
- Word(Ident("foo")), 0:6
- LessThan, 0:7
- Word(Ident("T")), 0:8
- GreaterThan, 0:9
- LeftParen, 0:10
- Word(Ident("bar")), 0:13
- Colon, 0:14
- Word(Ident("T")), 0:16
- RightParen, 0:17
- LeftBracket, 0:18
- Word(Ident("throws")), 0:24
- Comma, 0:25
- Word(Ident("awaits")), 0:32
- Comma, 0:33
- Word(Ident("execs")), 0:39
- RightBracket, 0:40
- LeftBrace, 0:42
- NewLine, 1:0
- Word(Ident("qux")), 1:7
- LeftParen, 1:8
- RightParen, 1:9
- NewLine, 2:0
- Word(Ident("qux")), 2:7
- LeftParen, 2:8
- RightParen, 2:9
- NewLine, 3:0
- RightBrace, 3:1
- RightBrace, 2:1
"###);
}
#[okstd::test]
fn test_func_call_with_genetics() {
fn test_func_call_with_generics() {
let input = "foo<T>(bar: T)[vm]";
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Ident("foo")), 1:3
- LessThan, 1:4
- Word(Ident("T")), 1:5
- GreaterThan, 1:6
- LeftParen, 1:7
- Word(Ident("bar")), 1:10
- Colon, 1:11
- Word(Ident("T")), 1:13
- RightParen, 1:14
- LeftBracket, 1:15
- Word(Ident("vm")), 1:17
- RightBracket, 1:18
- Word(Ident("foo")), 0:3
- LessThan, 0:4
- Word(Ident("T")), 0:5
- GreaterThan, 0:6
- LeftParen, 0:7
- Word(Ident("bar")), 0:10
- Colon, 0:11
- Word(Ident("T")), 0:13
- RightParen, 0:14
- LeftBracket, 0:15
- Word(Ident("vm")), 0:17
- RightBracket, 0:18
"###);
}
#[okstd::test]
fn test_identifier_with_member_access() {
let input = "foo.bar.baz.qux";
let lexer = Lexer::new(input, 0);
let actual_tokens = lexer.map_while(|t| Some(t)).collect::<Vec<_>>();
assert_snapshot!(TokenStreamDisplay::from(actual_tokens), @r###"
- Word(Ident("foo")), 0:3
- Dot, 0:4
- Word(Ident("bar")), 0:7
- Dot, 0:8
- Word(Ident("baz")), 0:11
- Dot, 0:12
- Word(Ident("qux")), 0:15
"###);
}

View file

@ -4,10 +4,8 @@ lexer.rs is a lexer for the src language
use std::{fmt::Display, iter::Iterator, iter::Peekable, str::Chars};
use okstd::prelude::*;
// Identifier
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Variable<'input> {
@ -96,9 +94,11 @@ impl<T, P> Spanned<T, P> {
pos,
}
}
}
impl Spanned<Token<'_>> {
pub fn len(&self) -> usize {
self.end - self.start
self.node.to_string().chars().count()
}
}
@ -131,7 +131,47 @@ impl std::fmt::Display for Position {
// display trait implementation for Token
impl std::fmt::Display for Token<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
let str = match self {
Token::Pipe => "|".to_string(),
Token::Ampersand => "&".to_string(),
Token::Semicolon => ";".to_string(),
Token::Equals => "=".to_string(),
Token::LessThan => "<".to_string(),
Token::GreaterThan => ">".to_string(),
Token::Variable(variable) => variable.to_string(),
Token::Word(word) => word.chars().collect(),
Token::String(string) => string.to_string(),
Token::Comment(comment) => comment.to_string(),
Token::Integer(number) => number.to_string(),
Token::Float(number) => number.to_string(),
Token::Eof => "".to_string(),
Token::NewLine => "\n".to_string(),
Token::LeftParen => "(".to_string(),
Token::RightParen => ")".to_string(),
Token::LeftBrace => "{".to_string(),
Token::RightBrace => "}".to_string(),
Token::LeftBracket => "[".to_string(),
Token::RightBracket => "]".to_string(),
Token::Comma => ",".to_string(),
Token::Colon => ":".to_string(),
Token::Underscore => "_".to_string(),
Token::Minus => "-".to_string(),
Token::Plus => "+".to_string(),
Token::Arrow => "->".to_string(),
Token::FatArrow => "=>".to_string(),
Token::Divide => "/".to_string(),
Token::Multiply => "*".to_string(),
Token::Percent => "%".to_string(),
Token::Dollar => "$".to_string(),
Token::Exclamation => "!".to_string(),
Token::Question => "?".to_string(),
Token::Tilde => "~".to_string(),
Token::At => "@".to_string(),
Token::Caret => "^".to_string(),
Token::Dot => ".".to_string(),
Token::Shebang => "#!".to_string(),
};
write!(f, "{}", str)
}
}
@ -163,6 +203,8 @@ pub enum Word<'input> {
From,
Where,
Self_,
Pub,
Priv,
Ident(&'input str),
FnIdent(&'input str),
Any(&'input str),
@ -200,6 +242,8 @@ impl<'input> Word<'input> {
Word::From => "from".chars(),
Word::Where => "where".chars(),
Word::Self_ => "self".chars(),
Word::Pub => "pub".chars(),
Word::Priv => "priv".chars(),
}
}
}
@ -255,6 +299,8 @@ pub enum Token<'input> {
}
impl<'input> Token<'input> {
// deprecated
#[deprecated(note = "to_chars is deprecated, use to_string instead")]
fn to_chars(&'input self) -> Chars<'input> {
match self {
Token::Pipe => "|".chars(),
@ -327,8 +373,8 @@ impl<'input> Lexer<'input> {
Self {
input,
pos,
line: 1,
col: 1,
line: 0, // Change from 1 to 0
col: 0, // Change from 1 to 0
state: State::Program,
buffer: String::new(),
peekable: input.chars().peekable(),
@ -364,34 +410,21 @@ macro_rules! set_state {
}
macro_rules! emit {
($self:expr, $state:expr => ?) => {{
let r = $self.emit_buffer().unwrap();
let r = $self.emit_buffer()?;
$self.buffer.clear();
emit!($self, $state => r)
}};
($self:expr, $state:expr => $token:expr) => {{
let start = $self.pos;
match $token {
Token::Integer (number ) => {
for c in number.to_string().chars() {
debug!("c: {}", c);
$self.advance(c);
}
}
Token::Float ( number ) => {
for c in number.to_string().chars() {
$self.advance(c);
}
}
_ => {
for c in $token.to_chars() {
$self.advance(c);
}
}
for c in $token.to_string().chars() {
$self.advance(c);
}
let end = $self.pos;
let pos = Position::new(
$self.line,
$self.col - $self.buffer.len() - 1,
$self.col - $self.buffer.len(),
end - start,
);
$self.state = $state;
@ -421,13 +454,13 @@ impl<'input> Lexer<'input> {
fn advance(&mut self, c: char) -> bool {
if self.pos + 1 > self.input.len() {
unreachable!("pos: {}, input.len: {}", self.pos, self.input.len());
return false;
}
self.pos += 1;
self.last_char = Some(c);
if c == '\n' {
self.line += 1;
self.col = 1;
self.col = 0; // Change from 1 to 0
} else {
self.col += 1;
}
@ -447,41 +480,52 @@ impl<'input> Lexer<'input> {
match self.state {
// these states cannot emit tokens
State::Program => Err(LexicalError::InvalidStateEmission(State::Program)),
State::Op => Ok(match self.buffer.chars().next().unwrap() {
'(' => Token::LeftParen,
')' => Token::RightParen,
'{' => Token::LeftBrace,
'}' => Token::RightBrace,
'>' => Token::GreaterThan,
'<' => Token::LessThan,
'|' => Token::Pipe,
'&' => Token::Ampersand,
';' => Token::Semicolon,
',' => Token::Comma,
':' => Token::Colon,
'_' => Token::Underscore,
'+' => Token::Plus,
'*' => Token::Multiply,
'[' => Token::LeftBracket,
']' => Token::RightBracket,
'%' => Token::Percent,
'@' => Token::At,
'/' => Token::Divide,
'-' => {
if self.buffer.len() == 1 {
Token::Minus
} else if self.buffer == "->" {
Token::Arrow
} else {
unreachable!("unexpected character: {}", self.buffer)
State::Op => Ok(
match self
.buffer
.chars()
.next()
.ok_or(LexicalError::UnexpectedEndOfInput)?
{
'(' => Token::LeftParen,
')' => Token::RightParen,
'{' => Token::LeftBrace,
'}' => Token::RightBrace,
'>' => Token::GreaterThan,
'<' => Token::LessThan,
'|' => Token::Pipe,
'&' => Token::Ampersand,
';' => Token::Semicolon,
',' => Token::Comma,
':' => Token::Colon,
'_' => Token::Underscore,
'+' => Token::Plus,
'*' => Token::Multiply,
'[' => Token::LeftBracket,
']' => Token::RightBracket,
'%' => Token::Percent,
'@' => Token::At,
'/' => Token::Divide,
'.' => Token::Dot,
'-' => {
if self.buffer.len() == 1 {
Token::Minus
} else if self.buffer == "->" {
Token::Arrow
} else {
return Err(LexicalError::UnexpectedCharacter(
self.buffer.chars().next().unwrap(),
));
}
}
}
'=' => Token::Equals,
_ => unreachable!(
"unexpected character: {} in state: {:?}",
self.buffer, self.state
),
}),
'=' => Token::Equals,
_ => {
return Err(LexicalError::UnexpectedCharacter(
self.buffer.chars().next().unwrap(),
))
}
},
),
State::Any => Err(LexicalError::InvalidStateEmission(State::Any)),
// these states can emit tokens
State::Comment => {
@ -550,28 +594,26 @@ impl<'input> Lexer<'input> {
"from" => Word::From,
"where" => Word::Where,
"self" => Word::Self_,
_ => {
Word::Ident(word)
// }
}
"pub" => Word::Pub,
"priv" => Word::Priv,
_ => Word::Ident(word),
};
Ok(Token::Word(word))
}
State::String(Quotation) => {
State::String(quotation) => {
let last_char = self.buffer.chars().last();
let quote = if Quotation == Quotation::Double {
let quote = if quotation == Quotation::Double {
Some('"')
} else {
Some('\'')
};
if last_char != quote {
panic!("expected: {:?}, got: {:?}", quote, last_char);
return Err(LexicalError::UnterminatedString);
}
let string = self
.input
.get(start..end)
.expect("shoulld've done something");
.ok_or(LexicalError::UnexpectedEndOfInput)?;
Ok(Token::String(string))
}
State::Number => {
@ -637,13 +679,13 @@ impl<'input> Lexer<'input> {
return Ok(());
}
'(' | ')' | '{' | '}' | '>' | '<' | '|' | '&' | ';' | ',' | ':' | '+' | '*'
| '[' | ']' | '%' | '@' | '/' | '-' | '=' | '!' => {
| '.' | '[' | ']' | '%' | '@' | '/' | '-' | '=' | '!' => {
set_state!(self, State::Op;);
debug!("to state: {:?}", self.state);
return Ok(());
}
_ => {
return Err(LexicalError::UnexpectedCharacter(c))?;
return Err(LexicalError::UnexpectedCharacter(c));
}
}
if self.pos >= self.input.len() {
@ -660,11 +702,11 @@ impl<'input> Lexer<'input> {
if let Some(c) = self.peek() {
debug!("consume_op: {}", c);
if self.state != State::Op {
return Err(LexicalError::InvalidStateEmission(self.state))?;
return Err(LexicalError::InvalidStateEmission(self.state));
}
match c {
'(' | ')' | '{' | '}' | '>' | '<' | '|' | '&' | ';' | ',' | ':' | '_' | '+'
| '/' | '*' | '[' | ']' | '%' | '@' => {
| '.' | '/' | '*' | '[' | ']' | '%' | '@' => {
let state = if self.push() { State::Eof } else { State::Any };
return emit!(self, state => ?);
}
@ -720,12 +762,11 @@ impl<'input> Lexer<'input> {
}
}
_ => {
panic!("unexpected character: '{}'", c);
return emit!(self, State::Any => ?);
return Err(LexicalError::UnexpectedCharacter(c));
}
}
}
emit!(self, self.state=> Token::Eof)
emit!(self, self.state => Token::Eof)
}
// comment state
@ -757,7 +798,7 @@ impl<'input> Lexer<'input> {
fn consume_word(&mut self) -> Result<Spanned<Token<'input>>> {
while let Some(c) = self.peek() {
match c {
'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '.' | '/' | '_' => {
'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '/' | '_' => {
if self.push() {
return emit!(self, State::Eof => ?);
}
@ -787,7 +828,7 @@ impl<'input> Lexer<'input> {
break;
} else if self.push() {
// this is a violation as it is not a number
// so panic
// so return error
return Err(LexicalError::InvalidNumberFormat);
}
}
@ -867,7 +908,7 @@ impl<'input> Lexer<'input> {
}
}
}
panic!("unexpected state: {:?}", self.state);
return Err(LexicalError::UnexpectedEndOfInput);
}
fn consume_variable(&mut self) -> Result<Spanned<Token<'input>>> {
@ -915,7 +956,9 @@ impl<'input> Iterator for Lexer<'input> {
State::Variable => self.consume_variable(),
State::Word => self.consume_word(),
State::Number => self.consume_number(),
State::Any | State::Program => unreachable!(),
State::Any | State::Program => {
return None;
}
State::Shebang => todo!(),
};
debug!(
@ -937,7 +980,7 @@ impl<'input> Iterator for Lexer<'input> {
return None;
}
}
panic!("unexpected state: {:?}", self.state);
// Removed the panic! as it's now unreachable.
}
}
@ -962,23 +1005,59 @@ impl<'input> From<Vec<Spanned<Token<'input>>>> for TokenStreamDisplay<'input> {
}
}
#[cfg(test)]
mod lexer_prop_tests;
#[cfg(test)]
mod lexer_snap_tests;
pub struct TripleIterator<'input>(Lexer<'input>);
#[derive(Debug, Clone, Default, Copy, Hash)]
pub struct Location {
pub offset: usize,
pub line: usize,
pub col: usize,
}
impl Eq for Location {}
impl PartialOrd for Location {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.offset.cmp(&other.offset))
}
}
impl PartialEq for Location {
fn eq(&self, other: &Self) -> bool {
self.offset == other.offset
}
}
impl<'input> TripleIterator<'input> {
pub fn new(input: &'input str) -> Self {
TripleIterator(Lexer::new(input, 0))
}
}
impl From<(usize, usize, usize)> for Location {
fn from((offset, line, col): (usize, usize, usize)) -> Self {
Self { offset, line, col }
}
}
impl<'input> Iterator for TripleIterator<'input> {
type Item = (usize, Token<'input>, usize);
type Item = (Location, Token<'input>, Location);
fn next(&mut self) -> Option<Self::Item> {
let token = self.0.next()?;
debug!("token: {:?}", token);
Some((token.start, token.node, token.end))
let start_pos: Location = (
token.start,
token.pos.line,
token.pos.col.wrapping_sub(token.len()),
)
.into();
let end_pos = (token.end, self.0.line, self.0.col).into();
Some((start_pos, token.node, end_pos))
}
}

View file

@ -1,14 +1,19 @@
pub mod lexer;
pub mod parser;
pub mod analyzer;
pub mod ast;
pub mod compiler;
pub mod lexer;
pub mod ops;
pub mod parser;
use compiler::text;
use crate::compiler::ir;
#[salsa::jar(db = Db)]
pub struct Jar(
parser::span::ByteOrLineColOrCoordInterned,
parser::span::SourceMap,
analyzer::SyntaxTree,
compiler::compile,
compiler::compile_effect,
compiler::add_imports,
@ -19,6 +24,7 @@ pub struct Jar(
text::Position,
text::SourceMap,
text::SourceProgram,
text::Document,
ir::Program,
ir::Function,
ir::InternedEffect,
@ -26,6 +32,10 @@ pub struct Jar(
ir::EffectDef,
ir::Import,
ir::Mangled,
analyzer::get_symbol,
analyzer::add_file,
analyzer::Url,
analyzer::span_text,
);
pub trait Db: salsa::DbWithJar<Jar> {}

10
src/ops/mod.rs Normal file
View file

@ -0,0 +1,10 @@
/// This module contains the srclang ops.
pub mod traversal {
/// The result of a traversal operation.
pub enum Result {
/// Continue the traversal.
Continue,
/// Stop the traversal.
Break,
}
}

View file

@ -1,19 +1,43 @@
use std::fmt::Display;
use proptest::prelude::*;
use std::{fmt::Display, ops::Range};
pub const ANON_FN_NAME: &str = "anonymous";
use super::span;
use super::span::*;
use src_derive::node;
use super::span::*;
#[derive(PartialEq, Debug, Clone)]
pub struct Ident(pub String, pub Option<Vec<Ident>>);
pub struct Ident(pub String, pub Option<Vec<Spanned<Ident>>>);
#[derive(PartialEq, Debug)]
impl Display for Ident {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
#[derive(PartialEq, Debug, Clone, Default)]
pub enum Visibility {
#[default]
Private,
Public,
}
impl Display for Visibility {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Visibility::Public => write!(f, "pub"),
Visibility::Private => write!(f, "priv"),
}
}
}
#[derive(PartialEq, Debug, Clone)]
pub struct StringLit(pub String);
#[derive(PartialEq, Debug)]
pub struct Binding(pub Ident, pub Box<Expression>);
#[derive(PartialEq, Debug, Clone)]
pub struct Binding(pub Spanned<Ident>, pub Box<Spanned<Node>>);
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub enum Literal {
Bool(bool),
Float(f64),
@ -21,13 +45,13 @@ pub enum Literal {
String(String),
}
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub enum Keyword {
None,
Some,
Let,
Action,
Saga,
Public,
Private,
Fn,
If,
Else,
@ -43,76 +67,155 @@ pub enum Keyword {
Where,
Self_,
}
#[derive(PartialEq, Debug)]
trait KeywordVisitor {}
impl Display for Keyword {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let kw = match self {
Keyword::None => "none",
Keyword::Some => "some",
Keyword::Let => "let",
Keyword::Fn => "fn",
Keyword::If => "if",
Keyword::Else => "else",
Keyword::Match => "match",
Keyword::Arrow => "=>",
Keyword::Struct => "struct",
Keyword::SelfValue => "self",
Keyword::When => "when",
Keyword::Effect => "effect",
Keyword::Impl => "impl",
Keyword::Use => "use",
Keyword::From => "from",
Keyword::Where => "where",
Keyword::Self_ => "Self",
Keyword::Public => "pub",
Keyword::Private => "priv",
};
write!(f, "{}", kw)
}
}
#[derive(PartialEq, Debug, Clone)]
pub enum Value {
Literal(Literal),
Ident(Ident),
}
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub struct Block<T>(pub Vec<T>);
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub struct Tuple<T>(pub Vec<T>);
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub struct Array<T>(pub Vec<T>);
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub struct BinaryOperation {
pub lhs: Box<Expression>,
pub lhs: Box<Spanned<Node>>,
pub op: Operator,
pub rhs: Box<Expression>,
pub rhs: Box<Spanned<Node>>,
}
#[derive(PartialEq, Debug)]
pub struct FnCall(pub Ident, pub Vec<Box<Expression>>);
#[derive(PartialEq, Debug, Clone)]
pub struct FnCall(pub Spanned<Ident>, pub Vec<Spanned<Node>>);
#[derive(PartialEq, Debug)]
pub enum Expression {
#[derive(PartialEq, Debug, Clone)]
pub enum Node {
BinaryExpression(BinaryOperation),
Bool(bool),
Integer(i64),
Float(f64),
Ident(Ident),
Ident(Spanned<Ident>),
Binding(Binding),
FnCall(FnCall),
String(String),
FnDef(FnDef),
ShellCommand(Vec<Ident>, Vec<Box<Expression>>),
EffectDef(EffectDef),
StructDef(StructDef),
UseDef(UseDef),
Keyword(Keyword),
ImplDef(ImplDef),
Branch(Branch),
Branch(BranchDef),
FieldAccess(FieldAccess),
Visibility(Visibility),
Error,
}
#[derive(PartialEq, Debug)]
pub struct Field(pub Ident, pub Ident);
impl Display for Node {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Node::BinaryExpression(bin) => write!(f, "{} {} {}", bin.lhs, bin.op, bin.rhs),
Node::Bool(b) => write!(f, "{}", b),
Node::Integer(i) => write!(f, "{}", i),
Node::Float(fl) => write!(f, "{}", fl),
Node::Ident(ident) => write!(f, "{}", ident.1),
Node::Binding(bind) => write!(f, "{} = {}", bind.0, bind.1),
Node::FnCall(call) => write!(
f,
"{}({})",
call.0,
call.1
.iter()
.map(|e| e.1.to_string())
.collect::<Vec<String>>()
.join(", ")
),
Node::String(s) => write!(f, "{}", s),
Node::FnDef(def) => write!(f, "{}", def.0),
Node::EffectDef(def) => write!(f, "{}", def.0),
Node::StructDef(def) => write!(f, "{}", def.0),
Node::UseDef(def) => write!(f, "{:#?}", def.0),
Node::Keyword(kw) => write!(f, "{}", kw),
Node::ImplDef(def) => write!(f, "{}", def.0),
Node::Branch(branch) => write!(f, "{}", branch.0),
Node::FieldAccess(access) => write!(f, "{}.{}", access.0, access.1),
Node::Visibility(vis) => write!(f, "{}", vis),
Node::Error => write!(f, "Error"),
}
}
}
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub enum FnArg {
Reciever,
Field(Field)
Field(Spanned<FieldDef>),
}
#[derive(PartialEq, Debug)]
impl Display for FnArg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
FnArg::Reciever => write!(f, "self"),
FnArg::Field(field) => write!(f, "{}", field.1),
}
}
}
#[derive(PartialEq, Debug, Clone)]
pub struct Prototype {
pub name: Ident,
pub args: Vec<FnArg>,
pub ret: Option<Ident>,
pub effects: Vec<Ident>,
pub name: Spanned<Ident>,
pub args: Vec<Spanned<FnArg>>,
pub ret: Option<Spanned<Ident>>,
pub effects: Vec<Spanned<Ident>>,
}
#[derive(PartialEq, Debug)]
pub struct FnDef(
pub Prototype,
pub Block<Box<Expression>>,
pub Vec<(Ident, Block<Box<Expression>>)>,
);
impl Display for Prototype {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}(", self.name)?;
for arg in self.args.iter() {
write!(f, "{}", arg)?;
}
write!(f, ")")?;
if let Some(ret) = &self.ret {
write!(f, " -> {}", ret)?;
}
Ok(())
}
}
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub enum Whitespace {
Space,
Tab,
@ -131,21 +234,10 @@ pub enum Operator {
Maybe,
Not,
Neg,
}
impl Arbitrary for Operator {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: ()) -> Self::Strategy {
prop_oneof![
Just(Operator::Add),
Just(Operator::Sub),
Just(Operator::Mul),
Just(Operator::Div),
]
.boxed()
}
Dot,
Arrow,
FatArrow,
DoubleColon,
}
impl Display for Operator {
@ -161,28 +253,148 @@ impl Display for Operator {
Operator::Maybe => "?",
Operator::Not => "!",
Operator::Neg => "-",
Operator::Dot => ".",
Operator::Arrow => "->",
Operator::FatArrow => "=>",
Operator::DoubleColon => "::",
};
write!(f, "{}", op)
}
}
#[derive(PartialEq, Debug)]
pub struct StructDef(pub Ident, pub Block<Field>);
#[derive(PartialEq, Debug, Clone)]
pub struct FieldAccess(pub Box<Spanned<Node>>, pub Box<Spanned<Node>>);
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, Clone)]
pub struct Module(pub Vec<Spanned<Node>>);
// defs
#[derive(PartialEq, Debug, Clone)]
pub struct FieldDef(
pub Spanned<Visibility>,
pub Spanned<Ident>,
pub Spanned<Ident>,
);
impl Display for FieldDef {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}: {}", self.0, self.1)
}
}
#[derive(PartialEq, Debug, Clone)]
pub struct StructDef(
pub Spanned<KeywordAndVisibility>,
pub Spanned<Ident>,
pub Block<Spanned<FieldDef>>,
);
#[derive(PartialEq, Debug, Clone)]
pub struct FnIdent(pub Ident);
#[derive(PartialEq, Debug)]
pub struct EffectDef(pub Ident, pub Vec<Ident>, pub Block<Prototype>);
#[derive(PartialEq, Debug, Clone)]
pub struct EffectDef(
pub Spanned<KeywordAndVisibility>,
pub Spanned<Ident>,
pub Vec<Spanned<Ident>>,
pub Block<Spanned<Prototype>>,
);
#[derive(PartialEq, Debug)]
pub struct UseDef(pub Vec<Ident>, pub Ident);
#[derive(PartialEq, Debug, Clone)]
pub struct UseDef(
pub Spanned<KeywordAndVisibility>,
pub Vec<Spanned<Ident>>,
pub Spanned<Ident>,
);
#[derive(PartialEq, Debug)]
pub struct ImplDef(pub Ident, pub Option<Ident>, pub Block<Box<Expression>>);
impl Display for UseDef {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{} {:?} from {}", self.0, self.1, self.2)
}
}
#[derive(PartialEq, Debug)]
pub struct Branch(pub Box<Expression>, pub Vec<(Expression, Block<Box<Expression>>)>);
#[derive(PartialEq, Debug, Clone)]
pub struct KeywordAndVisibility(pub Spanned<Keyword>, pub Spanned<Visibility>);
#[derive(PartialEq, Debug)]
pub struct Module(pub Vec<Box<Expression>>);
impl Display for KeywordAndVisibility {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{} {}", self.1, self.0)
}
}
#[derive(PartialEq, Debug, Clone)]
pub struct ImplDef(
pub Spanned<KeywordAndVisibility>,
pub Spanned<Ident>,
pub Option<Spanned<Ident>>,
pub Block<Spanned<Node>>,
);
#[derive(PartialEq, Debug, Clone)]
pub struct BranchDef(
pub Box<Spanned<Node>>,
pub Vec<(Spanned<Node>, Block<Spanned<Node>>)>,
);
// #[visitor]
#[derive(PartialEq, Debug, Clone)]
pub struct FnDef(
pub Spanned<KeywordAndVisibility>,
pub Spanned<Prototype>,
pub Block<Spanned<Node>>,
);
impl Display for FnDef {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{} {} {{", self.0, self.1)?;
for expr in self.2 .0.iter() {
write!(f, "{}", expr)?;
}
write!(f, "}}")
}
}
#[cfg(test)]
use proptest::prelude::*;
#[cfg(test)]
impl Arbitrary for Operator {
type Parameters = ();
type Strategy = BoxedStrategy<Self>;
fn arbitrary_with(_args: ()) -> Self::Strategy {
prop_oneof![
Just(Operator::Add),
Just(Operator::Sub),
Just(Operator::Mul),
Just(Operator::Div),
]
.boxed()
}
}
impl Eq for Node {}
#[cfg(test)]
mod test {
use crate::analyzer;
use super::*;
use proptest::prelude::*;
struct TestVisitor;
#[test]
fn test_binding_vistior() {
let input = "fn some()[] {let a = some_fnExpr(1, \"2\", 3)}";
let mut errors = vec![];
let wrapper = crate::lexer::TripleIterator::new(input);
let db = analyzer::db::Database::default();
let t = crate::parser::src::SourceParser::new().parse(&mut errors, &db, wrapper);
assert!(errors.is_empty());
assert!(t.is_ok());
let t = t.unwrap();
}
}

View file

@ -1,32 +1,57 @@
pub fn pretty_errors<'input>(src: &'input str, errors: Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::Token, &str>>) -> String {
use crate::lexer::Location;
pub fn pretty_errors<'input>(
src: &'input str,
errors: Vec<lalrpop_util::ErrorRecovery<Location, crate::lexer::Token<'_>, &str>>,
) -> String {
let mut pretty = String::new();
let mut last_end = 0;
for error in errors {
match error.error {
lalrpop_util::ParseError::InvalidToken { location: _ } => todo!(),
lalrpop_util::ParseError::UnrecognizedEof { location: _, expected: _ } => todo!(),
lalrpop_util::ParseError::UnrecognizedEof { location, expected } => {
let start = location;
let end = location;
let start_line = start.line;
let end_line = end.line;
let line = &src[start_line..end_line];
let start_col = start.col;
let end_col = end.col;
pretty.push_str(&format!(
"error: unexpected end of file, expected one of {:?}\n",
expected
));
pretty.push_str(&line);
pretty.push_str("\n");
pretty.push_str(&" ".repeat(start_col));
pretty.push_str(&"^".repeat(end_col - start_col));
}
lalrpop_util::ParseError::UnrecognizedToken { token, expected } => {
// find the line and column of the start and end tokens,
// and print the line with a caret pointing to the error
let start = token.0;
let end = token.2;
let start_line = src[..start].rfind('\n').map_or(0, |i| i + 1);
let end_line = src[end..].find('\n').map_or(src.len(), |i| end + i);
let start_line = start.line;
let end_line = end.line;
let line = &src[start_line..end_line];
let start_col = start - start_line;
let end_col = end - start_line;
let start_col = start.col;
let end_col = end.col;
// pretty.push_str(&src[last_end..start]);
pretty.push_str(&format!("error: unexpected token {:?}, expected one of {:?}\n", token.1, expected));
// expected is double quoted, so we need to remove the quotes
// which ends up printing like "\"(\""
let expected: Vec<&str> = expected.iter().map(|x| &x[1..x.len() - 1]).collect();
pretty.push_str(&format!(
"error: unexpected token {:?}, expected one of {:?}\n",
token.1, expected
));
pretty.push_str(&line);
pretty.push_str("\n");
pretty.push_str(&" ".repeat(start_col));
pretty.push_str(&"^".repeat(end_col - start_col));
last_end = end;
},
}
lalrpop_util::ParseError::ExtraToken { token: _ } => todo!(),
lalrpop_util::ParseError::User { error: _ } => todo!(),
};
}
// pretty.push_str(&src[last_end..]);
pretty

View file

@ -1,7 +1,26 @@
use crate::lexer::Location;
mod parser_snap_tests;
mod string;
pub mod ast;
mod errors;
#[cfg(test)]
mod parser_snap_tests;
mod string;
pub mod span;
pub mod src;
pub mod src;
// pub fn parse(
// src: &str,
// ) -> Result<
// crate::parser::ast::Module,
// Vec<lalrpop_util::ErrorRecovery<Coord, crate::lexer::Token, &str>>,
// > {
// let mut errors: Vec<lalrpop_util::ErrorRecovery<Coord, crate::lexer::Token, &str>> = vec![];
// let wrapper = crate::lexer::TripleIterator::new(src);
// let module = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
// if !errors.is_empty() {
// return Err(errors);
// }
// Ok(module.unwrap())
// }

View file

@ -1,17 +1,19 @@
use crate::parser::errors::pretty_errors;
use crate::{analyzer, lexer::Location, parser::errors::pretty_errors};
use insta::assert_snapshot;
#[cfg(test)]
#[okstd::test]
fn test_empty_parser() {
use crate::analyzer;
let input = " ";
let mut errors = vec![];
let wrapper = crate::lexer::TripleIterator::new(input);
let t = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
let db = analyzer::db::Database::default();
let t = crate::parser::src::SourceParser::new().parse(&mut errors, &db, wrapper);
assert!(errors.is_empty());
assert_snapshot!(format!("{:#?}", t.unwrap()), @r###"
let fmted = format!("{:#?}", t.unwrap());
assert_snapshot!(fmted, @r###"
Module(
[],
)
@ -23,7 +25,8 @@ fn test_fn_call_parser_with_multiple_args_and_strings() {
let input = "fn some()[] {let a = some_fnExpr(1, \"2\", 3)}";
let mut errors = vec![];
let wrapper = crate::lexer::TripleIterator::new(input);
let t = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
let db = analyzer::db::Database::default();
let t = crate::parser::src::SourceParser::new().parse(&mut errors, &db, wrapper);
if !errors.is_empty() {
panic!("{}", pretty_errors(&input, errors));
}
@ -33,28 +36,30 @@ fn test_fn_call_parser_with_multiple_args_and_strings() {
#[okstd::test]
fn test_fn_def_parser() {
let input = r#"fn call(a:b, b:c) [throws, awaits, execs] {
call(1+1)
let a = 1
} when throws {
raise(1)
}"#;
call(1+1)
let a = 1
}"#;
let mut errors = vec![];
let wrapper = crate::lexer::TripleIterator::new(input);
let t = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
assert!(errors.is_empty());
let db = analyzer::db::Database::default();
let t = crate::parser::src::SourceParser::new().parse(&mut errors, &db, wrapper);
if !errors.is_empty() {
panic!("{}", pretty_errors(&input, errors));
}
assert_snapshot!(format!("{:#?}", t.unwrap()));
}
#[okstd::test]
fn test_effect() {
let input = r#"effect VM: async + throws + execs {
catch() []
await<T>(f: Future<T>) [] -> T
exec(arg0: string, args: stringvec) []
}"#;
catch() []
await<T>(f: Future<T>) [] -> T
exec(arg0: string, args: stringvec) []
}"#;
let mut errors = vec![];
let wrapper = crate::lexer::TripleIterator::new(input);
let t = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
let db = analyzer::db::Database::default();
let t = crate::parser::src::SourceParser::new().parse(&mut errors, &db, wrapper);
assert!(errors.is_empty());
assert_snapshot!(format!("{:#?}", t.unwrap()));
}
@ -62,12 +67,13 @@ fn test_effect() {
#[okstd::test]
fn test_struct_parser() {
let input = r#"struct VM {
a: string
b: string
}"#;
a: string
b: string
}"#;
let mut errors = vec![];
let wrapper = crate::lexer::TripleIterator::new(input);
let t = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
let db = analyzer::db::Database::default();
let t = crate::parser::src::SourceParser::new().parse(&mut errors, &db, wrapper);
assert!(errors.is_empty());
assert_snapshot!(format!("{:#?}", t.unwrap()));
}
@ -76,34 +82,35 @@ fn test_struct_parser() {
fn test_enum_parser() {
let input = r#"use { exec } from host
effect Make: async + throws + execs + reads + writes {
catch() [throws]
await<T>(f: Future<T>) [async, throws] -> T
exec(arg0: string, args: stringvec) [Make] -> i32
read(name: string) [reads] -> string
write(name: string, value: string) [writes]
effect Make: async + throws + execs + reads + writes {
catch() [throws]
await<T>(f: Future<T>) [async, throws] -> T
exec(arg0: string, args: stringvec) [Make] -> i32
read(name: string) [reads] -> string
write(name: string, value: string) [writes]
}
struct Local {
host: host
}
impl Make for Local {
fn catch(self) [throws] {
}
struct Local {
host: host
fn await<T>(f: Future<T>) [async, trhows] -> T {
yield()
}
impl Make for Local {
fn catch(self) [throws] {
}
fn await<T>(f: Future<T>) [async, trhows] -> T {
yield()
fn exec(self, arg0: string, args: vec<string>) [Vm] -> i32 {
self.host.read("jobserver").await
if self.host.exec(arg0, args).await {
raise(1)
}
fn exec(self, arg0: string, args: vec<string>) [Vm] -> i32 {
self.host.read("jobserver")
if self.host.exec(arg0, args) {
raise(1)
}
}
}"#;
let mut errors: Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::Token, &str>> = vec![];
}
}"#;
let mut errors = vec![];
let wrapper = crate::lexer::TripleIterator::new(input);
let t = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
let db = analyzer::db::Database::default();
let t = crate::parser::src::SourceParser::new().parse(&mut errors, &db, wrapper);
if !errors.is_empty() {
panic!("{}", pretty_errors(&input, errors));
}

View file

@ -1,118 +0,0 @@
---
source: src/parser/parser_snap_tests.rs
expression: "format!(\"{:#?}\", t.unwrap())"
---
Module(
[
EffectDef(
EffectDef(
Ident(
"VM",
None,
),
[
Ident(
"async",
None,
),
Ident(
"throws",
None,
),
Ident(
"execs",
None,
),
],
Block(
[
Prototype {
name: Ident(
"catch",
None,
),
args: [],
ret: None,
effects: [],
},
Prototype {
name: Ident(
"await",
Some(
[
Ident(
"T",
None,
),
],
),
),
args: [
Field(
Field(
Ident(
"f",
None,
),
Ident(
"Future",
Some(
[
Ident(
"T",
None,
),
],
),
),
),
),
],
ret: Some(
Ident(
"T",
None,
),
),
effects: [],
},
Prototype {
name: Ident(
"exec",
None,
),
args: [
Field(
Field(
Ident(
"arg0",
None,
),
Ident(
"string",
None,
),
),
),
Field(
Field(
Ident(
"args",
None,
),
Ident(
"stringvec",
None,
),
),
),
],
ret: None,
effects: [],
},
],
),
),
),
],
)

View file

@ -1,384 +0,0 @@
---
source: src/parser/parser_snap_tests.rs
expression: "format!(\"{:#?}\", t.unwrap())"
---
Module(
[
UseDef(
UseDef(
[
Ident(
"crosmvm",
None,
),
],
Ident(
"std",
None,
),
),
),
EffectDef(
EffectDef(
Ident(
"Vm",
None,
),
[
Ident(
"async",
None,
),
Ident(
"throws",
None,
),
Ident(
"execs",
None,
),
],
Block(
[
Prototype {
name: Ident(
"catch",
None,
),
args: [],
ret: None,
effects: [
Ident(
"throws",
None,
),
],
},
Prototype {
name: Ident(
"await",
Some(
[
Ident(
"T",
None,
),
],
),
),
args: [
Field(
Field(
Ident(
"f",
None,
),
Ident(
"Future",
Some(
[
Ident(
"T",
None,
),
],
),
),
),
),
],
ret: Some(
Ident(
"T",
None,
),
),
effects: [
Ident(
"async",
None,
),
Ident(
"throws",
None,
),
],
},
Prototype {
name: Ident(
"exec",
None,
),
args: [
Field(
Field(
Ident(
"arg0",
None,
),
Ident(
"string",
None,
),
),
),
Field(
Field(
Ident(
"args",
None,
),
Ident(
"stringvec",
None,
),
),
),
],
ret: Some(
Ident(
"i32",
None,
),
),
effects: [
Ident(
"Vm",
None,
),
],
},
],
),
),
),
StructDef(
StructDef(
Ident(
"coopvm",
None,
),
Block(
[],
),
),
),
ImplDef(
ImplDef(
Ident(
"Vm",
None,
),
Some(
Ident(
"coopvm",
None,
),
),
Block(
[
FnDef(
FnDef(
Prototype {
name: Ident(
"catch",
None,
),
args: [
Reciever,
],
ret: None,
effects: [
Ident(
"throws",
None,
),
],
},
Block(
[],
),
[],
),
),
FnDef(
FnDef(
Prototype {
name: Ident(
"await",
Some(
[
Ident(
"T",
None,
),
],
),
),
args: [
Field(
Field(
Ident(
"f",
None,
),
Ident(
"Future",
Some(
[
Ident(
"T",
None,
),
],
),
),
),
),
],
ret: Some(
Ident(
"T",
None,
),
),
effects: [
Ident(
"async",
None,
),
Ident(
"trhows",
None,
),
],
},
Block(
[
FnCall(
FnCall(
Ident(
"yield",
None,
),
[],
),
),
],
),
[],
),
),
FnDef(
FnDef(
Prototype {
name: Ident(
"exec",
None,
),
args: [
Reciever,
Field(
Field(
Ident(
"arg0",
None,
),
Ident(
"string",
None,
),
),
),
Field(
Field(
Ident(
"args",
None,
),
Ident(
"vec",
Some(
[
Ident(
"string",
None,
),
],
),
),
),
),
],
ret: Some(
Ident(
"i32",
None,
),
),
effects: [
Ident(
"Vm",
None,
),
],
},
Block(
[
Branch(
Branch(
FnCall(
FnCall(
Ident(
"self.exec",
None,
),
[
Ident(
Ident(
"arg0",
None,
),
),
Ident(
Ident(
"args",
None,
),
),
],
),
),
[
(
Bool(
true,
),
Block(
[
FnCall(
FnCall(
Ident(
"raise",
None,
),
[],
),
),
],
),
),
],
),
),
],
),
[],
),
),
],
),
),
),
],
)

View file

@ -1,492 +0,0 @@
---
source: src/parser/parser_snap_tests.rs
assertion_line: 110
expression: "format!(\"{:#?}\", t.unwrap())"
---
Module(
[
UseDef(
UseDef(
[
Ident(
"exec",
None,
),
],
Ident(
"host",
None,
),
),
),
EffectDef(
EffectDef(
Ident(
"Make",
None,
),
[
Ident(
"async",
None,
),
Ident(
"throws",
None,
),
Ident(
"execs",
None,
),
Ident(
"reads",
None,
),
Ident(
"writes",
None,
),
],
Block(
[
Prototype {
name: Ident(
"catch",
None,
),
args: [],
ret: None,
effects: [
Ident(
"throws",
None,
),
],
},
Prototype {
name: Ident(
"await",
Some(
[
Ident(
"T",
None,
),
],
),
),
args: [
Field(
Field(
Ident(
"f",
None,
),
Ident(
"Future",
Some(
[
Ident(
"T",
None,
),
],
),
),
),
),
],
ret: Some(
Ident(
"T",
None,
),
),
effects: [
Ident(
"async",
None,
),
Ident(
"throws",
None,
),
],
},
Prototype {
name: Ident(
"exec",
None,
),
args: [
Field(
Field(
Ident(
"arg0",
None,
),
Ident(
"string",
None,
),
),
),
Field(
Field(
Ident(
"args",
None,
),
Ident(
"stringvec",
None,
),
),
),
],
ret: Some(
Ident(
"i32",
None,
),
),
effects: [
Ident(
"Make",
None,
),
],
},
Prototype {
name: Ident(
"read",
None,
),
args: [
Field(
Field(
Ident(
"name",
None,
),
Ident(
"string",
None,
),
),
),
],
ret: Some(
Ident(
"string",
None,
),
),
effects: [
Ident(
"reads",
None,
),
],
},
Prototype {
name: Ident(
"write",
None,
),
args: [
Field(
Field(
Ident(
"name",
None,
),
Ident(
"string",
None,
),
),
),
Field(
Field(
Ident(
"value",
None,
),
Ident(
"string",
None,
),
),
),
],
ret: None,
effects: [
Ident(
"writes",
None,
),
],
},
],
),
),
),
StructDef(
StructDef(
Ident(
"Local",
None,
),
Block(
[
Field(
Ident(
"host",
None,
),
Ident(
"host",
None,
),
),
],
),
),
),
ImplDef(
ImplDef(
Ident(
"Make",
None,
),
Some(
Ident(
"Local",
None,
),
),
Block(
[
FnDef(
FnDef(
Prototype {
name: Ident(
"catch",
None,
),
args: [
Reciever,
],
ret: None,
effects: [
Ident(
"throws",
None,
),
],
},
Block(
[],
),
[],
),
),
FnDef(
FnDef(
Prototype {
name: Ident(
"await",
Some(
[
Ident(
"T",
None,
),
],
),
),
args: [
Field(
Field(
Ident(
"f",
None,
),
Ident(
"Future",
Some(
[
Ident(
"T",
None,
),
],
),
),
),
),
],
ret: Some(
Ident(
"T",
None,
),
),
effects: [
Ident(
"async",
None,
),
Ident(
"trhows",
None,
),
],
},
Block(
[
FnCall(
FnCall(
Ident(
"yield",
None,
),
[],
),
),
],
),
[],
),
),
FnDef(
FnDef(
Prototype {
name: Ident(
"exec",
None,
),
args: [
Reciever,
Field(
Field(
Ident(
"arg0",
None,
),
Ident(
"string",
None,
),
),
),
Field(
Field(
Ident(
"args",
None,
),
Ident(
"vec",
Some(
[
Ident(
"string",
None,
),
],
),
),
),
),
],
ret: Some(
Ident(
"i32",
None,
),
),
effects: [
Ident(
"Vm",
None,
),
],
},
Block(
[
FnCall(
FnCall(
Ident(
"self.host.read",
None,
),
[
String(
"jobserver",
),
],
),
),
Branch(
Branch(
FnCall(
FnCall(
Ident(
"self.host.exec",
None,
),
[
Ident(
Ident(
"arg0",
None,
),
),
Ident(
Ident(
"args",
None,
),
),
],
),
),
[
(
Bool(
true,
),
Block(
[
FnCall(
FnCall(
Ident(
"raise",
None,
),
[
Integer(
1,
),
],
),
),
],
),
),
],
),
),
],
),
[],
),
),
],
),
),
),
],
)

View file

@ -1,53 +0,0 @@
---
source: src/parser/parser_snap_tests.rs
expression: "format!(\"{:#?}\", t.unwrap())"
---
Module(
[
FnDef(
FnDef(
Prototype {
name: Ident(
"some",
None,
),
args: [],
ret: None,
effects: [],
},
Block(
[
Binding(
Binding(
Ident(
"a",
None,
),
FnCall(
FnCall(
Ident(
"some_fnExpr",
None,
),
[
Integer(
1,
),
String(
"2",
),
Integer(
3,
),
],
),
),
),
),
],
),
[],
),
),
],
)

View file

@ -1,120 +0,0 @@
---
source: src/parser/parser_snap_tests.rs
expression: "format!(\"{:#?}\", t.unwrap())"
---
Module(
[
FnDef(
FnDef(
Prototype {
name: Ident(
"call",
None,
),
args: [
Field(
Field(
Ident(
"a",
None,
),
Ident(
"b",
None,
),
),
),
Field(
Field(
Ident(
"b",
None,
),
Ident(
"c",
None,
),
),
),
],
ret: None,
effects: [
Ident(
"throws",
None,
),
Ident(
"awaits",
None,
),
Ident(
"execs",
None,
),
],
},
Block(
[
FnCall(
FnCall(
Ident(
"call",
None,
),
[
BinaryExpression(
BinaryOperation {
lhs: Integer(
1,
),
op: Add,
rhs: Integer(
1,
),
},
),
],
),
),
Binding(
Binding(
Ident(
"a",
None,
),
Integer(
1,
),
),
),
],
),
[
(
Ident(
"throws",
None,
),
Block(
[
FnCall(
FnCall(
Ident(
"raise",
None,
),
[
Integer(
1,
),
],
),
),
],
),
),
],
),
),
],
)

View file

@ -1,40 +0,0 @@
---
source: src/parser/parser_snap_tests.rs
expression: "format!(\"{:#?}\", t.unwrap())"
---
Module(
[
StructDef(
StructDef(
Ident(
"VM",
None,
),
Block(
[
Field(
Ident(
"a",
None,
),
Ident(
"string",
None,
),
),
Field(
Ident(
"b",
None,
),
Ident(
"string",
None,
),
),
],
),
),
),
],
)

Some files were not shown because too many files have changed in this diff Show more