Fix clippy warnings
Some checks are pending
rust-clippy analyze / Run rust-clippy analyzing (push) Waiting to run
Deploy mdBook site to Pages / build (push) Waiting to run
Deploy mdBook site to Pages / deploy (push) Blocked by required conditions
Rust / build (push) Waiting to run

This commit is contained in:
sevki 2024-05-27 14:17:28 +01:00
parent 19dcb4e8f3
commit d09cf0033f
15 changed files with 45 additions and 52 deletions

View file

@ -9,7 +9,7 @@ src = "docs"
additional-css = ["ok.css"]
theme = "docs/theme"
default-theme = "dark"
git-repository-url = "https://github.com/oknotokcomputer/roar"
git-repository-url = "https://ok.software/ok/src"
preferred-dark-theme = "rust"
[preprocessor.svgbob]

View file

@ -1,7 +1,7 @@
use core::panic;
use std::fmt::Write as _;
use std::fs::File;
use std::io::{BufRead, BufReader, BufWriter, Write};
use std::io::{BufRead, BufReader, Write};
use std::path::PathBuf;
use tiny_keccak::{Hasher, Sha3};
@ -77,7 +77,7 @@ fn try_lalrpop(source: &str, target: &str) -> anyhow::Result<()> {
let full_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(SOURCE);
let path = full_path.to_str().unwrap();
println!("cargo:rerun-if-changed={}", path);
let p = lalrpop::Configuration::new()
lalrpop::Configuration::new()
.generate_in_source_tree()
.process_file(path).expect("msg");
Ok(())

View file

@ -2,12 +2,9 @@ use std::sync::{Arc, Mutex};
use salsa::DebugWithDb;
use crate::{
lexer::{self, Lexer, Token},
Db,
};
use super::text::SourceProgram;
#[derive(Default)]
#[salsa::db(crate::Jar)]

View file

@ -1,8 +1,8 @@
use std::ops::Range;
use crate::Db;
use super::text::SourceProgram;
pub struct Errors<'a>(Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::Token<'a>, &'a str>>);
@ -27,12 +27,12 @@ impl<'a> IntoIterator for Errors<'a> {
.into_iter()
.map(|error| match error.error {
lalrpop_util::ParseError::InvalidToken { location } => location..location,
lalrpop_util::ParseError::UnrecognizedEof { location, expected } => {
lalrpop_util::ParseError::UnrecognizedEof { location, expected: _ } => {
location..location
}
lalrpop_util::ParseError::UnrecognizedToken { token, expected } => token.0..token.2,
lalrpop_util::ParseError::UnrecognizedToken { token, expected: _ } => token.0..token.2,
lalrpop_util::ParseError::ExtraToken { token } => token.0..token.2,
lalrpop_util::ParseError::User { error } => todo!(),
lalrpop_util::ParseError::User { error: _ } => todo!(),
})
.collect::<Vec<_>>()
.into_iter()
@ -46,8 +46,8 @@ fn handle_errors(errors: Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::To
for error in errors {
match error.error {
lalrpop_util::ParseError::InvalidToken { location } => todo!(),
lalrpop_util::ParseError::UnrecognizedEof { location, expected } => todo!(),
lalrpop_util::ParseError::InvalidToken { location: _ } => todo!(),
lalrpop_util::ParseError::UnrecognizedEof { location: _, expected: _ } => todo!(),
lalrpop_util::ParseError::UnrecognizedToken { token, expected } => {
// find the line and column of the start and end tokens,
// and print the line with a caret pointing to the error
@ -66,8 +66,8 @@ fn handle_errors(errors: Vec<lalrpop_util::ErrorRecovery<usize, crate::lexer::To
pretty.push_str(&"^".repeat(end_col - start_col));
last_end = end;
},
lalrpop_util::ParseError::ExtraToken { token } => todo!(),
lalrpop_util::ParseError::User { error } => todo!(),
lalrpop_util::ParseError::ExtraToken { token: _ } => todo!(),
lalrpop_util::ParseError::User { error: _ } => todo!(),
};
}

View file

@ -1,11 +1,8 @@
#![allow(clippy::needless_borrow)]
use salsa::*;
use std::{
array::IntoIter,
collections::BTreeMap,
path::Iter,
sync::{Arc, Mutex},
};
use crate::{parser::ast};

View file

@ -1,11 +1,10 @@
use std::{
collections::BTreeMap,
ops::{Range, RangeBounds},
};
use crate::{
compiler::{errors::Errors, text::{Position, Span, Spanned}},
parser::ast::{self, EffectDef, Module},
compiler::{errors::Errors},
parser::ast::{self},
Db,
};
@ -25,7 +24,7 @@ pub fn compile(db: &dyn Db, src: SourceProgram) -> ir::Program {
let t = crate::parser::src::SourceParser::new().parse(&mut errors, wrapper);
// let mut errors_in_positions: Vec<ir::Position> = vec![];
if !errors.is_empty() {
for error_range in Into::<Errors>::into(errors) {
for _error_range in Into::<Errors>::into(errors) {
text::to_spans(db, src);
}
panic!();
@ -66,7 +65,7 @@ pub fn compile(db: &dyn Db, src: SourceProgram) -> ir::Program {
}
#[salsa::tracked]
pub fn compile_effect(db: &dyn Db, effect: ir::EffectDef) {}
pub fn compile_effect(_db: &dyn Db, _effect: ir::EffectDef) {}
#[salsa::tracked]
pub fn add_imports(db: &dyn Db, import: ir::Import) -> Vec<ir::Mangled> {

View file

@ -106,7 +106,7 @@ pub fn to_spans(db: &dyn Db, src: SourceProgram) -> SourceMap {
// Lexer tokens have a start and end position, and we want to map these to the line lengths
// first we iterate over the lexer tokens
for token in lexer {
let size = token.end - token.start;
let _size = token.end - token.start;
// then we peek at the first line
let mut start: Option<(usize, usize)> = None;
loop {

View file

@ -1,7 +1,11 @@
use super::*;
use proptest::{num::i32, prelude::*};
use proptest::{prelude::*};
#[allow(unused_imports)]
use crate::lexer::{Position, Spanned, Lexer, Token};
proptest! {
#[test]
fn test_strings(rnd in ("[a-z]+", 1..10)) {
let input = format!(r#"let {} = "{}""#, rnd.0, rnd.1);

View file

@ -4,12 +4,9 @@ lexer.rs is a lexer for the src language
use std::{fmt::Display, iter::Iterator, iter::Peekable, str::Chars};
use lalrpop_util::{
lexer::Token as LAToken,
state_machine::{ParserDefinition, TokenTriple},
};
use okstd::prelude::*;
use syn::token;
// Identifier
#[derive(Debug, PartialEq, Clone, Copy)]
@ -266,7 +263,7 @@ impl<'input> Token<'input> {
Token::Equals => "=".chars(),
Token::LessThan => "<".chars(),
Token::GreaterThan => ">".chars(),
Token::Variable(identifier) => {
Token::Variable(_identifier) => {
// Implement the conversion to chars for Variable
// based on its fields
"".chars()
@ -274,8 +271,8 @@ impl<'input> Token<'input> {
Token::Word(word) => word.chars(),
Token::String(string) => string.chars(),
Token::Comment(comment) => comment.chars(),
Token::Integer(number) => "".chars(),
Token::Float(number) => "".chars(),
Token::Integer(_number) => "".chars(),
Token::Float(_number) => "".chars(),
Token::Eof => "".chars(),
Token::NewLine => "\n".chars(),
Token::LeftParen => "(".chars(),

View file

@ -3,7 +3,7 @@ pub mod parser;
pub mod compiler;
use compiler::text;
use parser::ast;
use crate::compiler::ir;

View file

@ -3,8 +3,8 @@ pub fn pretty_errors<'input>(src: &'input str, errors: Vec<lalrpop_util::ErrorRe
let mut last_end = 0;
for error in errors {
match error.error {
lalrpop_util::ParseError::InvalidToken { location } => todo!(),
lalrpop_util::ParseError::UnrecognizedEof { location, expected } => todo!(),
lalrpop_util::ParseError::InvalidToken { location: _ } => todo!(),
lalrpop_util::ParseError::UnrecognizedEof { location: _, expected: _ } => todo!(),
lalrpop_util::ParseError::UnrecognizedToken { token, expected } => {
// find the line and column of the start and end tokens,
// and print the line with a caret pointing to the error
@ -23,8 +23,8 @@ pub fn pretty_errors<'input>(src: &'input str, errors: Vec<lalrpop_util::ErrorRe
pretty.push_str(&"^".repeat(end_col - start_col));
last_end = end;
},
lalrpop_util::ParseError::ExtraToken { token } => todo!(),
lalrpop_util::ParseError::User { error } => todo!(),
lalrpop_util::ParseError::ExtraToken { token: _ } => todo!(),
lalrpop_util::ParseError::User { error: _ } => todo!(),
};
}

View file

@ -1,4 +1,3 @@
use lalrpop_util::lalrpop_mod;
mod parser_snap_tests;
mod string;

View file

@ -1,7 +1,7 @@
use crate::lexer::Lexer;
use crate::parser::errors::pretty_errors;
use insta::assert_snapshot;
use okstd::prelude::*;
#[cfg(test)]
#[okstd::test]

View file

@ -1,10 +1,10 @@
// auto-generated: "lalrpop 0.20.2"
// sha3: 58a0b50c7ef28fe4c687287586df7da9739726b0ed43438d355383eb2237332f
use std::str::FromStr;
use crate::parser::string::apply_string_escapes;
use super::ast::*;
use lalrpop_util::{ErrorRecovery, ParseError};
use crate::lexer::{Position, Token, Word, Variable};
use lalrpop_util::{ErrorRecovery};
use crate::lexer::{Token};
use okstd::prelude::*;
#[allow(unused_extern_crates)]
extern crate lalrpop_util as __lalrpop_util;

View file

@ -1,8 +1,8 @@
use std::str::Chars;
use stringzilla::{sz, StringZilla};
use syn::spanned::Spanned as _;
use crate::lexer::{self, Spanned};
#[derive(Debug, PartialEq)]
pub enum ParseError {