Mirror of https://forge.katzen.cafe/katzen-cafe/iowo.git
lang: improve and simplify error handling and storage
Fixes wrong error ordering for errors on nodes that use `forward_parents`.
parent ed151c2e3c
commit f6da90a354
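The core of the change: a parse error is no longer pushed into a separate `errors: Vec<SyntaxError>` on the `Parser` when a marker is completed; it now travels inside the event stream as `NodeKind::Error`, and `Output::from_parser_output` collects it while replaying the events (including `forward_parent` chains) into the tree, so errors come out in tree order. A minimal sketch of that storage model, using simplified stand-in types rather than the crate's own (`&'static str` in place of `SyntaxKind`, no `forward_parent` handling):

// Simplified sketch of the new storage model (stand-in types, not the crate's
// real ones): the error is the node kind of a Start event, so it is recovered
// in tree order when the events are replayed.

#[derive(Debug, Clone, PartialEq)]
enum SyntaxError {
    Expected(&'static str),
}

#[derive(Debug, Clone, PartialEq)]
enum NodeKind {
    Syntax(&'static str),
    Error(SyntaxError),
}

enum Event {
    Start { kind: NodeKind },
    Finish,
}

// Replaying the events yields each error exactly where its node opens,
// instead of in the order the parser happened to push them.
fn collect_errors(events: &[Event]) -> Vec<SyntaxError> {
    let mut errors = Vec::new();
    for ev in events {
        if let Event::Start { kind: NodeKind::Error(err) } = ev {
            errors.push(err.clone());
        }
    }
    errors
}

fn main() {
    let events = [
        Event::Start { kind: NodeKind::Syntax("PIPELINE") },
        Event::Start { kind: NodeKind::Error(SyntaxError::Expected("expression")) },
        Event::Finish,
        Event::Finish,
    ];
    println!("{:?}", collect_errors(&events)); // [Expected("expression")]
}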
@@ -1,6 +1,12 @@
 use drop_bomb::DropBomb;
 
-use self::{error::SyntaxError, events::Event, input::Input, syntax_kind::SyntaxKind};
+use self::{
+    error::SyntaxError,
+    events::{Event, NodeKind},
+    input::Input,
+    syntax_kind::SyntaxKind,
+};
+use std::cell::Cell;
 
 pub mod syntax_kind;
 #[cfg(test)]
@@ -12,11 +18,13 @@ pub mod grammar;
 pub mod input;
 pub mod output;
 
+const PARSER_STEP_LIMIT: u32 = 4096;
+
 pub struct Parser<'src, 'toks> {
     input: Input<'src, 'toks>,
     pos: usize,
     events: Vec<Event>,
-    errors: Vec<SyntaxError>,
+    steps: Cell<u32>,
 }
 
 impl<'src, 'toks> Parser<'src, 'toks> {
@@ -25,15 +33,16 @@ impl<'src, 'toks> Parser<'src, 'toks> {
             input,
             pos: 0,
             events: Vec::new(),
-            errors: Vec::new(),
+            steps: Cell::new(0),
         }
     }
 
-    pub fn finish(self) -> (Vec<Event>, Vec<SyntaxError>) {
-        (self.events, self.errors)
+    pub fn finish(self) -> Vec<Event> {
+        self.events
     }
 
     pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
+        self.step();
         self.input.kind(self.pos + n)
     }
 
@@ -44,6 +53,7 @@ impl<'src, 'toks> Parser<'src, 'toks> {
     }
 
     pub(crate) fn current(&self) -> SyntaxKind {
+        self.step();
         self.input.kind(self.pos)
     }
 
@@ -80,6 +90,12 @@ impl<'src, 'toks> Parser<'src, 'toks> {
     fn push_ev(&mut self, event: Event) {
         self.events.push(event)
    }
+
+    fn step(&self) {
+        let steps = self.steps.get();
+        assert!(steps <= PARSER_STEP_LIMIT, "the parser seems stuck...");
+        self.steps.set(steps + 1);
+    }
 }
 
 pub(crate) struct Marker {
@@ -95,12 +111,13 @@ impl Marker {
         }
     }
 
-    pub(crate) fn complete(mut self, p: &mut Parser<'_, '_>, kind: SyntaxKind) -> CompletedMarker {
+    fn complete_node(mut self, p: &mut Parser, kind: NodeKind) -> CompletedMarker {
         self.bomb.defuse();
         match &mut p.events[self.pos] {
-            Event::Start { kind: slot, .. } => *slot = kind,
+            Event::Start { kind: slot, .. } => *slot = kind.clone(),
             _ => unreachable!(),
         }
+
         p.push_ev(Event::Finish);
 
         CompletedMarker {
@@ -109,9 +126,12 @@ impl Marker {
         }
     }
 
-    pub(crate) fn complete_err(mut self, p: &mut Parser, kind: SyntaxError) -> CompletedMarker {
-        p.errors.push(kind);
-        self.complete(p, SyntaxKind::PARSE_ERR)
+    pub(crate) fn complete(self, p: &mut Parser<'_, '_>, kind: SyntaxKind) -> CompletedMarker {
+        self.complete_node(p, NodeKind::Syntax(kind))
+    }
+
+    pub(crate) fn error(self, p: &mut Parser, kind: SyntaxError) -> CompletedMarker {
+        self.complete_node(p, NodeKind::Error(kind))
     }
 
     pub(crate) fn abandon(mut self, p: &mut Parser<'_, '_>) {
@@ -119,7 +139,7 @@ impl Marker {
         if self.pos == p.events.len() - 1 {
             match p.events.pop() {
                 Some(Event::Start {
-                    kind: SyntaxKind::TOMBSTONE,
+                    kind: NodeKind::Syntax(SyntaxKind::TOMBSTONE),
                     forward_parent: None,
                 }) => (),
                 _ => unreachable!(),
@@ -130,7 +150,7 @@
 
 pub(crate) struct CompletedMarker {
     pos: usize,
-    kind: SyntaxKind,
+    kind: NodeKind,
 }
 
 impl CompletedMarker {

@@ -1,6 +1,6 @@
 use crate::lst_parser::syntax_kind::SyntaxKind;
 
-#[derive(Debug)]
+#[derive(Debug, PartialEq, Eq, Clone)]
 pub enum SyntaxError {
     Expected(Vec<SyntaxKind>),
     PipelineNeedsSink,

@@ -1,22 +1,69 @@
 use crate::lst_parser::syntax_kind::SyntaxKind;
 
+use super::error::SyntaxError;
+
 #[derive(Debug)]
 pub enum Event {
     Start {
-        kind: SyntaxKind,
+        kind: NodeKind,
         forward_parent: Option<usize>,
     },
     Finish,
     Eat {
         count: usize,
     },
-    Error,
 }
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum NodeKind {
+    Syntax(SyntaxKind),
+    Error(SyntaxError),
+}
+
+impl NodeKind {
+    pub fn is_syntax(&self) -> bool {
+        matches!(self, Self::Syntax(_))
+    }
+
+    pub fn is_error(&self) -> bool {
+        matches!(self, Self::Error(_))
+    }
+}
+
+impl From<SyntaxKind> for NodeKind {
+    fn from(value: SyntaxKind) -> Self {
+        NodeKind::Syntax(value)
+    }
+}
+
+impl From<SyntaxError> for NodeKind {
+    fn from(value: SyntaxError) -> Self {
+        NodeKind::Error(value)
+    }
+}
+
+impl PartialEq<SyntaxKind> for NodeKind {
+    fn eq(&self, other: &SyntaxKind) -> bool {
+        match self {
+            NodeKind::Syntax(s) => s == other,
+            NodeKind::Error(_) => false,
+        }
+    }
+}
+
+impl PartialEq<SyntaxError> for NodeKind {
+    fn eq(&self, other: &SyntaxError) -> bool {
+        match self {
+            NodeKind::Syntax(_) => false,
+            NodeKind::Error(e) => e == other,
+        }
+    }
+}
 
 impl Event {
     pub(crate) fn tombstone() -> Self {
         Self::Start {
-            kind: SyntaxKind::TOMBSTONE,
+            kind: SyntaxKind::TOMBSTONE.into(),
             forward_parent: None,
         }
     }

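The `From` and `PartialEq` impls above keep call sites terse (`SyntaxKind::TOMBSTONE.into()`, direct comparisons against plain `SyntaxKind`s or `SyntaxError`s). A hypothetical test, not part of this commit, exercising only what is defined above:

// Hypothetical test inside the events module (not part of the diff); it only
// uses items defined above: the From conversion, is_syntax, and the
// PartialEq impls against SyntaxKind and SyntaxError.
#[cfg(test)]
mod node_kind_conversions {
    use super::*;

    #[test]
    fn tombstone_roundtrip() {
        let kind: NodeKind = SyntaxKind::TOMBSTONE.into();
        assert!(kind.is_syntax());
        assert!(kind == SyntaxKind::TOMBSTONE);
        assert!(kind != SyntaxError::PipelineNeedsSink);
    }
}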
@@ -35,7 +35,7 @@ pub fn parenthesized_expr(p: &mut Parser) -> Option<CompletedMarker> {
     let par_expr = p.start("parenthesized");
     expression(p, false);
     if !p.eat(R_PAREN) {
-        return Some(par_expr.complete_err(p, SyntaxError::Expected(vec![R_PAREN])));
+        return Some(par_expr.error(p, SyntaxError::Expected(vec![R_PAREN])));
     }
 
     return Some(par_expr.complete(p, PARENTHESIZED_EXPR));

@@ -21,7 +21,7 @@ pub fn vec_matrix_list(p: &mut Parser) -> CompletedMarker {
         start.complete(p, LIST)
     } else {
         row_start.abandon(p);
-        start.complete_err(p, SyntaxError::Expected(vec![EXPR, R_BRACK]))
+        start.error(p, SyntaxError::Expected(vec![EXPR, R_BRACK]))
     }
 }
 

@@ -16,7 +16,7 @@ pub fn pipeline(p: &mut Parser, start_expr: CompletedMarker) -> Option<Completed
 
     loop {
         if expression(p, true).is_none() {
-            return Some(pipeline_marker.complete_err(p, SyntaxError::PipelineNeedsSink));
+            return Some(pipeline_marker.error(p, SyntaxError::PipelineNeedsSink));
         }
         if !pipe(p) {
             return Some(pipeline_marker.complete(p, PIPELINE));

@@ -1,3 +1,4 @@
+use clap::builder;
 use owo_colors::{unset_override, OwoColorize};
 use rowan::{GreenNode, GreenNodeBuilder, GreenNodeData, GreenTokenData, Language, NodeOrToken};
 use std::mem;
@@ -7,7 +8,10 @@ use crate::lst_parser::{
     syntax_kind::{Lang, SyntaxKind},
 };
 
-use super::{error::SyntaxError, events::Event};
+use super::{
+    error::SyntaxError,
+    events::{Event, NodeKind},
+};
 
 pub struct Output {
     pub green_node: GreenNode,
@@ -23,6 +27,7 @@ impl std::fmt::Debug for Output {
     }
 }
 
+const INDENT_STR: &str = " ";
 fn debug_print_green_node(
     node: NodeOrToken<&GreenNodeData, &GreenTokenData>,
     f: &mut dyn std::fmt::Write,
@@ -31,7 +36,7 @@ fn debug_print_green_node(
     colored: bool,
 ) -> std::fmt::Result {
     for _ in 0..lvl {
-        f.write_str(" ")?;
+        f.write_str(INDENT_STR)?;
     }
 
     if !colored {
@@ -68,7 +73,7 @@ fn debug_print_green_node(
         debug_print_green_node(c, f, lvl + 1, errs, colored)?;
     }
     for _ in 0..lvl {
-        f.write_str(" ")?;
+        f.write_str(INDENT_STR)?;
     }
     if kind != SyntaxKind::PARSE_ERR {
         write!(f, "{}", "}\n".yellow())
@@ -123,10 +128,11 @@ impl Output {
     }
     pub fn from_parser_output(
         mut raw_toks: Vec<(SyntaxKind, &str)>,
-        (mut events, errs): (Vec<Event>, Vec<SyntaxError>),
+        mut events: Vec<Event>,
     ) -> Self {
         let mut builder = GreenNodeBuilder::new();
         let mut fw_parents = Vec::new();
+        let mut errors = Vec::new();
         raw_toks.reverse();
 
         for i in 0..events.len() {
@@ -170,8 +176,15 @@ impl Output {
                     }
 
                     for kind in fw_parents.drain(..).rev() {
-                        if kind != SyntaxKind::TOMBSTONE {
-                            builder.start_node(kind.into());
+                        match kind {
+                            NodeKind::Syntax(kind) if kind != SyntaxKind::TOMBSTONE => {
+                                builder.start_node(kind.into())
+                            }
+                            NodeKind::Error(err) => {
+                                errors.push(err);
+                                builder.start_node(SyntaxKind::PARSE_ERR.into())
+                            }
+                            _ => {}
                         }
                     }
                 }
@@ -180,13 +193,12 @@ impl Output {
                     let (tok, text): (SyntaxKind, &str) = raw_toks.pop().unwrap();
                     builder.token(tok.into(), text);
                 }),
-                Event::Error => todo!(),
             }
         }
 
         Self {
             green_node: builder.finish(),
-            errors: errs,
+            errors,
         }
     }
 }

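With `Parser::finish` now returning only `Vec<Event>`, a call into `Output` would pass the events straight through; roughly (hypothetical call site, not part of this diff):

// Hypothetical call site (not in this diff): raw_toks is the lexed
// Vec<(SyntaxKind, &str)>, as in from_parser_output's signature above.
let events = parser.finish(); // Vec<Event>; errors ride along inside Start events
let output = Output::from_parser_output(raw_toks, events);
// output.errors now holds the SyntaxErrors in tree order
// (assuming the errors field is public like green_node).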
flake.nix (19 changed lines)

@@ -27,8 +27,7 @@
         pkgs = nixpkgs.legacyPackages.${system};
         toolchain = with fenix.packages.${system};
           combine [
-            default.toolchain
-            rust-analyzer
+            complete.toolchain
           ];
       in {
         default = devenv.lib.mkShell {
@@ -39,24 +38,14 @@
           config,
           ...
         }: {
-          # languages.rust = {
-          #   enable = true;
-          #   channel = "nightly";
-          #   components = [
-          #     "rustc"
-          #     "cargo"
-          #     "clippy"
-          #     "rustfmt"
-          #     "rust-src"
-          #     "rust-analyzer"
-          #   ];
-          # };
 
           pre-commit.hooks = {
             clippy.enable = false;
             rustfmt.enable = true;
           };
-
+          env = {
+            RUST_BACKTRACE = 1;
+          };
           packages = with pkgs; [
             just
             nushell