Mirror of https://forge.katzen.cafe/katzen-cafe/iowo.git, synced 2024-12-11 21:45:53 +01:00
pawarser: Implement Parser::finish
This commit is contained in:
parent
9b1f6a1dc1
commit
ac75978c01
2 changed files with 97 additions and 1 deletion
@@ -1,6 +1,9 @@
-use std::cell::Cell;
+use std::{cell::Cell, marker::PhantomData, mem};
 
 use enumset::{EnumSet, EnumSetType};
+use rowan::{GreenNode, GreenNodeBuilder};
 
+use crate::parser::event::NodeKind;
+
 use self::{event::Event, input::Input, marker::Marker};
 pub use error::SyntaxError;
@@ -93,6 +96,90 @@ impl<'src, 'toks, SyntaxKind: SyntaxElement, SyntaxErr: SyntaxError>
         assert!(steps <= self.step_limit, "the parser seems stuck.");
         self.steps.set(steps + 1);
     }
+
+    pub fn finish(
+        Self {
+            input,
+            pos,
+            mut events,
+            step_limit,
+            steps,
+        }: Self,
+    ) -> ParserOutput<SyntaxKind, SyntaxErr> {
+        let (mut raw_toks, meaningless_tokens) = input.dissolve();
+        let mut builder = GreenNodeBuilder::new();
+        // TODO: document what the hell a forward parent is
+        let mut fw_parents = Vec::new();
+        let mut errors: Vec<SyntaxErr> = Vec::new();
+        raw_toks.reverse();
+
+        for i in 0..events.len() {
+            match mem::replace(&mut events[i], Event::tombstone()) {
+                Event::Start {
+                    kind,
+                    forward_parent,
+                } => {
+                    if kind == NodeKind::Tombstone && forward_parent.is_none() {
+                        continue;
+                    }
+
+                    // resolving forward parents
+                    // temporarily jump around with the parser index and replace them with tombstones
+                    fw_parents.push(kind);
+                    let mut idx = i;
+                    let mut fp = forward_parent;
+                    while let Some(fwd) = fp {
+                        idx += fwd as usize;
+                        fp = match mem::replace(&mut events[idx], Event::tombstone()) {
+                            Event::Start {
+                                kind,
+                                forward_parent,
+                            } => {
+                                fw_parents.push(kind);
+                                forward_parent
+                            }
+                            _ => unreachable!(),
+                        }
+                    }
+
+                    // clear semantically meaningless tokens before the new tree node for aesthetic reasons
+                    while raw_toks
+                        .last()
+                        .is_some_and(|v| meaningless_tokens.contains(v.0))
+                    {
+                        // update first next Eat event
+                        match events.iter_mut().find(|ev| matches!(ev, Event::Eat { .. })) {
+                            Some(Event::Eat { count }) => *count -= 1,
+                            _ => unreachable!(),
+                        }
+
+                        // put whitespace into lst
+                        let (tok, text) = raw_toks.pop().unwrap();
+                        builder.token(tok.into(), text);
+                    }
+
+                    // insert forward parents into the tree in correct order
+                    for kind in fw_parents.drain(..).rev() {
+                        match kind {
+                            NodeKind::Syntax(kind) => builder.start_node(kind.into()),
+                            NodeKind::Error(err) => {
+                                errors.push(err);
+                                builder.start_node(SyntaxKind::ERROR.into())
+                            }
+                            _ => {}
+                        }
+                    }
+                }
+                Event::Finish => builder.finish_node(),
+                Event::Eat { count } => (0..count).for_each(|_| {
+                    let (tok, text) = raw_toks.pop().unwrap();
+                    builder.token(tok.into(), text);
+                }),
+            }
+        }
+
+        todo!()
+    }
 }
 
 pub struct ParserBuilder<
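For context on the TODO in the hunk above: a forward parent is how an event-based parser can decide, after it has already started a node, that some later-recorded node should become that node's parent in the tree. Each Start event may carry an offset to such a later event; the loop follows that chain, collects the node kinds, and then starts them in reverse so the latest-recorded node ends up outermost. Below is a minimal, self-contained sketch of that chain walk; the Ev enum and the string kinds are hypothetical stand-ins and not part of this commit.

// Illustrative only: a stripped-down model of the forward-parent walk in `finish`.
enum Ev {
    Start { kind: &'static str, forward_parent: Option<usize> },
    Finish,
}

fn main() {
    let events = vec![
        // index 0: started first, but should end up *inside* the node at index 2
        Ev::Start { kind: "Expr", forward_parent: Some(2) },
        Ev::Finish,
        // index 2: recorded later, but must be entered into the tree first
        Ev::Start { kind: "BinExpr", forward_parent: None },
        Ev::Finish,
    ];

    // Follow the offsets, collect the kinds, then start the nodes in reverse order,
    // mirroring how `finish` fills and drains `fw_parents`.
    let mut parents = Vec::new();
    let mut idx = 0;
    while let Ev::Start { kind, forward_parent } = &events[idx] {
        parents.push(*kind);
        match forward_parent {
            Some(offset) => idx += *offset,
            None => break,
        }
    }
    for kind in parents.iter().rev() {
        println!("start_node({kind})"); // prints BinExpr, then Expr
    }
}

In the commit itself, the visited Start events are additionally replaced with tombstones so they are skipped when the outer loop reaches them again.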
@@ -55,4 +55,13 @@ impl<'src, SyntaxKind: SyntaxElement> Input<'src, SyntaxKind> {
     pub fn meaningless_tail_len(&self) -> usize {
         self.raw.len() - (self.meaningful_toks.last().unwrap() + 1)
     }
+
+    pub fn dissolve(self) -> (Vec<(SyntaxKind, &'src str)>, EnumSet<SyntaxKind>) {
+        let Self {
+            raw,
+            semantically_meaningless,
+            ..
+        } = self;
+        (raw, semantically_meaningless)
+    }
 }
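The new dissolve hands finish exactly the two pieces its trivia loop needs: the raw (kind, text) tokens and the set of semantically meaningless kinds. A reduced sketch of how those interact when trivia in front of a node is attached, assuming a plain HashSet<&str> in place of the EnumSet and string kinds instead of SyntaxKind (all names illustrative):

use std::collections::HashSet;

fn main() {
    // Token stream "  foo", already reversed as in `finish`, so `pop` yields
    // tokens in source order.
    let mut raw_toks = vec![("IDENT", "foo"), ("WHITESPACE", "  ")];
    let meaningless: HashSet<&str> = HashSet::from(["WHITESPACE"]);
    // The next Eat event originally covers both tokens.
    let mut next_eat_count = 2;

    // Emit leading trivia before starting the node, and shrink the Eat count
    // so the same tokens are not emitted a second time later.
    while raw_toks.last().is_some_and(|(kind, _)| meaningless.contains(kind)) {
        let (kind, text) = raw_toks.pop().unwrap();
        next_eat_count -= 1;
        println!("token({kind}, {text:?}) emitted before start_node");
    }

    println!("Eat count left for the node's own tokens: {next_eat_count}"); // 1
}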