From f0c9635db5efd0c66e01bef1be0a8f140fdbdd84 Mon Sep 17 00:00:00 2001
From: Laurenz
Date: Thu, 4 Nov 2021 15:16:46 +0100
Subject: [PATCH] Notes

---
 src/parse/mod.rs    | 11 +++++++++++
 src/parse/parser.rs | 43 ++++++++++++++++++++-----------------------
 src/syntax/mod.rs   | 14 ++++++++++++++
 3 files changed, 45 insertions(+), 23 deletions(-)

diff --git a/src/parse/mod.rs b/src/parse/mod.rs
index bfe938960..30e20c0db 100644
--- a/src/parse/mod.rs
+++ b/src/parse/mod.rs
@@ -53,6 +53,8 @@ where
     p.start();
     while !p.eof() && f(p) {
         markup_node(p, &mut at_start);
+        // NOTE: Just do this at the end of markup_node. Maybe even gives a
+        // speed boost. Wasn't possible in old parser due to use of ?.
         if let Some(node) = p.last_child() {
             at_start &= matches!(node.kind(),
                 &NodeKind::Space(_) | &NodeKind::Parbreak |
@@ -115,6 +117,7 @@ fn markup_node(p: &mut Parser, at_start: &mut bool) {
             let group = if stmt { Group::Stmt } else { Group::Expr };
 
             p.start_group(group, TokenMode::Code);
+            // NOTE: Return success from expr_with?
             expr_with(p, true, 0);
             if stmt && p.success() && !p.eof() {
                 p.expected_at("semicolon or line break");
@@ -138,6 +141,7 @@ fn markup_node(p: &mut Parser, at_start: &mut bool) {
 
 /// Parse a heading.
 fn heading(p: &mut Parser) {
+    // NOTE: Remove HeadingLevel kind and simply count Eq children in AST.
     p.start();
     p.start();
     p.eat_assert(&NodeKind::Eq);
@@ -198,6 +202,8 @@ fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) {
             let prec = op.precedence();
             expr_with(p, atomic, prec);
 
+            // NOTE: Lifting not needed if we don't start in the first place.
+            // Then we could simply do expr_with(p, atomic, prec)?;
             if p.may_lift_abort() {
                 return;
             }
@@ -264,6 +270,10 @@ fn expr_with(p: &mut Parser, atomic: bool, min_prec: usize) {
             break;
         }
 
+        // NOTE: All lifts up to here wouldn't be needed.
+        // Only here we then need to do
+        // marker.end(p, NodeKind::Binary);
+
         offset = p.end_and_start_with(NodeKind::Binary).0;
     }
 }
@@ -456,6 +466,7 @@ fn item(p: &mut Parser) -> NodeKind {
     if p.eat_if(&NodeKind::Dots) {
         expr(p);
 
+        // NOTE: Should be called `Spread`.
         p.end_or_abort(NodeKind::ParameterSink);
         return NodeKind::ParameterSink;
     }
diff --git a/src/parse/parser.rs b/src/parse/parser.rs
index 5833c724a..5ecb6e9dc 100644
--- a/src/parse/parser.rs
+++ b/src/parse/parser.rs
@@ -187,17 +187,8 @@ impl<'s> Parser<'s> {
 
     /// Eat and wrap the next token.
     pub fn convert(&mut self, kind: NodeKind) {
-        let len = self.tokens.index() - self.next_start;
-
-        self.children.push(
-            GreenNode::with_child(
-                kind,
-                len,
-                GreenData::new(self.next.clone().unwrap(), len),
-            )
-            .into(),
-        );
-        self.fast_forward();
+        self.eat();
+        self.children.last_mut().unwrap().set_kind(kind);
         self.success = true;
     }
 
@@ -278,6 +269,7 @@ impl<'s> Parser<'s> {
     }
 
     /// Consume the next token and return its kind.
+    // NOTE: This isn't great.
    fn eat_peeked(&mut self) -> Option<NodeKind> {
         let token = self.peek()?.clone();
         self.eat();
@@ -319,6 +311,7 @@ impl<'s> Parser<'s> {
 
     /// Consume the next token, debug-asserting that it is one of the given ones.
     pub fn eat_assert(&mut self, t: &NodeKind) {
+        // NOTE: assert with peek(), then eat()
         let next = self.eat_peeked();
         debug_assert_eq!(next.as_ref(), Some(t));
     }
@@ -438,8 +431,6 @@ impl<'s> Parser<'s> {
 
         // Rescan the peeked token if the mode changed.
         if rescan {
-            self.tokens.jump(self.prev_end());
-
             if prev_mode == TokenMode::Code {
                 let len = self.children.len();
                 for n in (0 .. len).rev() {
@@ -451,7 +442,11 @@ impl<'s> Parser<'s> {
                 }
             }
 
-            self.fast_forward();
+            self.tokens.jump(self.prev_end());
+            self.prev_end = self.tokens.index().into();
+            self.next_start = self.tokens.index().into();
+            self.next = self.tokens.next();
+            self.repeek();
         }
     }
 
@@ -527,21 +522,23 @@ impl<'s> Parser<'s> {
             .into(),
         );
 
-        self.fast_forward();
-    }
-
-    /// Move to the next token.
-    pub fn fast_forward(&mut self) {
-        if !self.next.as_ref().map_or(false, |x| self.skip_type(x)) {
-            self.prev_end = self.tokens.index().into();
-        }
+        self.prev_end = self.tokens.index().into();
         self.next_start = self.tokens.index().into();
         self.next = self.tokens.next();
 
         if self.tokens.mode() == TokenMode::Code {
             // Skip whitespace and comments.
             while self.next.as_ref().map_or(false, |x| self.skip_type(x)) {
-                self.eat();
+                self.children.push(
+                    GreenData::new(
+                        self.next.clone().unwrap(),
+                        self.tokens.index() - self.next_start,
+                    )
+                    .into(),
+                );
+
+                self.next_start = self.tokens.index().into();
+                self.next = self.tokens.next();
             }
         }
 
diff --git a/src/syntax/mod.rs b/src/syntax/mod.rs
index d26c64849..112fc220f 100644
--- a/src/syntax/mod.rs
+++ b/src/syntax/mod.rs
@@ -40,6 +40,15 @@ impl Green {
         self.data().kind()
     }
 
+    /// Set the type of the node.
+    pub fn set_kind(&mut self, kind: NodeKind) {
+        let data = match self {
+            Self::Node(node) => &mut Rc::make_mut(node).data,
+            Self::Token(data) => data,
+        };
+        data.set_kind(kind);
+    }
+
     /// The length of the node.
     pub fn len(&self) -> usize {
         self.data().len()
@@ -141,6 +150,11 @@ impl GreenData {
         &self.kind
     }
 
+    /// Set the type of the node.
+    pub fn set_kind(&mut self, kind: NodeKind) {
+        self.kind = kind;
+    }
+
     /// The length of the node.
     pub fn len(&self) -> usize {
         self.len
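Illustrative sketch (not part of the patch): the src/syntax/mod.rs hunks let Parser::convert retag an already-pushed green node via set_kind instead of rebuilding it, using Rc::make_mut for copy-on-write on shared nodes. The standalone Rust program below shows that pattern on simplified stand-in types; NodeKind, GreenData, GreenNode, and Green here are minimal assumptions for the demo, not the real typst definitions.

    #![allow(dead_code)]
    use std::rc::Rc;

    #[derive(Clone, Debug, PartialEq)]
    enum NodeKind {
        Ident,
        Binary,
    }

    #[derive(Clone, Debug)]
    struct GreenData {
        kind: NodeKind,
        len: usize,
    }

    #[derive(Clone, Debug)]
    struct GreenNode {
        data: GreenData,
        children: Vec<Green>,
    }

    #[derive(Clone, Debug)]
    enum Green {
        Node(Rc<GreenNode>),
        Token(GreenData),
    }

    impl Green {
        // Retag a node in place. Rc::make_mut clones the inner node only if
        // the Rc is shared, so an unshared node is mutated without a copy.
        fn set_kind(&mut self, kind: NodeKind) {
            let data = match self {
                Green::Node(node) => &mut Rc::make_mut(node).data,
                Green::Token(data) => data,
            };
            data.kind = kind;
        }
    }

    fn main() {
        // A token the parser just ate...
        let mut green = Green::Token(GreenData { kind: NodeKind::Ident, len: 3 });
        // ...and later decides to retag, as the new convert() does via
        // children.last_mut().unwrap().set_kind(kind).
        green.set_kind(NodeKind::Binary);
        if let Green::Token(data) = &green {
            assert_eq!(data.kind, NodeKind::Binary);
        }
    }

The design point the patch leans on: because green nodes are behind Rc, retagging the last child after eat() is cheap in the common unshared case, which is why convert no longer builds a wrapper GreenNode up front.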