Skip to content

Commit

Permalink
refactor!: Rename RuleKind, TokenKind and NodeLabel (#971)
Browse files Browse the repository at this point in the history
Closes #834 😭
  • Loading branch information
Xanewok committed May 22, 2024
1 parent a2f5094 commit be943b7
Show file tree
Hide file tree
Showing 105 changed files with 7,402 additions and 6,729 deletions.
5 changes: 5 additions & 0 deletions .changeset/smooth-cougars-film.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"@nomicfoundation/slang": minor
---

Rename RuleKind, TokenKind and NodeLabel
1 change: 1 addition & 0 deletions .cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
"napi",
"nomic",
"nomicfoundation",
"nonterminal",
"rustup",
"struct",
"structs",
Expand Down
12 changes: 6 additions & 6 deletions crates/codegen/runtime/cargo/src/runtime/generated/kinds.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

16 changes: 8 additions & 8 deletions crates/codegen/runtime/cargo/src/runtime/kinds.rs.jinja2
Original file line number Diff line number Diff line change
Expand Up @@ -15,20 +15,20 @@ use napi_derive::napi;
)]
#[cfg_attr(feature = "slang_napi_interfaces", /* derives `Clone` and `Copy` */ napi(string_enum, namespace = "kinds"))]
#[cfg_attr(not(feature = "slang_napi_interfaces"), derive(Clone, Copy))]
pub enum RuleKind {
pub enum NonTerminalKind {
{%- if rendering_in_stubs -%}
Stub1,
Stub2,
Stub3,
{%- else -%}
{%- for variant in model.rule_kinds -%}
{%- for variant in model.nonterminal_kinds -%}
{# variant.documentation | indent(prefix = "/// ", first = true, blank = true) #}
{{ variant }},
{%- endfor -%}
{%- endif -%}
}

impl metaslang_cst::NonTerminalKind for RuleKind {}
impl metaslang_cst::NonTerminalKind for NonTerminalKind {}

#[derive(
Debug,
Expand All @@ -45,7 +45,7 @@ impl metaslang_cst::NonTerminalKind for RuleKind {}
#[strum(serialize_all = "snake_case")]
#[cfg_attr(feature = "slang_napi_interfaces", /* derives `Clone` and `Copy` */ napi(string_enum, namespace = "kinds"))]
#[cfg_attr(not(feature = "slang_napi_interfaces"), derive(Clone, Copy))]
pub enum NodeLabel {
pub enum EdgeLabel {
// Built-in:
{# _SLANG_INTERNAL_RESERVED_NODE_LABELS_ (keep in sync) #}
Item,
Expand All @@ -69,7 +69,7 @@ pub enum NodeLabel {
{%- endif -%}
}

impl metaslang_cst::EdgeKind for NodeLabel {}
impl metaslang_cst::EdgeLabel for EdgeLabel {}

#[derive(
Debug,
Expand All @@ -85,7 +85,7 @@ impl metaslang_cst::EdgeKind for NodeLabel {}
)]
#[cfg_attr(feature = "slang_napi_interfaces", /* derives `Clone` and `Copy` */ napi(string_enum, namespace = "kinds"))]
#[cfg_attr(not(feature = "slang_napi_interfaces"), derive(Clone, Copy))]
pub enum TokenKind {
pub enum TerminalKind {
// Built-in:
SKIPPED,

Expand All @@ -95,14 +95,14 @@ pub enum TokenKind {
Stub2,
Stub3,
{%- else -%}
{%- for variant in model.token_kinds -%}
{%- for variant in model.terminal_kinds -%}
{# variant.documentation | indent(prefix = "/// ", first = true, blank = true) #}
{{ variant }},
{%- endfor -%}
{%- endif -%}
}

impl metaslang_cst::TerminalKind for TokenKind {
impl metaslang_cst::TerminalKind for TerminalKind {
fn is_trivia(&self) -> bool {
{%- if rendering_in_stubs -%}
false
Expand Down
18 changes: 9 additions & 9 deletions crates/codegen/runtime/cargo/src/runtime/language.rs.jinja2
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ use napi_derive::napi;

use crate::cst;
use crate::kinds::{
NodeLabel, IsLexicalContext, LexicalContext, LexicalContextType, RuleKind, TokenKind,
EdgeLabel, IsLexicalContext, LexicalContext, LexicalContextType, NonTerminalKind, TerminalKind,
};
use crate::lexer::{KeywordScan, Lexer, ScannedToken};
#[cfg(feature = "slang_napi_interfaces")]
Expand Down Expand Up @@ -115,13 +115,13 @@ impl Language {

{% endif %}

pub fn parse(&self, kind: RuleKind, input: &str) -> ParseOutput {
pub fn parse(&self, kind: NonTerminalKind, input: &str) -> ParseOutput {
{%- if rendering_in_stubs -%}
unreachable!("Attempting to parse in stubs: {kind}: {input}")
{%- else -%}
match kind {
{%- for parser_name, _ in model.parser_functions -%}
RuleKind::{{ parser_name }} => Self::{{ parser_name | snake_case }}.parse(self, input),
NonTerminalKind::{{ parser_name }} => Self::{{ parser_name | snake_case }}.parse(self, input),
{%- endfor -%}
}
{%- endif -%}
Expand All @@ -145,15 +145,15 @@ impl Lexer for Language {
{%- endif -%}
}

fn delimiters<LexCtx: IsLexicalContext>() -> &'static [(TokenKind, TokenKind)] {
fn delimiters<LexCtx: IsLexicalContext>() -> &'static [(TerminalKind, TerminalKind)] {
{%- if rendering_in_stubs -%}
unreachable!("Invoking delimiters in stubs.")
{%- else -%}
match LexCtx::value() {
{%- for context_name, context in model.scanner_contexts %}
LexicalContext::{{ context_name }} => &[
{%- for open, close in context.delimiters %}
(TokenKind::{{ open }}, TokenKind::{{ close }}),
(TerminalKind::{{ open }}, TerminalKind::{{ close }}),
{%- endfor %}
],
{%- endfor %}
Expand All @@ -175,7 +175,7 @@ impl Lexer for Language {
if self.$function(input) && input.position() > furthest_position {
furthest_position = input.position();

longest_token = Some(TokenKind::$kind);
longest_token = Some(TerminalKind::$kind);
}
input.set_position(save);
)*
Expand Down Expand Up @@ -209,7 +209,7 @@ impl Lexer for Language {
if let Some(identifier) = longest_token.filter(|tok|
[
{% for name in context.promotable_identifier_scanners %}
TokenKind::{{ name }},
TerminalKind::{{ name }},
{% endfor %}
]
.contains(tok)
Expand Down Expand Up @@ -260,7 +260,7 @@ impl Lexer for Language {
// Skip a character if possible and if we didn't recognize a token
None if input.peek().is_some() => {
let _ = input.next();
Some(ScannedToken::Single(TokenKind::SKIPPED))
Some(ScannedToken::Single(TerminalKind::SKIPPED))
},
None => None,
}
Expand Down Expand Up @@ -294,7 +294,7 @@ impl Language {
#[napi(js_name = "parse", ts_return_type = "parse_output.ParseOutput", catch_unwind)]
pub fn parse_napi(
&self,
#[napi(ts_arg_type = "kinds.RuleKind")] kind: RuleKind,
#[napi(ts_arg_type = "kinds.NonTerminalKind")] kind: NonTerminalKind,
input: String
) -> NAPIParseOutput {
self.parse(kind, input.as_str()).into()
Expand Down
26 changes: 13 additions & 13 deletions crates/codegen/runtime/cargo/src/runtime/lexer.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use crate::cst::{self, LabeledNode};
use crate::kinds::{IsLexicalContext, TokenKind};
use crate::cst::{self, Edge};
use crate::kinds::{IsLexicalContext, TerminalKind};
use crate::parser_support::{ParserContext, ParserResult};

/// Whether a keyword has been scanned and if so, whether it is reserved (unusable as an identifier)
Expand All @@ -10,22 +10,22 @@ pub enum KeywordScan {
Absent,
/// The keyword is present, but is not reserved.
#[allow(unused)]
Present(TokenKind),
Present(TerminalKind),
/// The keyword is present and is reserved.
Reserved(TokenKind),
Reserved(TerminalKind),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ScannedToken {
Single(TokenKind),
Single(TerminalKind),
IdentifierOrKeyword {
identifier: TokenKind,
identifier: TerminalKind,
kw: KeywordScan,
},
}

impl ScannedToken {
pub fn accepted_as(self, expected: TokenKind) -> bool {
pub fn accepted_as(self, expected: TerminalKind) -> bool {
match self {
Self::Single(kind) => kind == expected,
Self::IdentifierOrKeyword { identifier, kw } => match kw {
Expand All @@ -40,7 +40,7 @@ impl ScannedToken {
///
/// If the scanned token is an identifier, returns the specific keyword kind if the keyword is reserved,
/// otherwise returns the general identifier kind. For other tokens, returns the token kind itself.
pub fn unambiguous(self) -> TokenKind {
pub fn unambiguous(self) -> TerminalKind {
match self {
Self::Single(kind) => kind,
Self::IdentifierOrKeyword { identifier, kw } => match kw {
Expand All @@ -67,7 +67,7 @@ pub(crate) trait Lexer {
fn trailing_trivia(&self, input: &mut ParserContext<'_>) -> ParserResult;
#[doc(hidden)]
/// Returns valid grouping delimiters in the given lexical context.
fn delimiters<LexCtx: IsLexicalContext>() -> &'static [(TokenKind, TokenKind)];
fn delimiters<LexCtx: IsLexicalContext>() -> &'static [(TerminalKind, TerminalKind)];

/// Peeks the next token, including trivia. Does not advance the input.
fn peek_token<LexCtx: IsLexicalContext>(
Expand Down Expand Up @@ -98,7 +98,7 @@ pub(crate) trait Lexer {
fn parse_token<LexCtx: IsLexicalContext>(
&self,
input: &mut ParserContext<'_>,
kind: TokenKind,
kind: TerminalKind,
) -> ParserResult {
let start = input.position();
if !self
Expand All @@ -111,7 +111,7 @@ pub(crate) trait Lexer {
let end = input.position();

ParserResult::r#match(
vec![LabeledNode::anonymous(cst::Node::token(
vec![Edge::anonymous(cst::Node::terminal(
kind,
input.content(start.utf8..end.utf8),
))],
Expand All @@ -124,7 +124,7 @@ pub(crate) trait Lexer {
fn parse_token_with_trivia<LexCtx: IsLexicalContext>(
&self,
input: &mut ParserContext<'_>,
kind: TokenKind,
kind: TerminalKind,
) -> ParserResult {
let mut children = vec![];

Expand All @@ -144,7 +144,7 @@ pub(crate) trait Lexer {
return ParserResult::no_match(vec![kind]);
}
let end = input.position();
children.push(LabeledNode::anonymous(cst::Node::token(
children.push(Edge::anonymous(cst::Node::terminal(
kind,
input.content(start.utf8..end.utf8),
)));
Expand Down
14 changes: 7 additions & 7 deletions crates/codegen/runtime/cargo/src/runtime/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,9 @@ mod metaslang_cst {
pub enum KindTypes {}

impl metaslang_cst::KindTypes for KindTypes {
type NonTerminalKind = crate::kinds::RuleKind;
type TerminalKind = crate::kinds::TokenKind;
type EdgeKind = crate::kinds::NodeLabel;
type NonTerminalKind = crate::kinds::NonTerminalKind;
type TerminalKind = crate::kinds::TerminalKind;
type EdgeLabel = crate::kinds::EdgeLabel;
}
}

Expand All @@ -40,9 +40,9 @@ pub mod cst {
use super::metaslang_cst::KindTypes;

pub type Node = cst::Node<KindTypes>;
pub type RuleNode = cst::NonTerminalNode<KindTypes>;
pub type TokenNode = cst::TerminalNode<KindTypes>;
pub type LabeledNode = cst::LabeledNode<KindTypes>;
pub type NonTerminalNode = cst::NonTerminalNode<KindTypes>;
pub type TerminalNode = cst::TerminalNode<KindTypes>;
pub type Edge = cst::Edge<KindTypes>;
}

pub mod cursor {
Expand All @@ -51,7 +51,7 @@ pub mod cursor {
use super::metaslang_cst::KindTypes;

pub type Cursor = cursor::Cursor<KindTypes>;
pub type CursorWithLabels = cursor::CursorWithLabels<KindTypes>;
pub type CursorWithEdges = cursor::CursorWithEdges<KindTypes>;
}

pub mod query {
Expand Down
Loading

0 comments on commit be943b7

Please sign in to comment.