Commit
update remaining public API and user docs terminology (#975)
Following up on changes in #971 and #918.
OmarTawfik authored May 27, 2024
1 parent 099f2d5 commit 46b1dde
Showing 139 changed files with 4,354 additions and 4,345 deletions.
5 changes: 5 additions & 0 deletions .changeset/honest-shirts-enjoy.md
@@ -0,0 +1,5 @@
---
"@nomicfoundation/slang": minor
---

rename `QueryResult` to `QueryMatch`, and its `bindings` to `captures`.
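
Since `QueryMatch` and `captures` appear nowhere else in this diff, here is a minimal, self-contained Rust sketch of the renamed shape; the stand-in types below are illustrative only and are not the actual Slang API.

```rust
use std::collections::HashMap;

// Illustrative stand-ins only; the real types are exported by the Slang runtime.
#[derive(Debug, Clone)]
struct Node(String);

// Formerly `QueryResult` with a `bindings` field; now `QueryMatch` with `captures`.
#[derive(Debug)]
struct QueryMatch {
    captures: HashMap<String, Vec<Node>>,
}

fn print_captures(m: &QueryMatch) {
    for (name, nodes) in &m.captures {
        println!("capture `{name}` matched {} node(s)", nodes.len());
    }
}

fn main() {
    let m = QueryMatch {
        captures: HashMap::from([(
            "identifier".to_string(),
            vec![Node("foo".to_string())],
        )]),
    };
    print_captures(&m);
}
```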
2 changes: 1 addition & 1 deletion .changeset/smooth-cougars-film.md
@@ -2,4 +2,4 @@
"@nomicfoundation/slang": minor
---

Rename RuleKind, TerminalKind and NodeLabel
Rename `RuleKind` to `NonterminalKind`, `TokenKind` to `TerminalKind`, and `NodeLabel` to `EdgeLabel`.
1 change: 1 addition & 0 deletions .cspell.json
@@ -19,6 +19,7 @@
"nomic",
"nomicfoundation",
"nonterminal",
"nonterminals",
"rustup",
"struct",
"structs",
2 changes: 1 addition & 1 deletion crates/codegen/ebnf/src/model.rs
@@ -5,7 +5,7 @@ use codegen_language_definition::model::Identifier;
///
/// Additionally, it computes the EBNF ID from the name, according to the following rules:
///
/// - For non-terminals, we use the original name in `PascalCase`.
/// - For nonterminals, we use the original name in `PascalCase`.
/// - For terminals, we use the name in `SCREAMING_SNAKE_CASE`.
/// - For fragments, we add `«guillemets»` around the name.
#[derive(derive_new::new)]
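
The doc comment above lists the EBNF ID rules; the following self-contained sketch applies the same convention. The `GrammarSymbol` enum and helper names are illustrative and not part of the crate.

```rust
// Illustrative only: mirrors the naming rules described in the doc comment above.
enum GrammarSymbol<'a> {
    Nonterminal(&'a str), // already PascalCase in the language definition
    Terminal(&'a str),
    Fragment(&'a str),
}

fn ebnf_id(symbol: &GrammarSymbol<'_>) -> String {
    match symbol {
        // Nonterminals keep their original `PascalCase` name.
        GrammarSymbol::Nonterminal(name) => (*name).to_string(),
        // Terminals are rendered in `SCREAMING_SNAKE_CASE`.
        GrammarSymbol::Terminal(name) => to_screaming_snake_case(name),
        // Fragments are wrapped in «guillemets».
        GrammarSymbol::Fragment(name) => format!("«{name}»"),
    }
}

fn to_screaming_snake_case(name: &str) -> String {
    let mut out = String::new();
    for (i, ch) in name.chars().enumerate() {
        if ch.is_uppercase() && i > 0 {
            out.push('_');
        }
        out.push(ch.to_ascii_uppercase());
    }
    out
}

fn main() {
    assert_eq!(ebnf_id(&GrammarSymbol::Nonterminal("SourceUnit")), "SourceUnit");
    assert_eq!(ebnf_id(&GrammarSymbol::Terminal("OpenBrace")), "OPEN_BRACE");
    assert_eq!(ebnf_id(&GrammarSymbol::Fragment("IdentifierStart")), "«IdentifierStart»");
}
```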
4 changes: 2 additions & 2 deletions crates/codegen/language/definition/src/model/mod.rs
@@ -1,11 +1,11 @@
mod item;
mod manifest;
mod non_terminals;
mod nonterminals;
mod terminals;
mod utils;

pub use item::*;
pub use manifest::*;
pub use non_terminals::*;
pub use nonterminals::*;
pub use terminals::*;
pub use utils::*;
@@ -21,7 +21,7 @@ pub struct FieldDelimiters {
///
/// By default, we assume no lookahead (0) is required to recover from
/// unrecognized body between delimiters, so it's always triggered.
pub tokens_matched_acceptance_threshold: Option<u8>,
pub terminals_matched_acceptance_threshold: Option<u8>,
}

#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
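
A hedged sketch of what `terminals_matched_acceptance_threshold` appears to govern, based on the doc comment above: recovery of an unrecognized body between delimiters is accepted once at least the configured number of terminals has matched, and the `None` default (treated as 0) means it is always triggered. The helper below is illustrative only; the real logic lives in the parser support code, which is not shown in this diff.

```rust
// Illustrative only: shows how an Option<u8> acceptance threshold defaulting to 0
// could gate delimiter-based error recovery.
fn recovery_accepted(terminals_matched: usize, threshold: Option<u8>) -> bool {
    // With the default of `None` (treated as 0), recovery is always triggered.
    terminals_matched >= usize::from(threshold.unwrap_or(0))
}

fn main() {
    assert!(recovery_accepted(0, None)); // default: always recover
    assert!(!recovery_accepted(1, Some(3))); // need 3 matched terminals before accepting
    assert!(recovery_accepted(3, Some(3)));
}
```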
4 changes: 2 additions & 2 deletions crates/codegen/runtime/cargo/src/runtime/generated/kinds.rs

Some generated files are not rendered by default.

17 changes: 9 additions & 8 deletions crates/codegen/runtime/cargo/src/runtime/generated/language.rs

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions crates/codegen/runtime/cargo/src/runtime/kinds.rs.jinja2
@@ -16,7 +16,7 @@ use napi_derive::napi;
)]
#[cfg_attr(feature = "slang_napi_interfaces", /* derives `Clone` and `Copy` */ napi(string_enum, namespace = "kinds"))]
#[cfg_attr(not(feature = "slang_napi_interfaces"), derive(Clone, Copy))]
pub enum NonTerminalKind {
pub enum NonterminalKind {
{%- if rendering_in_stubs -%}
Stub1,
Stub2,
@@ -29,7 +29,7 @@ pub enum NonTerminalKind {
{%- endif -%}
}

impl metaslang_cst::NonTerminalKind for NonTerminalKind {}
impl metaslang_cst::NonterminalKind for NonterminalKind {}

#[derive(
Debug,
37 changes: 19 additions & 18 deletions crates/codegen/runtime/cargo/src/runtime/language.rs.jinja2
@@ -14,15 +14,16 @@ use napi_derive::napi;

use crate::cst;
use crate::kinds::{
EdgeLabel, IsLexicalContext, LexicalContext, LexicalContextType, NonTerminalKind, TerminalKind,
EdgeLabel, IsLexicalContext, LexicalContext, LexicalContextType, NonterminalKind, TerminalKind,
};
use crate::lexer::{KeywordScan, Lexer, ScannedToken};
use crate::lexer::{KeywordScan, Lexer, ScannedTerminal};
#[cfg(feature = "slang_napi_interfaces")]
use crate::napi_interface::parse_output::ParseOutput as NAPIParseOutput;
use crate::parse_output::ParseOutput;
use crate::parser_support::{
ChoiceHelper, OneOrMoreHelper, OptionalHelper, ParserContext, ParserFunction, ParserResult,
PrecedenceHelper, SeparatedHelper, SequenceHelper, TokenAcceptanceThreshold, ZeroOrMoreHelper,
PrecedenceHelper, SeparatedHelper, SequenceHelper, TerminalAcceptanceThreshold,
ZeroOrMoreHelper,
};

#[derive(Debug)]
@@ -115,13 +116,13 @@ impl Language {

{% endif %}

pub fn parse(&self, kind: NonTerminalKind, input: &str) -> ParseOutput {
pub fn parse(&self, kind: NonterminalKind, input: &str) -> ParseOutput {
{%- if rendering_in_stubs -%}
unreachable!("Attempting to parse in stubs: {kind}: {input}")
{%- else -%}
match kind {
{%- for parser_name, _ in model.parser.parser_functions -%}
NonTerminalKind::{{ parser_name }} => Self::{{ parser_name | snake_case }}.parse(self, input),
NonterminalKind::{{ parser_name }} => Self::{{ parser_name | snake_case }}.parse(self, input),
{%- endfor -%}
}
{%- endif -%}
@@ -161,21 +162,21 @@ impl Lexer for Language {
{%- endif -%}
}

fn next_token<LexCtx: IsLexicalContext>(&self, input: &mut ParserContext<'_>) -> Option<ScannedToken> {
fn next_terminal<LexCtx: IsLexicalContext>(&self, input: &mut ParserContext<'_>) -> Option<ScannedTerminal> {
{%- if rendering_in_stubs -%}
unreachable!("Invoking next_token in stubs: {input:#?}")
unreachable!("Invoking next_terminal in stubs: {input:#?}")
{%- else -%}
let save = input.position();
let mut furthest_position = input.position();
let mut longest_token = None;
let mut longest_terminal = None;

macro_rules! longest_match {
($( { $kind:ident = $function:ident } )*) => {
$(
if self.$function(input) && input.position() > furthest_position {
furthest_position = input.position();

longest_token = Some(TerminalKind::$kind);
longest_terminal = Some(TerminalKind::$kind);
}
input.set_position(save);
)*
@@ -187,7 +188,7 @@ impl Lexer for Language {
LexicalContext::{{ context_name }} => {
if let Some(kind) = {{ context.literal_scanner }} {
furthest_position = input.position();
longest_token = Some(kind);
longest_terminal = Some(kind);
}
input.set_position(save);

@@ -206,7 +207,7 @@
}

// We have an identifier; we need to check if it's a keyword
if let Some(identifier) = longest_token.filter(|tok|
if let Some(identifier) = longest_terminal.filter(|tok|
[
{% for name in context.promotable_identifier_scanners %}
TerminalKind::{{ name }},
@@ -246,21 +247,21 @@
{% endif %}

input.set_position(furthest_position);
return Some(ScannedToken::IdentifierOrKeyword { identifier, kw: kw_scan });
return Some(ScannedTerminal::IdentifierOrKeyword { identifier, kw: kw_scan });
}
},
{%- endfor %}
}

match longest_token {
Some(token) => {
match longest_terminal {
Some(terminal) => {
input.set_position(furthest_position);
Some(ScannedToken::Single(token))
Some(ScannedTerminal::Single(terminal))
},
// Skip a character if possible and if we didn't recognize a token
// Skip a character if possible and if we didn't recognize a terminal
None if input.peek().is_some() => {
let _ = input.next();
Some(ScannedToken::Single(TerminalKind::SKIPPED))
Some(ScannedTerminal::Single(TerminalKind::SKIPPED))
},
None => None,
}
@@ -294,7 +295,7 @@ impl Language {
#[napi(js_name = "parse", ts_return_type = "parse_output.ParseOutput", catch_unwind)]
pub fn parse_napi(
&self,
#[napi(ts_arg_type = "kinds.NonTerminalKind")] kind: NonTerminalKind,
#[napi(ts_arg_type = "kinds.NonterminalKind")] kind: NonterminalKind,
input: String
) -> NAPIParseOutput {
self.parse(kind, input.as_str()).into()
48 changes: 24 additions & 24 deletions crates/codegen/runtime/cargo/src/runtime/lexer.rs
@@ -16,15 +16,15 @@ pub enum KeywordScan {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ScannedToken {
pub enum ScannedTerminal {
Single(TerminalKind),
IdentifierOrKeyword {
identifier: TerminalKind,
kw: KeywordScan,
},
}

impl ScannedToken {
impl ScannedTerminal {
pub fn accepted_as(self, expected: TerminalKind) -> bool {
match self {
Self::Single(kind) => kind == expected,
@@ -36,10 +36,10 @@ impl ScannedToken {
}
}

/// Returns the most general token kind that can be accepted for the scanned token.
/// Returns the most general terminal kind that can be accepted for the scanned terminal.
///
/// If the scanned token is an identifier, returns the specific keyword kind if the keyword is reserved,
/// otherwise returns the general identifier kind. For other tokens, returns the token kind itself.
/// If the scanned terminal is an identifier, returns the specific keyword kind if the keyword is reserved,
/// otherwise returns the general identifier kind. For other terminals, returns the terminal kind itself.
pub fn unambiguous(self) -> TerminalKind {
match self {
Self::Single(kind) => kind,
@@ -56,10 +56,10 @@
pub(crate) trait Lexer {
// Generated by the templating engine
#[doc(hidden)]
fn next_token<LexCtx: IsLexicalContext>(
fn next_terminal<LexCtx: IsLexicalContext>(
&self,
input: &mut ParserContext<'_>,
) -> Option<ScannedToken>;
) -> Option<ScannedTerminal>;
// NOTE: These are context-insensitive
#[doc(hidden)]
fn leading_trivia(&self, input: &mut ParserContext<'_>) -> ParserResult;
@@ -69,40 +69,40 @@ pub(crate) trait Lexer {
/// Returns valid grouping delimiters in the given lexical context.
fn delimiters<LexCtx: IsLexicalContext>() -> &'static [(TerminalKind, TerminalKind)];

/// Peeks the next token, including trivia. Does not advance the input.
fn peek_token<LexCtx: IsLexicalContext>(
/// Peeks the next terminal, including trivia. Does not advance the input.
fn peek_terminal<LexCtx: IsLexicalContext>(
&self,
input: &mut ParserContext<'_>,
) -> Option<ScannedToken> {
) -> Option<ScannedTerminal> {
let start = input.position();
let token = self.next_token::<LexCtx>(input);
let terminal = self.next_terminal::<LexCtx>(input);
input.set_position(start);
token
terminal
}

/// Peeks the next significant (i.e. non-trivia) token. Does not advance the input.
fn peek_token_with_trivia<LexCtx: IsLexicalContext>(
/// Peeks the next significant (i.e. non-trivia) terminal. Does not advance the input.
fn peek_terminal_with_trivia<LexCtx: IsLexicalContext>(
&self,
input: &mut ParserContext<'_>,
) -> Option<ScannedToken> {
) -> Option<ScannedTerminal> {
let start = input.position();

let _ = self.leading_trivia(input);
let token = self.next_token::<LexCtx>(input);
let terminal = self.next_terminal::<LexCtx>(input);

input.set_position(start);
token
terminal
}

/// Attempts to consume the next expected token. Advances the input only if the token matches.
fn parse_token<LexCtx: IsLexicalContext>(
/// Attempts to consume the next expected terminal. Advances the input only if the terminal matches.
fn parse_terminal<LexCtx: IsLexicalContext>(
&self,
input: &mut ParserContext<'_>,
kind: TerminalKind,
) -> ParserResult {
let start = input.position();
if !self
.next_token::<LexCtx>(input)
.next_terminal::<LexCtx>(input)
.is_some_and(|t| t.accepted_as(kind))
{
input.set_position(start);
@@ -119,9 +119,9 @@
)
}

/// Attempts to consume the next significant token including both leading and trailing trivia.
/// Advances the input only if the token matches.
fn parse_token_with_trivia<LexCtx: IsLexicalContext>(
/// Attempts to consume the next significant terminal including both leading and trailing trivia.
/// Advances the input only if the terminal matches.
fn parse_terminal_with_trivia<LexCtx: IsLexicalContext>(
&self,
input: &mut ParserContext<'_>,
kind: TerminalKind,
@@ -137,7 +137,7 @@

let start = input.position();
if !self
.next_token::<LexCtx>(input)
.next_terminal::<LexCtx>(input)
.is_some_and(|t| t.accepted_as(kind))
{
input.set_position(restore);
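
The diff collapses the bodies of `accepted_as` and `unambiguous`, so the following self-contained sketch reconstructs the resolution rule that the doc comment above describes: reserved keywords resolve to the keyword kind, unreserved ones fall back to the general identifier kind. The `KeywordScan` variants here are assumed for illustration and may not match the crate exactly.

```rust
// Illustrative stand-ins for the kinds used by the runtime's lexer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TerminalKind {
    Identifier,
    AbstractKeyword,
}

// Assumed variants, for illustration only.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum KeywordScan {
    Absent,
    Present(TerminalKind),
    Reserved(TerminalKind),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ScannedTerminal {
    Single(TerminalKind),
    IdentifierOrKeyword { identifier: TerminalKind, kw: KeywordScan },
}

impl ScannedTerminal {
    // Mirrors the doc comment: reserved keywords resolve to the keyword kind,
    // everything else resolves to the scanned terminal's own kind.
    fn unambiguous(self) -> TerminalKind {
        match self {
            Self::Single(kind) => kind,
            Self::IdentifierOrKeyword { identifier, kw } => match kw {
                KeywordScan::Reserved(kind) => kind,
                KeywordScan::Present(_) | KeywordScan::Absent => identifier,
            },
        }
    }
}

fn main() {
    let scanned = ScannedTerminal::IdentifierOrKeyword {
        identifier: TerminalKind::Identifier,
        kw: KeywordScan::Reserved(TerminalKind::AbstractKeyword),
    };
    assert_eq!(scanned.unambiguous(), TerminalKind::AbstractKeyword);
}
```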
4 changes: 2 additions & 2 deletions crates/codegen/runtime/cargo/src/runtime/mod.rs
@@ -28,7 +28,7 @@ mod metaslang_cst {
pub enum KindTypes {}

impl metaslang_cst::KindTypes for KindTypes {
type NonTerminalKind = crate::kinds::NonTerminalKind;
type NonterminalKind = crate::kinds::NonterminalKind;
type TerminalKind = crate::kinds::TerminalKind;
type EdgeLabel = crate::kinds::EdgeLabel;
}
@@ -40,7 +40,7 @@ pub mod cst {
use super::metaslang_cst::KindTypes;

pub type Node = cst::Node<KindTypes>;
pub type NonTerminalNode = cst::NonTerminalNode<KindTypes>;
pub type NonterminalNode = cst::NonterminalNode<KindTypes>;
pub type TerminalNode = cst::TerminalNode<KindTypes>;
pub type Edge = cst::Edge<KindTypes>;
}