diff --git a/lib/text_builder/text_builder.ts b/lib/text_builder/text_builder.ts
index c46ad2f..27d0137 100644
--- a/lib/text_builder/text_builder.ts
+++ b/lib/text_builder/text_builder.ts
@@ -5,7 +5,7 @@ import {
   CartridgeEvent,
   PropertyDefinition,
 } from "../cartridge/mod.ts";
-import { Lexicon, Token } from "../tokenize/mod.ts";
+import type { Token } from "../tokenize/mod.ts";
 import {
   makeFileEndEventContext,
   makeFileStartEventContext,
@@ -35,12 +35,12 @@ export class TextBuilder {
   ): Promise<void>;
   public async append(
     event: CartridgeEvent.InlineComment,
-    tokens: Token[],
+    tokens: [Token],
     comments: Token[],
   ): Promise<void>;
   public async append(
     event: CartridgeEvent.MultilineComment,
-    tokens: Token[],
+    tokens: [Token],
     comments: Token[],
   ): Promise<void>;
   public async append(
@@ -63,7 +63,7 @@ export class TextBuilder {
   ): Promise<void>;
   public async append(
     event: CartridgeEvent.StructClose,
-    tokens: Token[],
+    tokens: [Token],
     comments: Token[],
   ): Promise<void>;
   public async append(
@@ -90,14 +90,14 @@
       case CartridgeEvent.InlineComment: {
         code = await this.cartridge.dispatch(
           CartridgeEvent.InlineComment,
-          makeInlineCommentEventContext(this.currentBlock, tokens, comments),
+          makeInlineCommentEventContext(this.currentBlock, tokens),
         );
         break;
       }
       case CartridgeEvent.MultilineComment: {
         code = await this.cartridge.dispatch(
           CartridgeEvent.MultilineComment,
-          makeMultilineCommentEventContext(this.currentBlock, tokens, comments),
+          makeMultilineCommentEventContext(this.currentBlock, tokens),
         );
         break;
       }
diff --git a/lib/text_builder/utils.test.ts b/lib/text_builder/utils.test.ts
index 3fb6057..7642a39 100644
--- a/lib/text_builder/utils.test.ts
+++ b/lib/text_builder/utils.test.ts
@@ -1,5 +1,6 @@
 import { assertEquals } from "../../deps/std/testing.ts";
-import { T } from "../tokenize/mod.ts";
+import { T, Token } from "../tokenize/mod.ts";
+import { CodeBlock } from "../code_block/mod.ts";
 import {
   cleanComment,
   makeFileEndEventContext,
@@ -11,6 +12,11 @@ import {
   makeStructCloseEventContext,
   makeStructOpenEventContext,
 } from "./utils.ts";
+import { CartridgeEvent } from "../cartridge/mod.ts";
+import type {
+  CartridgeEventContext,
+  PropertyDefinition,
+} from "../cartridge/mod.ts";
 
 Deno.test("cleans inlined comments to extract text content", () => {
   const expectation = ["example"];
@@ -35,3 +41,153 @@ Deno.test("cleans multi-inlined comments to extract text content (omits whitespa
   ));
   assertEquals(expectation, reality);
 });
+
+Deno.test("successfully makes a 'file_end' event context object", () => {
+  const code = new CodeBlock();
+  const data = null;
+  const tokens: Token[] = [];
+  const expectation: CartridgeEventContext<CartridgeEvent.FileEnd> = {
+    type: CartridgeEvent.FileEnd,
+    code,
+    data,
+    tokens,
+  };
+  const reality = makeFileEndEventContext(code, tokens);
+  assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'file_start' event context object", () => {
+  const code = new CodeBlock();
+  const data = null;
+  const tokens: Token[] = [];
+  const expectation: CartridgeEventContext<CartridgeEvent.FileStart> = {
+    type: CartridgeEvent.FileStart,
+    code,
+    data,
+    tokens,
+  };
+  const reality = makeFileStartEventContext(code, tokens);
+  assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes an 'inline_comment' event context object", () => {
+  const code = new CodeBlock();
+  const tokens: Token[] = [T.comment("; example", 1, 1)];
+  const expectation: CartridgeEventContext<CartridgeEvent.InlineComment> = {
+    type: CartridgeEvent.InlineComment,
+    code,
+    data: {
+      comments: ["example"],
+    },
+    tokens,
+  };
+  const reality = makeInlineCommentEventContext(code, tokens);
+  assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'multiline_comment' event context object", () => {
+  const code = new CodeBlock();
+  const tokens: Token[] = [T.comment("; example", 1, 1)];
+  const expectation: CartridgeEventContext<CartridgeEvent.MultilineComment> = {
+    type: CartridgeEvent.MultilineComment,
+    code,
+    tokens,
+    data: {
+      comments: ["example"],
+    },
+  };
+  const reality = makeMultilineCommentEventContext(code, tokens);
+  assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'load' event context object", () => {
+  const code = new CodeBlock();
+  const source = "./example.fart";
+  const dependencies = ["Example1", "Example2", "Example3"];
+  const tokens: Token[] = [
+    T.load(1, 1),
+    T.text_1(source, 1, 6),
+    T.nest(1, 23),
+    T.id("Example1", 2, 3),
+    T.separator(2, 11),
+    T.id("Example2", 3, 3),
+    T.separator(3, 11),
+    T.id("Example3", 4, 3),
+    T.separator(4, 11),
+    T.denest(5, 1),
+  ];
+  const expectation: CartridgeEventContext<CartridgeEvent.Load> = {
+    type: CartridgeEvent.Load,
+    code,
+    tokens,
+    data: { source, dependencies, comments: [] },
+  };
+  const reality = makeLoadEventContext(
+    code,
+    tokens,
+    /*comments=*/ [],
+    source,
+    dependencies,
+  );
+  assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'set_property' event context object", () => {
+  const code = new CodeBlock();
+  const name = "property";
+  const definition: PropertyDefinition = { value: "number" };
+  const tokens: Token[] = [
+    T.id(name, 2, 3),
+    T.setter_1(2, 11),
+    T.id("number", 2, 13),
+  ];
+  const expectation: CartridgeEventContext<CartridgeEvent.SetProperty> = {
+    type: CartridgeEvent.SetProperty,
+    code,
+    tokens,
+    data: { name, definition, comments: [] },
+  };
+  const reality = makeSetPropertyEventContext(
+    code,
+    tokens,
+    /*comments=*/ [],
+    name,
+    definition,
+  );
+  assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'struct_close' event context object", () => {
+  const code = new CodeBlock();
+  const tokens: Token[] = [];
+  const expectation: CartridgeEventContext<CartridgeEvent.StructClose> = {
+    type: CartridgeEvent.StructClose,
+    code,
+    tokens,
+    data: null,
+  };
+  const reality = makeStructCloseEventContext(code, tokens);
+  assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'struct_open' event context object", () => {
+  const code = new CodeBlock();
+  const tokens: Token[] = [T.type(1, 1), T.id("Example", 1, 6), T.nest(1, 14)];
+  const name = "Example";
+  const expectation: CartridgeEventContext<CartridgeEvent.StructOpen> = {
+    type: CartridgeEvent.StructOpen,
+    code,
+    tokens,
+    data: {
+      name,
+      comments: [],
+    },
+  };
+  const reality = makeStructOpenEventContext(
+    code,
+    tokens,
+    /*comments=*/ [],
+    name,
+  );
+  assertEquals(expectation, reality);
+});
diff --git a/lib/text_builder/utils.ts b/lib/text_builder/utils.ts
index 298ec1d..34d1929 100644
--- a/lib/text_builder/utils.ts
+++ b/lib/text_builder/utils.ts
@@ -50,23 +50,21 @@ export const makeFileStartEventContext = (
 export const makeInlineCommentEventContext = (
   code: CodeBlock,
   tokens: Token[],
-  comments: Token[],
 ): CartridgeEventContext<CartridgeEvent.InlineComment> => ({
   type: CartridgeEvent.InlineComment,
   code,
   tokens,
-  data: { comments: comments.flatMap(cleanComment) },
+  data: { comments: tokens.flatMap(cleanComment) },
 });
 
 export const makeMultilineCommentEventContext = (
   code: CodeBlock,
   tokens: Token[],
-  comments: Token[],
 ): CartridgeEventContext<CartridgeEvent.MultilineComment> => ({
   type: CartridgeEvent.MultilineComment,
   code,
   tokens,
-  data: { comments: comments.flatMap(cleanComment) },
+  data: { comments: tokens.flatMap(cleanComment) },
 });
 
 export const makeLoadEventContext = (
diff --git a/lib/tokenize/lexicon.ts b/lib/tokenize/lexicon.ts
index de698bc..573565f 100644
--- a/lib/tokenize/lexicon.ts
+++ b/lib/tokenize/lexicon.ts
@@ -1,5 +1,6 @@
 export enum Lexicon {
   Identifier,
+  Load,
   StructOpener,
   StructCloser,
   TupleOpener,
@@ -26,6 +27,7 @@ export const LEXICON: LexiconType = new Map<
   string | string[] | null
 >([
   [Lexicon.Identifier, null],
+  [Lexicon.Load, "load"],
   [Lexicon.StructOpener, "{"],
   [Lexicon.StructCloser, "}"],
   [Lexicon.TupleOpener, "("],
diff --git a/lib/tokenize/t.ts b/lib/tokenize/t.ts
index 424464f..3caff3c 100644
--- a/lib/tokenize/t.ts
+++ b/lib/tokenize/t.ts
@@ -11,6 +11,8 @@ type SpecialTokenMaker = (raw: string, line: number, col: number) => Token;
 export interface LexiconAliasLayer {
   /** `___` — identifier */
   id: SpecialTokenMaker;
+  /** `load` — load statement */
+  load: SimpleTokenMaker;
   /** `{` — struct opener */
   nest: SimpleTokenMaker;
   /** `}` — struct closer */
@@ -52,6 +54,7 @@
 const makeSpecialToken: SpecialTokenMaker = (raw, line, col) =>
   new Token(raw, line, col);
 
+const LOAD = LEXICON.get(Lexicon.Load) as string;
 const NEST = LEXICON.get(Lexicon.StructOpener) as string;
 const DENEST = LEXICON.get(Lexicon.StructCloser) as string;
 const OPEN_TUPLE = LEXICON.get(Lexicon.TupleOpener) as string;
@@ -65,6 +68,7 @@ const SEPARATOR = LEXICON.get(Lexicon.Separator) as string;
 
 export const T: LexiconAliasLayer = {
   id: makeSpecialToken,
+  load: (line, col) => new Token(LOAD, line, col),
   nest: (line, col) => new Token(NEST, line, col),
   denest: (line, col) => new Token(DENEST, line, col),
   open_tuple: (line, col) => new Token(OPEN_TUPLE, line, col),
diff --git a/lib/tokenize/token.test.ts b/lib/tokenize/token.test.ts
index 49cfce0..07ee6c0 100644
--- a/lib/tokenize/token.test.ts
+++ b/lib/tokenize/token.test.ts
@@ -2,3 +2,11 @@
 // TODO(@ethandavidson): test `getKindOf` method
 // TODO(@ethandavidson): test `toString` method
 // TODO(@ethandavidson): test `value` method
+
+import { assertEquals } from "../../deps/std/testing.ts";
+import { Token } from "./token.ts";
+import { Lexicon } from "./lexicon.ts";
+
+Deno.test("creates a token with an empty string without crashing", () => {
+  assertEquals(new Token("").kind, Lexicon.Unknown);
+});
diff --git a/lib/tokenize/tokenize.test.ts b/lib/tokenize/tokenize.test.ts
index cf13c8c..4c6069e 100644
--- a/lib/tokenize/tokenize.test.ts
+++ b/lib/tokenize/tokenize.test.ts
@@ -309,3 +309,25 @@ spec Example
   const reality = [...tokenize(input)];
   assertEquals(expectation, reality);
 });
+
+Deno.test("yields tokens of proper `load` statement", () => {
+  const input = `load "./example.fart" {
+  Example1,
+  Example2,
+  Example3,
+}`;
+  const expectation = [
+    T.load(1, 1),
+    T.text_1("./example.fart", 1, 6),
+    T.nest(1, 23),
+    T.id("Example1", 2, 3),
+    T.separator(2, 11),
+    T.id("Example2", 3, 3),
+    T.separator(3, 11),
+    T.id("Example3", 4, 3),
+    T.separator(4, 11),
+    T.denest(5, 1),
+  ];
+  const reality = [...tokenize(input)];
+  assertEquals(expectation, reality);
+});