diff --git a/.github/workflows/check_lib.yaml b/.github/workflows/check_lib.yaml
new file mode 100644
index 0000000..6f61ecb
--- /dev/null
+++ b/.github/workflows/check_lib.yaml
@@ -0,0 +1,38 @@
+# This workflow sets up Deno, lints the source code, and runs the test suite with coverage across the listed Deno versions
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
+
+name: Fart Lib Check
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+jobs:
+ lib_check:
+ runs-on: ubuntu-latest
+
+ strategy:
+ matrix:
+ deno_version: [canary]
+ # See supported Deno releases at:
+ # https://github.com/denoland/deno/releases
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+
+ - name: Set up Deno ${{ matrix.deno_version }}
+ uses: denoland/setup-deno@v1
+ with:
+ deno-version: ${{ matrix.deno_version }}
+
+ - name: Lint
+ run: deno lint
+
+ - name: Test and Check Coverage
+ run: |
+ deno test lib --coverage=cov_profile
+ deno coverage cov_profile --lcov > cov_profile.lcov
+ deno run --allow-read --unstable devops/check_cov.ts
diff --git a/.gitignore b/.gitignore
index 8a900b7..b5dd95f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,5 @@
# Mac for Deno
-Users
\ No newline at end of file
+Users
+
+# Secrets
+.env
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 99e0296..6b66ee3 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -13,5 +13,5 @@
"https://cdn.skypack.dev": false,
"https://fart.tools": false
},
- "cSpell.words": ["typemap", "typemaps"]
+ "cSpell.words": ["transpiles", "typedefs", "typemap", "typemaps"]
}
diff --git a/deps/std/flags.ts b/deps/std/flags.ts
index e86c052..fc11c01 100644
--- a/deps/std/flags.ts
+++ b/deps/std/flags.ts
@@ -1 +1 @@
-export { parse } from "https://deno.land/std@0.110.0/flags/mod.ts";
+export { parse } from "https://deno.land/std@0.119.0/flags/mod.ts";
diff --git a/deps/std/fs.ts b/deps/std/fs.ts
index d0909a6..3b81acf 100644
--- a/deps/std/fs.ts
+++ b/deps/std/fs.ts
@@ -1 +1 @@
-export { exists, expandGlob } from "https://deno.land/std@0.110.0/fs/mod.ts";
+export { exists, expandGlob } from "https://deno.land/std@0.119.0/fs/mod.ts";
diff --git a/deps/std/path.ts b/deps/std/path.ts
index c433d2c..bc6187f 100644
--- a/deps/std/path.ts
+++ b/deps/std/path.ts
@@ -5,4 +5,4 @@ export {
join,
normalize,
parse,
-} from "https://deno.land/std@0.110.0/path/mod.ts";
+} from "https://deno.land/std@0.119.0/path/mod.ts";
diff --git a/deps/std/testing.ts b/deps/std/testing.ts
index 1e2508a..f159a73 100644
--- a/deps/std/testing.ts
+++ b/deps/std/testing.ts
@@ -2,4 +2,13 @@ export {
assert,
assertEquals,
assertThrows,
-} from "https://deno.land/std@0.110.0/testing/asserts.ts";
+} from "https://deno.land/std@0.119.0/testing/asserts.ts";
+export {
+ bench,
+ runBenchmarks,
+} from "https://deno.land/std@0.119.0/testing/bench.ts";
+export type {
+ BenchmarkResult,
+ BenchmarkRunProgress,
+ BenchmarkTimer,
+} from "https://deno.land/std@0.119.0/testing/bench.ts";
diff --git a/deps/third_party/octokit/rest.ts b/deps/third_party/octokit/rest.ts
index 529d74a..1a56343 100644
--- a/deps/third_party/octokit/rest.ts
+++ b/deps/third_party/octokit/rest.ts
@@ -1,4 +1,2 @@
+// More info: https://cdn.skypack.dev/@octokit/rest/rest.js
export { Octokit } from "https://cdn.skypack.dev/@octokit/rest@18.12.0";
-
-// More info:
-// https://cdn.skypack.dev/@octokit/rest/rest.js
diff --git a/devops/check_cov.ts b/devops/check_cov.ts
new file mode 100644
index 0000000..a490ab9
--- /dev/null
+++ b/devops/check_cov.ts
@@ -0,0 +1,59 @@
+/**
+ * Name: check_cov.ts
+ * Author: EthanThatOneKid
+ * Description: This script checks the coverage of the codebase.
+ *
+ * Handy Commands:
+ * - Generate cov: deno test lib --coverage=cov_profile & deno coverage cov_profile --lcov > cov_profile.lcov
+ * - Check cov: deno run --allow-read --unstable devops/check_cov.ts
+ * - Visualize cov: deno coverage cov_profile
+ */
+
+import { source as parseFile } from "https://cdn.skypack.dev/lcov-parse";
+
+interface LineDetail {
+ line: number;
+ hit: number;
+}
+
+interface FunctionDetail {
+ name: string;
+ line: number;
+ hit: number;
+}
+
+interface BranchDetail {
+ line: number;
+ block: number;
+ branch: number;
+ taken: number;
+}
+
+interface LcovResult {
+ file: string;
+ lines: { found: number; hit: number; details: LineDetail[] };
+ functions: { found: number; hit: number; details: FunctionDetail[] };
+ branches: { found: number; hit: number; details: BranchDetail[] };
+}
+
+const lcov = await Deno.readTextFile("./cov_profile.lcov");
+
+// TODO: Compute which files have uncovered code and its percentage.
+// TODO: Compute overall coverage percentage.
+parseFile(lcov, (errorMessage: string | null, results: LcovResult[]) => {
+ if (errorMessage !== null) {
+ return console.error(errorMessage);
+ }
+ for (const report of results) {
+ const uncoveredFns = report.functions.details.filter((fn) => fn.hit === 0);
+ if (uncoveredFns.length > 0) {
+ console.log("\nFile:", report.file);
+ for (const fn of uncoveredFns) {
+ console.log(
+ "Uncovered function!",
+ `${fn.name} (${report.file}:${fn.line})`,
+ );
+ }
+ }
+ }
+});
diff --git a/docs/getting-started.md b/docs/getting-started.md
index 4a3ef1a..963d1d8 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -8,3 +8,10 @@ self_link: https://fart.tools/getting-started
> Fart deserves better!
> Curious reader, visit .
+
+## Project Scripts
+
+- **Upgrade a Dependency**: `deno run --unstable --allow-read --allow-write devops/upgrade_dep.ts -y --verbose --dep=std --v=0.116.0`
+- **Run the CLI**: `deno run --allow-read --allow-write fart_cli/run.ts ./ex/pokemon/mod.fart --reg=ts --output=./ex/pokemon/mod.out.ts`
+- **Spin up Server**: `deno run --allow-net --allow-read --allow-env --unstable fart_server/serve_http.ts`
+- **Develop the Server**: `deployctl run --watch fart_server/worker.ts`
diff --git a/docs/scripts.md b/docs/scripts.md
deleted file mode 100644
index b45902b..0000000
--- a/docs/scripts.md
+++ /dev/null
@@ -1,10 +0,0 @@
----
-self_link: https://fart.tools/scripts
----
-
-# Project Scripts
-
-- **Upgrade a Dependency**: `deno run --unstable --allow-read --allow-write devops/upgrade_dep.ts -y --verbose --dep=std --v=0.110.0`
-- **Run the CLI**: `deno run --allow-read --allow-write std/cli/run.ts ./ex/pokemon/mod.fart --reg=ts --output=./ex/pokemon/mod.out.ts`
-- **Spin up Server**: `deno run --allow-net --allow-read --allow-env --unstable std/server/serve_http.ts`
-- **Develop the Server**: `deployctl run --watch std/server/worker.ts`
diff --git a/ex/README.md b/ex/README.md
deleted file mode 100644
index 97c3a33..0000000
--- a/ex/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Pokemon Fart Example
-
-Please refer to .
diff --git a/ex/generate-deno-cli/README.md b/ex/generate-deno-cli/README.md
deleted file mode 100644
index 936eed6..0000000
--- a/ex/generate-deno-cli/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-```bash
-deno run --reload --allow-read --allow-net https://fart.tools/ts.deno.cli/EthanThatOneKid/fart/main/std/fart/fart.ts compile --filepath="https://github.com/EthanThatOneKid/fart/raw/main/ex/pokemon/mod.fart" --cartridge_id="ts"
-```
diff --git a/ex/go-generate/README.md b/ex/go-generate/README.md
deleted file mode 100644
index 5f586c1..0000000
--- a/ex/go-generate/README.md
+++ /dev/null
@@ -1 +0,0 @@
-TODO: Create example using go:generate with Fart CLI.
diff --git a/ex/import_map.json b/ex/import_map.json
deleted file mode 100644
index 201fd36..0000000
--- a/ex/import_map.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "imports": {
- "fart/": "https://fart.deno.dev/ts/"
- }
-}
diff --git a/ex/pokemon/dex.ts b/ex/pokemon/dex.ts
deleted file mode 100644
index efd9083..0000000
--- a/ex/pokemon/dex.ts
+++ /dev/null
@@ -1,26 +0,0 @@
-import type { Dex as iDex } from "https://fart.tools/ts/EthanThatOneKid/fart/main/ex/pokemon/mod.ts";
-
-export class Dex implements iDex {
- constructor(
- public national = {
- [25 as number]: {
- name: "Pikachu",
- num: 25,
- caught: false,
- summary:
- "This Pokémon has electricity-storing pouches on its cheeks. These appear to become electrically charged during the night while Pikachu sleeps. It occasionally discharges electricity when it is dozy after waking up.",
- types: { type1: "Electric" },
- },
- },
- ) {}
-
- register(num: number) {
- const { name } = this.national[num];
- if (this.national[num].caught) {
- console.log(`${name} has already been registerd.`);
- return;
- }
- this.national[num].caught = true;
- console.log(`Registered ${name}!`);
- }
-}
diff --git a/ex/pokemon/mod.fart b/ex/pokemon/mod.fart
deleted file mode 100644
index 9a72d9a..0000000
--- a/ex/pokemon/mod.fart
+++ /dev/null
@@ -1,38 +0,0 @@
-type Pokeball {
- id*: string
- odds*: number
- used*: boolean
-
- throw*: fn %
-}
-
-type Pokemon {
- name*: string
- num*: number
- ball: Pokeball
-
- catch*: fn %
-}
-
-type PC {
- mons*: array % Pokemon
-}
-
-type DexEntry {
- name*: string
- num*: number
- summary*: string
- caught*: boolean
- types*: { type1*: string
- type2: string }
-}
-
-type Dex {
- national*: dict %
- register*: fn %
-}
-
-type Bag {
- dex*: Dex
- balls*: array % Pokeball
-}
\ No newline at end of file
diff --git a/ex/pokemon/pokeball.ts b/ex/pokemon/pokeball.ts
deleted file mode 100644
index da06278..0000000
--- a/ex/pokemon/pokeball.ts
+++ /dev/null
@@ -1,27 +0,0 @@
-import type { Pokeball as iPokeball } from "https://fart.tools/ts/EthanThatOneKid/fart/main/ex/pokemon/mod.ts";
-
-const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
-
-export class Pokeball implements iPokeball {
- constructor(
- public id: string,
- public odds: number,
- public used: boolean = false,
- ) {}
-
- async throw(name: string): Promise {
- if (this.used) return false;
- this.used = true;
- console.log("wiggle");
- await sleep(1e3);
- const caught = Math.random() > (1 - this.odds);
- console.log("wiggle");
- await sleep(1e3);
- if (caught) {
- console.log(`Caught ${name}`);
- return true;
- }
- console.log(`${name} broke out!`);
- return false;
- }
-}
diff --git a/ex/pokemon/run.ts b/ex/pokemon/run.ts
deleted file mode 100644
index d0837f2..0000000
--- a/ex/pokemon/run.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-// Source Fart:
-import type {
- Bag,
- PC,
- Pokemon,
-} from "https://fart.tools/ts/EthanThatOneKid/fart/main/ex/pokemon/mod.ts";
-
-// Extended Pokeball class
-import { Pokeball } from "./pokeball.ts";
-
-// Extended Dex class
-import { Dex } from "./dex.ts";
-
-// Your stuff
-const dex = new Dex();
-const great_ball = new Pokeball("great", 0.5);
-const ultra_ball = new Pokeball("ultra", 0.8);
-const bag: Bag = { dex, balls: [great_ball, ultra_ball] };
-const pc: PC = { mons: [] };
-
-// A wild Pikachu
-const pikachu: Pokemon = {
- name: "Pikachu",
- num: 25,
- async catch(ball: Pokeball) {
- const caught = await ball.throw(this.name);
- if (caught) {
- this.ball = ball;
- bag.dex.register(this.num);
- pc.mons.push(this);
- console.log(`Moved ${this.name} to the PC.`);
- }
- return caught;
- },
-};
-
-// Try to catch it
-for (const ball of bag.balls) {
- const caught = await pikachu.catch(ball);
- if (caught) break;
-}
-
-// Check the PC
-console.log("PC: ", pc);
diff --git a/fart_server/README.md b/fart_server/README.md
new file mode 100644
index 0000000..407b029
--- /dev/null
+++ b/fart_server/README.md
@@ -0,0 +1,9 @@
+# Fart Server
+
+## Usage
+
+### Spin up local Fart server
+
+```bash
+deno run --allow-env --allow-net fart_server/serve.ts
+```
diff --git a/fart_server/bonus_features/doc_generator/deno_doc.ts b/fart_server/bonus_features/doc_generator/deno_doc.ts
new file mode 100644
index 0000000..a7e1326
--- /dev/null
+++ b/fart_server/bonus_features/doc_generator/deno_doc.ts
@@ -0,0 +1,12 @@
+// deno-lint-ignore-file
+
+/**
+ * @param url ex:
+ * @returns raw HTML generated from [`deno doc`](https://deno.land/manual@v1.16.3/tools/documentation_generator)
+ * @todo @ethanthatonekid mirror https://doc.deno.land/https/deno.land%2Fx%2Ffart%40v0.1%2Flib%2Ffart.ts
+ * @todo @ethanthatonekid serve any static files
+ * @todo @ethanthatonekid remove deno-lint-ignore-file
+ */
+export const fetchDenoDoc = async (url: string): Promise<string> => {
+ return "";
+};
diff --git a/fart_server/bonus_features/doc_generator/gh_doc.ts b/fart_server/bonus_features/doc_generator/gh_doc.ts
new file mode 100644
index 0000000..aaf7e5a
--- /dev/null
+++ b/fart_server/bonus_features/doc_generator/gh_doc.ts
@@ -0,0 +1,10 @@
+// deno-lint-ignore-file
+
+/**
+ * @param url ex:
+ * @returns raw HTML of parsed documentation found on
+ * @todo @ethanthatonekid refactor https://github.com/EthanThatOneKid/fart/blob/c43f2333458b2cbc40d167610d87e2a2e3f89885/std/server/middleware/gh_docs.ts
+ */
+export const fetchGitHubDoc = async (url: string): Promise<string> => {
+ return "";
+};
diff --git a/fart_server/bonus_features/doc_generator/mod.ts b/fart_server/bonus_features/doc_generator/mod.ts
new file mode 100644
index 0000000..1463c0b
--- /dev/null
+++ b/fart_server/bonus_features/doc_generator/mod.ts
@@ -0,0 +1,2 @@
+export { fetchGitHubDoc } from "./gh_doc.ts";
+export { fetchDenoDoc } from "./deno_doc.ts";
diff --git a/fart_server/bonus_features/shortlinks/mod.ts b/fart_server/bonus_features/shortlinks/mod.ts
new file mode 100644
index 0000000..b9c6775
--- /dev/null
+++ b/fart_server/bonus_features/shortlinks/mod.ts
@@ -0,0 +1 @@
+export { redirectIfShortlink } from "./shortlinks.ts";
diff --git a/fart_server/bonus_features/shortlinks/shortlinks.json b/fart_server/bonus_features/shortlinks/shortlinks.json
new file mode 100644
index 0000000..606c1c4
--- /dev/null
+++ b/fart_server/bonus_features/shortlinks/shortlinks.json
@@ -0,0 +1,5 @@
+{
+ "/design": "https://docs.google.com/document/d/1pGNLsDr-WysIIqB4nc1pTCL8FMmPxkJMNoGsRMkA0TY/edit",
+ "/github": "https://github.com/EthanThatOneKid/fart",
+ "/author": "https://etok.codes"
+}
diff --git a/fart_server/bonus_features/shortlinks/shortlinks.test.ts b/fart_server/bonus_features/shortlinks/shortlinks.test.ts
new file mode 100644
index 0000000..27f776b
--- /dev/null
+++ b/fart_server/bonus_features/shortlinks/shortlinks.test.ts
@@ -0,0 +1,36 @@
+import { assertEquals } from "../../../deps/std/testing.ts";
+import { redirectIfShortlink } from "./shortlinks.ts";
+import shortlinks from "./shortlinks.json" assert { type: "json" };
+
+Deno.test("shortlink redirects to GitHub repository", () => {
+ const request = new Request("http://localhost:8080/github");
+ const response = redirectIfShortlink(request);
+ assertEquals(response?.status, 302);
+ assertEquals(response?.headers.get("location"), shortlinks["/github"] + "/");
+});
+
+Deno.test("shortlink redirects to GitHub repository and preserves path", () => {
+ const request = new Request(
+ "http://localhost:8080/github/milestone/1?closed=1",
+ );
+ const response = redirectIfShortlink(request);
+ assertEquals(response?.status, 302);
+ assertEquals(
+ response?.headers.get("location"),
+ shortlinks["/github"] +
+ "/milestone/1?closed=1",
+ );
+});
+
+Object.entries(shortlinks)
+ .forEach(([shortlink, destination]) => {
+ // add a slash if the destination doesn't end with one
+ if (!destination.endsWith("/")) destination += "/";
+
+ Deno.test(`shortlink ${shortlink} redirects to ${destination}`, () => {
+ const request = new Request("http://localhost:8080" + shortlink);
+ const response = redirectIfShortlink(request);
+ assertEquals(response?.status, 302);
+ assertEquals(response?.headers.get("location"), destination);
+ });
+ });
diff --git a/fart_server/bonus_features/shortlinks/shortlinks.ts b/fart_server/bonus_features/shortlinks/shortlinks.ts
new file mode 100644
index 0000000..d76f006
--- /dev/null
+++ b/fart_server/bonus_features/shortlinks/shortlinks.ts
@@ -0,0 +1,21 @@
+import shortlinks from "./shortlinks.json" assert { type: "json" };
+
+const map = Object.entries(shortlinks)
+ .reduce((result, [key, value]) => {
+ result.set(key, value);
+ return result;
+ }, new Map());
+
+export const redirectIfShortlink = (request: Request): Response | null => {
+ const { pathname, searchParams } = new URL(request.url);
+ for (const [shortlink, target] of map) {
+ if (pathname === shortlink || pathname.startsWith(shortlink + "/")) {
+ let destination = target + pathname.slice(shortlink.length);
+ if (searchParams.toString()) destination += "?" + searchParams;
+ // add a slash if the destination doesn't end with one
+ else if (!destination.endsWith("/")) destination += "/";
+ return Response.redirect(destination, 302);
+ }
+ }
+ return null;
+};
diff --git a/fart_server/bonus_features/versions/deno_deploy_redirect.ts b/fart_server/bonus_features/versions/deno_deploy_redirect.ts
new file mode 100644
index 0000000..1e9d244
--- /dev/null
+++ b/fart_server/bonus_features/versions/deno_deploy_redirect.ts
@@ -0,0 +1,62 @@
+import { Time } from "../../../lib/constants/time.ts";
+
+const deployments = new Map<string, string>();
+const projectName = Deno.env.get("DENO_DEPLOY_PROJECT_NAME") ?? "fart";
+// Secrets must only come from the environment (see the .env gitignore entry); never commit tokens.
+const token = Deno.env.get("DENO_DEPLOY_ACCESS_TOKEN");
+const refreshRate = 10 * Time.Minute;
+let lastFetch = -1;
+
+const fetchAllDeployments = async (
+ projectName: string,
+ accessToken: string,
+ limit = 20,
+): Promise<void> => {
+ let currentPage = 0;
+ let totalPages = Infinity;
+
+ const canFetchMore = lastFetch + refreshRate < Date.now();
+ while (canFetchMore && currentPage < totalPages - 1) {
+ const response = await fetch(
+ `https://dash.deno.com/api/projects/${projectName}/deployments?page=${currentPage}&limit=${limit}`,
+ { headers: { "Authorization": `Bearer ${accessToken}` } },
+ );
+ const [
+ incomingDeployments,
+ { page: incomingPage, totalPages: incomingTotalPages },
+ ] = await response.json();
+ incomingDeployments.forEach(
+ (deployment: { id: string; relatedCommit: { hash: string } }) => {
+ const previewUrl = `https://${projectName}-${deployment.id}.deno.dev`;
+ const validIds = [
+ deployment.id,
+ deployment.relatedCommit.hash,
+ deployment.relatedCommit.hash.slice(0, 7),
+ ];
+ for (const id of validIds) {
+ deployments.set(id, previewUrl);
+ }
+ },
+ );
+ currentPage = incomingPage + 1;
+ totalPages = incomingTotalPages;
+ }
+ lastFetch = Date.now();
+};
+
+/**
+ * This handler redirects the request to the correct Deno deployment.
+ * last updated: 12-08-2021
+ */
+export const redirectToDenoDeployPreviewUrl = async (
+ request: Request,
+): Promise<Response | null> => {
+ const url = new URL(request.url);
+ const [, versionHash] = url.pathname.split("/");
+ if (projectName === undefined) return null;
+ if (token === undefined) return null;
+ await fetchAllDeployments(projectName, token);
+ const deployment = deployments.get(versionHash);
+ if (deployment === undefined) return null;
+ return Response.redirect(deployment);
+};
diff --git a/fart_server/bonus_features/versions/mod.ts b/fart_server/bonus_features/versions/mod.ts
new file mode 100644
index 0000000..57cad93
--- /dev/null
+++ b/fart_server/bonus_features/versions/mod.ts
@@ -0,0 +1 @@
+export { redirectToDenoDeployPreviewUrl } from "./deno_deploy_redirect.ts";
diff --git a/fart_server/mod.ts b/fart_server/mod.ts
new file mode 100644
index 0000000..e69de29
diff --git a/fart_server/serve.test.ts b/fart_server/serve.test.ts
new file mode 100644
index 0000000..e69de29
diff --git a/fart_server/serve.ts b/fart_server/serve.ts
new file mode 100644
index 0000000..c440b31
--- /dev/null
+++ b/fart_server/serve.ts
@@ -0,0 +1,57 @@
+import { redirectToDenoDeployPreviewUrl } from "./bonus_features/versions/mod.ts";
+import { redirectIfShortlink } from "./bonus_features/shortlinks/mod.ts";
+import { clear, getSize, inject, register } from "./utils.ts";
+
+const middleware = [
+ // redirect to another server running a different version of the Fart library
+ redirectToDenoDeployPreviewUrl,
+ // redirect to an external URL
+ redirectIfShortlink,
+ // show how many handlers are registered
+ (request: Request) => {
+ if (new URL(request.url).pathname === "/debug/size") {
+ return new Response(String(getSize()));
+ }
+ return null;
+ },
+ // show deployment ID if running on Deno Deploy
+ (request: Request) => {
+ if (new URL(request.url).pathname === "/debug/deployment") {
+ return new Response(String(Deno.env.get("DENO_DEPLOYMENT_ID")));
+ }
+ return null;
+ },
+];
+
+export const setup = () => {
+ if (getSize() === middleware.length) return;
+ clear();
+ register(...middleware);
+};
+
+export const handleRequest = async (event: Deno.RequestEvent) => {
+ setup();
+ event.respondWith(await inject(event.request));
+};
+
+export const serve = async () => {
+ const port = parseInt(Deno.env.get("PORT") || "8080");
+ console.info(`Access HTTP webserver at: http://localhost:${port}/`);
+ for await (const connection of Deno.listen({ port })) {
+ for await (const event of Deno.serveHttp(connection)) {
+ await handleRequest(event);
+ }
+ connection.close();
+ }
+};
+
+if (Deno.env.get("DENO_DEPLOYMENT_ID") !== undefined) {
+ // add the fetch listener if running on Deno Deploy
+ addEventListener(
+ "fetch",
+ handleRequest as unknown as EventListenerOrEventListenerObject,
+ );
+} else if (import.meta.main) {
+ // serve the HTTP server if running locally
+ await serve();
+}
diff --git a/fart_server/utils.test.ts b/fart_server/utils.test.ts
new file mode 100644
index 0000000..2e94d24
--- /dev/null
+++ b/fart_server/utils.test.ts
@@ -0,0 +1,43 @@
+import { assertEquals } from "../deps/std/testing.ts";
+import { clear, getSize, inject, register } from "./utils.ts";
+
+// Note: Make sure each test clears the handlers if changes were made.
+
+Deno.test("returns 404 without registering a handler", async () => {
+ const { status } = await inject(new Request("https://example.com/"));
+ assertEquals(status, 404);
+});
+
+Deno.test("size of handlers is 0 without registering a handler", () => {
+ assertEquals(getSize(), 0);
+});
+
+Deno.test("size is reduced to 0 when clear is called", () => {
+ register(() => null);
+ assertEquals(getSize(), 1);
+ register(() => null, () => null);
+ assertEquals(getSize(), 3);
+ clear();
+ assertEquals(getSize(), 0);
+});
+
+Deno.test("returns 404 when all handlers return null", async () => {
+ register(() => null);
+ const { status } = await inject(new Request("https://example.com/"));
+ assertEquals(status, 404);
+ clear();
+});
+
+Deno.test("returns data when a handler returns a response", async () => {
+ register(() => new Response("abc"));
+ const response = await inject(new Request("https://example.com/"));
+ assertEquals(await response.text(), "abc");
+ clear();
+});
+
+Deno.test("returns data when a handler returns a response and cascades on null", async () => {
+ register(() => null, () => null, () => null, () => new Response("abc"));
+ const response = await inject(new Request("https://example.com/"));
+ assertEquals(await response.text(), "abc");
+ clear();
+});
diff --git a/fart_server/utils.ts b/fart_server/utils.ts
new file mode 100644
index 0000000..15abe2f
--- /dev/null
+++ b/fart_server/utils.ts
@@ -0,0 +1,39 @@
+export type Result = null | Response | Promise<null | Response>;
+
+export type RequestHandler = (r: Request) => Result;
+
+/**
+ * In-memory storage of the Fart Server's configuration.
+ */
+const handlers: RequestHandler[] = [];
+
+/**
+ * Routes a given HTTP request to the intended `bonus_features` and
+ * sets the appropriate content type header.
+ * @param request incoming http request
+ * @returns routed Fart server response
+ */
+export const inject = async (request: Request): Promise => {
+ for (const handler of handlers) {
+ const result = await handler(request);
+ if (result !== null) {
+ return result;
+ }
+ }
+ return new Response("404", { status: 404 });
+};
+
+export const register = (...gimmeHandlers: RequestHandler[]) => {
+ handlers.push(...gimmeHandlers);
+};
+
+export const clear = () => {
+ handlers.length = 0;
+};
+
+export const getSize = () => {
+ return handlers.length;
+};
+
+// TODO(@ethanthatonekid): Write new functions to access the Fart Server's
+// configuration.
diff --git a/lib/compile/compile.test.ts b/lib/compile/compile.test.ts
deleted file mode 100644
index 94767d0..0000000
--- a/lib/compile/compile.test.ts
+++ /dev/null
@@ -1,181 +0,0 @@
-import { CompilationSettings, compile } from "./compile.ts";
-import { assertEquals } from "../../deps/std/testing.ts";
-// import { CartEventName } from "../gen/cart.ts";
-import { default as fakeTypeMap } from "../../std/typemaps/fake.ts";
-import {
- default as fakeCartridge,
- RESULTS,
-} from "../../std/carts/fake.cart.ts";
-
-const TEST_INDENT = " ";
-const NEW_LINE = "\n";
-const TEST_SETTINGS: CompilationSettings = {
- cartridge: fakeCartridge,
- typemap: fakeTypeMap,
-};
-
-// Deno.test("Empty input results in empty output", () => {
-// const actual = compile(``, TEST_SETTINGS);
-// const expected = ``;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully compiles import statement", () => {
-// const actual = compile(
-// `impo \`./path/to/types\` {
-// Thing1, Thing2, Thing3
-// }`,
-// TEST_SETTINGS,
-// );
-// const expected = RESULTS[CartEvent.Import];
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully compiles `type` statement", () => {
-// const actual = compile(
-// `type Thing {
-// abc: string
-// def: number
-// ghi: boolean
-// }`,
-// TEST_SETTINGS,
-// );
-// const expected = RESULTS[CartEvent.StructOpen] + NEW_LINE + TEST_INDENT +
-// RESULTS[CartEvent.SetProperty] + NEW_LINE + TEST_INDENT +
-// RESULTS[CartEvent.SetProperty] + NEW_LINE + TEST_INDENT +
-// RESULTS[CartEvent.SetProperty] + NEW_LINE + RESULTS[CartEvent.StructClose];
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully compiles nested `type` statement", () => {
-// const actual = compile(
-// `type Thing {
-// abc: string
-// def: number
-// ghi: {
-// uvw: {
-// xyz: boolean
-// }
-// }
-// }`,
-// TEST_SETTINGS,
-// );
-// const expected = RESULTS[CartEvent.StructOpen] + NEW_LINE + TEST_INDENT +
-// RESULTS[CartEvent.SetProperty] + NEW_LINE + TEST_INDENT +
-// RESULTS[CartEvent.SetProperty] + NEW_LINE + TEST_INDENT +
-// RESULTS[CartEvent.SetProperty] + NEW_LINE + TEST_INDENT.repeat(2) +
-// RESULTS[CartEvent.SetProperty] + NEW_LINE + TEST_INDENT.repeat(3) +
-// RESULTS[CartEvent.SetProperty] + NEW_LINE + TEST_INDENT.repeat(2) +
-// RESULTS[CartEvent.StructClose] + NEW_LINE + TEST_INDENT +
-// RESULTS[CartEvent.StructClose] + NEW_LINE +
-// RESULTS[CartEvent.StructClose];
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully compiles `depo` statement", () => {
-// const actual = compile(
-// `depo ThingService {
-// doThis:
-// doThat:
-// }`,
-// TEST_SETTINGS,
-// );
-// const expected = ``;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Omits property assignments from `depo` statement", () => {
-// const actual = compile(
-// `depo ThingService {
-// doThis:
-// doThat:
-// abc: string
-// def: { ghi: boolean }
-// }`,
-// TEST_SETTINGS,
-// );
-// const expected = ``;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully compiles entire service definition", () => {
-// const actual = compile(`type Apple {
-// weight*: number
-// }
-// type AppleRequest {
-// filters: {
-// minWeight: number
-// maxWeight: number
-// }
-// }
-// type AppleResponse {
-// value: Apple
-// }
-// depo AppleService {
-// pickBestApple:
-// }`);
-// const expected = `export interface Apple {
-// weight: number;
-// }
-// export interface AppleRequest {
-// filters?: {
-// minWeight?: number;
-// maxWeight?: number;
-// }
-// }
-// export interface AppleResponse {
-// value?: Apple;
-// }
-// export interface AppleService {
-// pickBestApple: (input: AppleRequest) => AppleResponse;
-// }`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully compiles nested `type` statement with required properties", () => {
-// const actual = compile(`type Thing {
-// abc*: string
-// def*: number
-// ghi*: {
-// jkl*: boolean
-// mno*: boolean
-// }
-// }`);
-// const expected = `export interface Thing {
-// abc: string;
-// def: number;
-// ghi: {
-// jkl: boolean;
-// mno: boolean;
-// }
-// }`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully compiles nested `type` statement with methods", () => {
-// const actual = compile(`type Farmer {
-// getApples:
-// }`);
-// const expected = `export interface Farmer {
-// getApples?: (input: string) => number;
-// }`;
-// assertEquals(actual, expected);
-// });
-
-/* TODO: Compile using QB64 cartridge.
-Deno.test("Successfully compiles to QB64", () => {
- const settings: FartSettings = { target: LanguageTarget.Basic };
- const actual = compile(
- `type Calendar {
- color: string
- year: number
- }`,
- settings,
- );
- const expected = `TYPE Calendar
- color AS STRING
- year AS DOUBLE
-END TYPE`;
- assertEquals(actual, expected);
-});
-*/
diff --git a/lib/compile/compile.ts b/lib/compile/compile.ts
deleted file mode 100644
index e68a66b..0000000
--- a/lib/compile/compile.ts
+++ /dev/null
@@ -1,199 +0,0 @@
-import { Token, tokenize } from "../tokenize/mod.ts";
-import { Lexicon } from "../consts/lexicon.ts";
-import { INDENT, Indent } from "../consts/indent.ts";
-import { Builder } from "../gen/builder.ts";
-import type { Cart } from "../gen/cart.ts";
-import { ModifierType, TypeMap, TypeModifier } from "../gen/typemap.ts";
-
-export interface CompilationSettings {
- cartridge: Cart;
- typemap: TypeMap;
- indentation?: string;
-}
-
-/**
- * Side-Effect: Infers omitted settings.
- */
-export function validateCompilationSettings(
- settings: CompilationSettings,
-): Required {
- return {
- cartridge: settings.cartridge,
- typemap: settings.typemap,
- indentation: settings.indentation ?? INDENT[Indent.Space2],
- };
-}
-
-export async function compile(
- content: string,
- settings: CompilationSettings,
-): Promise {
- const { cartridge, typemap, indentation } = validateCompilationSettings(
- settings,
- );
- const builder = new Builder(cartridge, typemap, indentation);
-
- const it = tokenize(content);
- let curr: IteratorResult = it.next();
-
- const applyMods = (
- tokens: Token[],
- ...mods: ModifierType[]
- ): string | undefined =>
- mods.reduceRight(
- (result, mod) => {
- if (typemap !== undefined && typemap[mod] !== undefined) {
- return [(typemap[mod] as TypeModifier)(...result)];
- }
- return result;
- },
- tokens.map(({ value }) => builder.getType(value) ?? value),
- ).pop();
-
- const checkModExists = (identifier: string) =>
- (Object.values(ModifierType) as string[]).includes(identifier);
-
- const nextToken = (): Token => (curr = it.next()).value;
-
- const nextTuple = (
- ateFirstToken = false,
- maxLength?: number,
- closingToken: Lexicon = Lexicon.Denester,
- ): Token[] => {
- if (!ateFirstToken) nextToken(); // TODO: Assert this token === openingToken.
- const list: Token[] = [];
- const isLengthValid = maxLength === undefined || maxLength >= list.length;
- while (!nextToken().is(closingToken) && isLengthValid) {
- if (!curr.value.is(Lexicon.Separator)) {
- const modifiedValue = nextModifier(curr.value);
- if (modifiedValue !== undefined) {
- list.push(
- new Token(
- modifiedValue,
- curr.value.line,
- curr.value.column,
- /*noCheck=*/ true,
- ),
- );
- } else {
- list.push(curr.value);
- }
- }
- }
- return list;
- };
-
- const nextStruct = async (depoMode = false) => {
- builder.incrementIndentLevel();
- while (!nextToken().is(Lexicon.Denester)) {
- const name = curr.value; // TODO: Assert this is identifier.
- const setter = nextToken(); // TODO: Assert this is setter or required_setter.
- let required = depoMode; // All methods of a `depo` are required by default.
- switch (setter.kind) {
- case Lexicon.Setter:
- break;
- case Lexicon.RequiredSetter: {
- required = true;
- break;
- }
- default: {
- console.error(`Expected a setter, but got ${setter} instead.`); // TODO: Throw error.
- }
- }
- const token = nextToken();
- if (token.is(Lexicon.Nester)) {
- if (depoMode) {
- // TODO: Throw warning (depos only register methods).
- continue;
- }
- // console.log("EMPTY_PROP", { token });
- await builder.appendProperty(name.value, required); // Omitting the type sets up for a nest.
- await nextStruct();
- } else if (token.is(Lexicon.OpeningAngle)) {
- const [inputToken //, outputToken
- ] = nextTuple(
- true,
- 16,
- Lexicon.ClosingAngle,
- );
- await builder.appendProperty(
- name.value,
- required,
- inputToken.value,
- true,
- false,
- /*mods=[]*/
- );
- } else {
- if (depoMode) {
- // TODO: Throw warning (depos only register methods).
- continue;
- }
- const isMethod = token.value.startsWith(ModifierType.Function);
- await builder.appendProperty(
- name.value,
- required,
- nextModifier(token) ?? token.value,
- isMethod,
- );
- }
- }
- builder.decrementIndentLevel();
- await builder.appendClosingStruct();
- };
-
- const nextModifier = (
- currentToken: Token,
- ): string | undefined => {
- const mods: ModifierType[] = [];
- while (checkModExists(currentToken.value)) {
- mods.push(currentToken.value as ModifierType);
- nextToken(); // TODO: Assert this is modifier (%)
- currentToken = nextToken();
- }
- const tokens = [];
- if (currentToken.is(Lexicon.OpeningAngle)) {
- tokens.push(...nextTuple(
- true,
- 2,
- Lexicon.ClosingAngle,
- ));
- } else {
- tokens.push(currentToken);
- }
- return applyMods(tokens, ...mods);
- };
-
- while (!curr.done) {
- switch (curr.value.kind) {
- case Lexicon.LoadDefiner: {
- const { value: filename } = nextToken();
- const dependencyTokens = nextTuple();
- const dependencies = dependencyTokens.map(({ value }) => value);
- await builder.appendImport(filename, dependencies);
- break;
- }
- case Lexicon.TypeDefiner: {
- const identifier = nextToken(); // TODO: Assert is valid identifier.
- await builder.appendOpeningStruct(identifier.value);
- nextToken(); // TODO: Assert this token.is(Lexicon.Nester).
- await nextStruct();
- break;
- }
- case Lexicon.DepoDefiner: {
- const identifier = nextToken(); // TODO: Assert is valid identifier.
- await builder.appendOpeningStruct(identifier.value);
- nextToken(); // TODO: Assert this token.is(Lexicon.Nester).
- const depoMode = true;
- await nextStruct(depoMode);
- break;
- }
- default: {
- nextToken(); // TODO: Throw error (unexpected token).
- }
- }
- }
-
- // console.log("BLOCKS", builder.blocks);
- return builder.export();
-}
diff --git a/lib/compile/mod.ts b/lib/compile/mod.ts
deleted file mode 100644
index 10c0358..0000000
--- a/lib/compile/mod.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-export { compile, validateCompilationSettings } from "./compile.ts";
-export type { CompilationSettings } from "./compile.ts";
diff --git a/lib/constants/lang.ts b/lib/constants/lang.ts
new file mode 100644
index 0000000..b6e0d5f
--- /dev/null
+++ b/lib/constants/lang.ts
@@ -0,0 +1,12 @@
+export enum Lang {
+ Fart = "fart", // ex use-case: generating typedefs/config in any lang
+ Proto = "proto", // ex use-case: generating typedefs in multiple langs
+ TypeScript = "ts", // ex use-case: generating all types of programs
+ ESM = "esm", // JavaScript with `import`
+ Go = "go", // ex use-case: generating all types of programs
+ Rust = "rust", // ex use-case: generating fault-tolerant programs
+ JSON = "json", // ex use-case: generating config
+ HTML = "html", // ex use-case: generating web pages
+ YAML = "yaml", // ex use-case: generating config
+ XML = "xml", // ex use-case: generating unstructured info
+}
diff --git a/lib/consts/time.ts b/lib/constants/time.ts
similarity index 94%
rename from lib/consts/time.ts
rename to lib/constants/time.ts
index a1c8c42..9f37e6f 100644
--- a/lib/consts/time.ts
+++ b/lib/constants/time.ts
@@ -1,10 +1,10 @@
-export enum Time {
- Millisecond = 1,
- Second = 1e3,
- Minute = 60 * Second,
- Hour = 60 * Minute,
- Day = 24 * Hour,
- Week = 7 * Day,
- Month = 30 * Day,
- Year = 365 * Day,
-}
+export enum Time {
+ Millisecond = 1,
+ Second = 1e3,
+ Minute = 60 * Second,
+ Hour = 60 * Minute,
+ Day = 24 * Hour,
+ Week = 7 * Day,
+ Month = 30 * Day,
+ Year = 365 * Day,
+}
diff --git a/lib/consts/lexicon.ts b/lib/consts/lexicon.ts
deleted file mode 100644
index b5f8d5d..0000000
--- a/lib/consts/lexicon.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-export enum Lexicon {
- Identifier,
- Nester,
- Denester,
- OpeningAngle,
- ClosingAngle,
- Setter,
- RequiredMarker,
- RequiredSetter,
- TypeDefiner,
- DepoDefiner,
- LoadDefiner,
- Commenter,
- Separator,
- Spacer,
- LineBreaker,
- LineBreaker2,
- StringMarker,
- StringMarker2,
- StringMarker3,
- StringLiteral,
- Modifier,
- EOF,
-}
-
-export const LEXICON = {
- [Lexicon.Nester]: "{",
- [Lexicon.Denester]: "}",
- [Lexicon.OpeningAngle]: "<",
- [Lexicon.ClosingAngle]: ">",
- [Lexicon.Setter]: ":",
- [Lexicon.RequiredMarker]: "*",
- [Lexicon.RequiredSetter]: "*:",
- [Lexicon.TypeDefiner]: "type",
- [Lexicon.DepoDefiner]: "depo",
- [Lexicon.LoadDefiner]: "load",
- [Lexicon.Commenter]: ";",
- [Lexicon.Separator]: ",",
- [Lexicon.Spacer]: " ",
- [Lexicon.LineBreaker]: "\n",
- [Lexicon.LineBreaker2]: "\r",
- [Lexicon.StringMarker]: "\`",
- [Lexicon.StringMarker2]: "'",
- [Lexicon.StringMarker3]: '"',
- [Lexicon.Modifier]: "%",
- [Lexicon.EOF]: "",
-} as const;
diff --git a/lib/consts/mod.ts b/lib/consts/mod.ts
deleted file mode 100644
index 0591c74..0000000
--- a/lib/consts/mod.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-export * from "./indent.ts";
-export * from "./lexicon.ts";
diff --git a/lib/fart.ts b/lib/fart.ts
deleted file mode 100644
index a0e01ed..0000000
--- a/lib/fart.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-export * from "./tokenize/mod.ts";
-export * from "./compile/mod.ts";
-export * from "./consts/mod.ts";
-export * from "./gen/mod.ts";
-export * from "./reg/mod.ts";
diff --git a/lib/fart_error/mod.ts b/lib/fart_error/mod.ts
new file mode 100644
index 0000000..e69de29
diff --git a/lib/gen/builder.test.ts b/lib/gen/builder.test.ts
deleted file mode 100644
index 8d0d511..0000000
--- a/lib/gen/builder.test.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import { assertEquals } from "../../deps/std/testing.ts";
-
-// TODO: Write tests for `CodeDocument`.
-Deno.test("Empty input results in empty output", () => {
- const actual = ``;
- const expected = ``;
- assertEquals(actual, expected);
-});
diff --git a/lib/gen/builder.ts b/lib/gen/builder.ts
deleted file mode 100644
index 2e5b9b5..0000000
--- a/lib/gen/builder.ts
+++ /dev/null
@@ -1,121 +0,0 @@
-import { IndentOption } from "../consts/indent.ts";
-import { OMIT_PATTERN, ReservedType, TypeMap } from "./typemap.ts";
-import { Cart, CartEventName } from "./cart.ts";
-import { BoC } from "./common.ts";
-
-/**
- * Also known as _File Builder_.
- */
-export class Builder {
- blocks: BoC[] = [];
- currentIndentLevel = 0;
- localTypes: Set = new Set([]);
-
- constructor(
- private cartridge: Cart,
- private typemap: TypeMap,
- private indent: IndentOption | string,
- ) {}
-
- private append(code?: BoC) {
- if (code === undefined) return;
- code.setIndentOffset(this.currentIndentLevel);
- // console.log("APPENDED", { code });
- this.blocks.push(code);
- }
-
- public incrementIndentLevel() {
- this.currentIndentLevel++;
- }
-
- public decrementIndentLevel() {
- // TODO: Assert that indentation level is greater than 0.
- if (this.currentIndentLevel > 0) {
- this.currentIndentLevel--;
- }
- }
-
- public async appendImport(source: string, dependencies: string[]) {
- const code = await this.cartridge.dispatch({
- type: CartEventName.Import,
- source,
- dependencies,
- });
- if (code === null) return;
- for (const depId of dependencies) {
- this.localTypes.add(depId);
- }
- this.append(code);
- }
-
- public async appendOpeningStruct(identifier: string, department = false) {
- const code = await this.cartridge.dispatch({
- type: CartEventName.StructOpen,
- identifier,
- department,
- });
- if (code === null) return;
- this.localTypes.add(identifier);
- this.append(code);
- }
-
- public async appendProperty(
- identifier: string,
- required: boolean,
- value?: string,
- method = false,
- department = false,
- ) {
- value = this.getType(value); // Transforms type before passing to cart.
- const code = await this.cartridge.dispatch({
- type: CartEventName.SetProperty,
- value,
- identifier,
- required,
- method,
- department,
- });
- if (code === null) return;
- this.append(code);
- }
-
- public async appendClosingStruct() {
- const code = await this.cartridge.dispatch({
- type: CartEventName.StructClose,
- });
- if (code === null) return;
- this.append(code);
- }
-
- toString = this.export.bind(this);
-
- public async export(): Promise {
- if (this.currentIndentLevel > 0) return "";
- const topOfFile = await this.cartridge.dispatch({
- type: CartEventName.FileStart,
- });
- const bottomOfFile = await this.cartridge.dispatch({
- type: CartEventName.FileEnd,
- });
- return BoC.join(topOfFile, ...this.blocks, bottomOfFile);
- }
-
- public getType(
- alias?: string,
- ): string | undefined {
- if (alias === undefined) return undefined;
- switch (alias) {
- case ReservedType.Number:
- return this.typemap[ReservedType.Number];
- case ReservedType.String:
- return this.typemap[ReservedType.String];
- case ReservedType.Boolean:
- return this.typemap[ReservedType.Boolean];
- case ReservedType.Default:
- return this.typemap[ReservedType.Default];
- default: {
- return alias.replace(OMIT_PATTERN, "void") ?? "";
- }
- }
- }
-}
diff --git a/lib/gen/cart.ts b/lib/gen/cart.ts
deleted file mode 100644
index 0bfbf10..0000000
--- a/lib/gen/cart.ts
+++ /dev/null
@@ -1,211 +0,0 @@
-import { BoC } from "./common.ts";
-
-export enum CartEventName {
- FileStart = "file_start",
- Import = "import",
- StructOpen = "struct_open",
- SetProperty = "set_property",
- StructClose = "struct_close",
- FileEnd = "file_end",
-}
-
-interface FileStartDetail {
- type: CartEventName.FileStart;
- code: BoC;
-}
-
-interface StructCloseDetail {
- type: CartEventName.StructClose;
- code: BoC;
-}
-
-interface FileEndDetail {
- type: CartEventName.FileEnd;
- code: BoC;
-}
-
-interface ImportDetail {
- type: CartEventName.Import;
- code: BoC;
- source: string;
- dependencies: string[];
-}
-
-interface StructOpenDetail {
- type: CartEventName.StructOpen;
- code: BoC;
- identifier: string;
- department: boolean;
-}
-
-interface SetPropertyDetail {
- type: CartEventName.SetProperty;
- code: BoC;
- identifier: string;
- department: boolean;
- value?: string;
- required: boolean;
- method: boolean;
-}
-
-export type CartDispatch = {
- type: CartEventName.FileStart;
-} | {
- type: CartEventName.StructClose;
-} | {
- type: CartEventName.FileEnd;
-} | {
- type: CartEventName.Import;
- source: string;
- dependencies: string[];
-} | {
- type: CartEventName.StructOpen;
- identifier: string;
- department: boolean;
-} | {
- type: CartEventName.SetProperty;
- identifier: string;
- department: boolean;
- value?: string;
- required: boolean;
- method: boolean;
-};
-
-export type CartEvent =
- | FileStartDetail
- | StructCloseDetail
- | FileEndDetail
- | ImportDetail
- | StructOpenDetail
- | SetPropertyDetail;
-
-/**
- * If a code generation function returns null, that means that the
- * target language omits the requested generated code. A null return
- * value will prevent the requested line from being appended to the result.
- */
-export type CartHandler = (
- event: T extends CartEventName.FileStart ? FileStartDetail
- : T extends CartEventName.Import ? ImportDetail
- : T extends CartEventName.StructOpen ? StructOpenDetail
- : T extends CartEventName.SetProperty ? SetPropertyDetail
- : T extends CartEventName.StructClose ? StructCloseDetail
- : FileEndDetail,
-) => void | Promise;
-
-export class Cart {
- constructor(
- private handlers = {
- [CartEventName.FileStart]: undefined as
- | CartHandler
- | undefined,
- [CartEventName.Import]: undefined as
- | CartHandler
- | undefined,
- [CartEventName.StructOpen]: undefined as
- | CartHandler
- | undefined,
- [CartEventName.SetProperty]: undefined as
- | CartHandler
- | undefined,
- [CartEventName.StructClose]: undefined as
- | CartHandler
- | undefined,
- [CartEventName.FileEnd]: undefined as
- | CartHandler
- | undefined,
- },
- ) {}
-
- async dispatch(event: CartDispatch): Promise {
- const handler = this.handlers[event.type];
- if (handler === undefined) return null;
- const code = new BoC();
- let result: void | Promise;
- switch (event.type) {
- case CartEventName.FileStart: {
- result = (handler as CartHandler)({
- code,
- ...event,
- });
- break;
- }
- case CartEventName.Import: {
- result = (handler as CartHandler)({
- code,
- ...event,
- });
- break;
- }
- case CartEventName.StructOpen: {
- result = (handler as CartHandler)({
- code,
- ...event,
- });
- break;
- }
- case CartEventName.SetProperty: {
- result = (handler as CartHandler)({
- code,
- ...event,
- });
- break;
- }
- case CartEventName.StructClose: {
- result = (handler as CartHandler)({
- code,
- ...event,
- });
- break;
- }
- case CartEventName.FileEnd: {
- result = (handler as CartHandler)({
- code,
- ...event,
- });
- break;
- }
- }
- if (result instanceof Promise) await result;
- return code;
- }
-
- addEventListener(
- name: CartEventName.FileStart,
- handler: CartHandler,
- ): void;
- addEventListener(
- name: CartEventName.Import,
- handler: CartHandler,
- ): void;
- addEventListener(
- name: CartEventName.StructOpen,
- handler: CartHandler,
- ): void;
- addEventListener(
- name: CartEventName.SetProperty,
- handler: CartHandler,
- ): void;
- addEventListener(
- name: CartEventName.StructClose,
- handler: CartHandler,
- ): void;
- addEventListener(
- name: CartEventName.FileEnd,
- handler: CartHandler,
- ): void;
- addEventListener(
- name: CartEventName,
- // deno-lint-ignore no-explicit-any
- handler: any,
- ): void {
- this.handlers[name] = handler;
- }
-
- // `on` serves as an alias for `addEventListener`.
- on = this.addEventListener.bind(this);
-
- removeEventListener(name: CartEventName) {
- delete this.handlers[name];
- }
-}
diff --git a/lib/gen/common.ts b/lib/gen/common.ts
deleted file mode 100644
index 424d880..0000000
--- a/lib/gen/common.ts
+++ /dev/null
@@ -1,245 +0,0 @@
-import {
- INDENT,
- Indent,
- IndentCacheIndex,
- IndentOption,
-} from "../consts/indent.ts";
-
-const NEW_LINE = "\n";
-const DEFAULT_INDENT_OPTION = Indent.Space2;
-
-/**
- * This type covers each way a **block of code** may be represented.
- * A string implies the block is one line. An array of strings
- * implies a multi-line code block. A 2D array of strings implies
- * a multi-line code block with indentations.
- */
-export type SerializedBoC = string | string[] | string[][];
-
-/**
- * LoC stands for _Line of Code_.
- */
-export class LoC {
- constructor(
- public content: string,
- public indentOption: IndentOption | string = DEFAULT_INDENT_OPTION,
- public indentLevel = 0,
- ) {}
-
- export(indentOption?: IndentOption | string, offset = 0): string {
- const indent = getIndent(
- indentOption ?? this.indentOption,
- offset + this.indentLevel,
- );
- return indent + this.content;
- }
-
- setIndentLevel(level: number) {
- if (level > 0) {
- this.indentLevel = Math.floor(level);
- }
- }
-
- toString = this.export.bind(this);
-}
-
-/**
- * BoC stands for _Block of Code_.
- */
-export class BoC {
- public active = true;
-
- constructor(
- public lines: LoC[] = [],
- public indent: IndentOption | string = DEFAULT_INDENT_OPTION,
- public padding = 0, // Blank lines placed below the code block.
- public indentOffset = 0,
- ) {}
-
- append(content: LoC): void;
- append(
- content: string,
- indent?: IndentOption | string,
- indentLevel?: number,
- ): void;
- append(
- content: LoC | string,
- indent?: IndentOption | string,
- indentLevel = 0,
- ) {
- const line = content instanceof LoC
- ? content
- : new LoC(content, indent, indentLevel);
- this.lines.push(line);
- }
-
- // TODO: Implement padding_top/bottom.
- setPadding(padding: number) {
- this.padding = padding;
- }
-
- setIndentOffset(offset: number) {
- this.indentOffset = offset;
- }
-
- /**
- * Omits _this_ from result.
- */
- skip() {
- this.active = false;
- }
-
- export(): string {
- return this.lines
- .map((line) => line.export(this.indent, this.indentOffset))
- .join(NEW_LINE);
- }
-
- toString = this.export.bind(this);
-
- static parse(
- content: string,
- indent: IndentOption | string,
- ): BoC | undefined;
- static parse(
- content: string[],
- indent: IndentOption | string,
- ): BoC | undefined;
- static parse(
- content: string[][],
- indent: IndentOption | string,
- ): BoC | undefined;
- static parse(
- content: SerializedBoC,
- indent: IndentOption | string,
- ): BoC | undefined;
- static parse(
- content?: SerializedBoC,
- indent: IndentOption | string = DEFAULT_INDENT_OPTION,
- ): BoC | undefined {
- if (content === undefined) return;
- const block = new BoC([], indent);
- const gimmeLine = (line: string, offset = 0) =>
- block.append(new LoC(line, indent, offset));
- if (typeof content === "string") {
- gimmeLine(content);
- return block;
- }
- for (const line of content) {
- if (typeof line === "string") {
- gimmeLine(line);
- continue;
- }
- const indentLevelOffset = line.findIndex(({ length }) => length > 0);
- gimmeLine(line[indentLevelOffset], indentLevelOffset);
- }
- }
-
- static join(...blocks: (BoC | null)[]) {
- return blocks
- .filter((block) => block?.active)
- .reduce((result, block) => {
- if (block === null) return result;
- return result + block.export() + NEW_LINE.repeat(block.padding + 1);
- }, "");
- }
-}
-
-export function getIndentOption(
- indentOption: IndentOption | string,
-): IndentOption | null {
- let option: IndentOption | null = null;
- switch (indentOption) {
- case Indent.Tab1: {
- option = Indent.Tab1;
- break;
- }
- case Indent.Space1: {
- option = Indent.Space1;
- break;
- }
- case Indent.Space2: {
- option = Indent.Space2;
- break;
- }
- case Indent.Space3: {
- option = Indent.Space3;
- break;
- }
- case Indent.Space4: {
- option = Indent.Space4;
- break;
- }
- }
- return option;
-}
-
-export function getCachedIndent(
- indentOption: IndentOption,
- indentLevel: number,
-): string | null {
- if (0 > indentLevel || indentLevel > 16) return null;
- switch (indentOption) {
- case Indent.Tab1: {
- const indentCacheIndex = -1 *
- Math.floor(indentLevel) as IndentCacheIndex;
- return INDENT[indentCacheIndex];
- }
- case Indent.Space1:
- case Indent.Space2:
- case Indent.Space3:
- case Indent.Space4: {
- const indentCacheIndex = indentOption *
- Math.floor(indentLevel) as IndentCacheIndex;
- return INDENT[indentCacheIndex];
- }
- default:
- return null;
- }
-}
-
-/**
- * This function will either return a cached indent string
- * from `/lib/constants/indent.ts`.
- *
- * ## Usage
- *
- * ```ts
- * // Tab spacing is represented by -1.
- * getIndent(-1, 1) // "\t"
- * getIndent(-1, 3) // "\t\t\t"
- *
- * // Single, double, triple, and quadruple spaces are
- * // represented by 1, 2, 3, and 4 respectively.
- * getIndent(1, 1) // " "
- * getIndent(1, 3) // " "
- * getIndent(2, 3) // " "
- * getIndent(3, 3) // " "
- * getIndent(4, 3) // " "
- *
- * // For non-cached indents, a string may be passed
- * // instead and will be computed immediately.
- * getIndent("#", 3) // "###"
- * getIndent("_", 20) // "____________________"
- *
- * // Any invalid indentation options will result in the
- * // return of an empty string.
- * getIndent(5, 1) // ""
- * getIndent(-2, 1) // ""
- * ```
- */
-export function getIndent(
- indentOption: IndentOption | string,
- indentLevel: number,
-): string {
- const option = getIndentOption(indentOption);
- indentLevel = Math.floor(Math.max(0, indentLevel)); // Assert indent level is a positive integer.
- if (option !== null) {
- const cachedIndent = getCachedIndent(option, indentLevel);
- if (cachedIndent !== null) return cachedIndent;
- }
- if (typeof indentOption === "string") {
- return indentOption.repeat(Math.max(indentLevel, 0));
- }
- return "";
-}
diff --git a/lib/gen/mod.ts b/lib/gen/mod.ts
deleted file mode 100644
index 995cfdf..0000000
--- a/lib/gen/mod.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export * from "./cart.ts";
-export * from "./builder.ts";
-export * from "./typemap.ts";
diff --git a/lib/gen/typemap.ts b/lib/gen/typemap.ts
deleted file mode 100644
index 3f2b258..0000000
--- a/lib/gen/typemap.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-export enum ReservedType {
- Omit = "_",
- Number = "number",
- String = "string",
- Boolean = "boolean",
- Default = "any",
-}
-
-export enum ModifierType {
- Array = "array", // Modifies anything.
- Async = "async", // Modifies anything.
- Dictionary = "dict", // Modifies length-2 tuples.
- Function = "fn", // Modifies length-2 tuples.
- Date = "date", // Modifies string or number.
- URL = "url", // Modifies string.
-}
-
-/**
- * Returns a type composed into plain text (e.g. `number`,
- * `Array`, `(a: number, b: number) => number`, etc.).
- */
-export type TypeModifier = (...inner: string[]) => string;
-
-/**
- * The TypeMap API is designed to delegate the generation of
- * syntax for various programming languages.
- */
-export interface TypeMap {
- [ReservedType.Omit]: string;
- [ReservedType.Number]: string;
- [ReservedType.String]: string;
- [ReservedType.Boolean]: string;
- [ReservedType.Default]: string;
-
- // Modifiers are not required for all languages.
- [ModifierType.Array]?: TypeModifier;
- [ModifierType.Async]?: TypeModifier;
- [ModifierType.Dictionary]?: TypeModifier;
- [ModifierType.Function]?: TypeModifier;
- [ModifierType.Date]?: TypeModifier;
- [ModifierType.URL]?: TypeModifier;
-}
-
-export const OMIT_PATTERN = /^\_$/;
diff --git a/lib/mod.ts b/lib/mod.ts
new file mode 100644
index 0000000..0604286
--- /dev/null
+++ b/lib/mod.ts
@@ -0,0 +1,5 @@
+export * from "./tokenize/mod.ts";
+export * from "./cartridge/mod.ts";
+export * from "./compile/mod.ts";
+export * from "./text_builder/mod.ts";
+export * from "./registry/mod.ts";
diff --git a/lib/proto_parser/mod.ts b/lib/proto_parser/mod.ts
new file mode 100644
index 0000000..86ee55c
--- /dev/null
+++ b/lib/proto_parser/mod.ts
@@ -0,0 +1,19 @@
+import { parse } from "https://deno.land/x/protoc_parser/mod.ts";
+
+const file = await Deno.open("./lib/proto_parser/my-file.proto");
+try {
+ const proto = await parse(file, {});
+ proto.accept({
+ visitMessage(messageNode) {
+ // Do stuff with message node
+ console.log({ messageNode });
+ },
+ visitService(serviceNode) {
+ // Do stuff with service node
+ console.log({ serviceNode });
+ },
+ // etc
+ });
+} finally {
+ await file.close();
+}
diff --git a/lib/proto_parser/my-file.proto b/lib/proto_parser/my-file.proto
new file mode 100644
index 0000000..482a69f
--- /dev/null
+++ b/lib/proto_parser/my-file.proto
@@ -0,0 +1,35 @@
+/**
+ * This is a generic file comment
+ */
+syntax = "proto3";
+
+/**
+ * This is a comment for MyService
+ */
+service MyService {
+
+ /**
+ * This is a comment for MyMethod
+ */
+ rpc MyMethod (MyRequest) returns (MyResponse);
+}
+
+/**
+ * This is a comment for MyRequest
+ */
+message MyRequest {
+ /**
+ * This is a comment for path
+ */
+ string path = 1;
+}
+
+/**
+ * This is a comment for MyResponse
+ */
+message MyResponse {
+ /**
+ * This is a comment for status
+ */
+ int32 status = 2;
+}
\ No newline at end of file
diff --git a/lib/reg/README.md b/lib/reg/README.md
deleted file mode 100644
index 554a5fe..0000000
--- a/lib/reg/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# `/reg/`
-
-This directory includes the source code for a local code cartridge _registry_.
diff --git a/lib/reg/mod.ts b/lib/reg/mod.ts
deleted file mode 100644
index 9d641ca..0000000
--- a/lib/reg/mod.ts
+++ /dev/null
@@ -1 +0,0 @@
-export * from "./registry.ts";
diff --git a/lib/reg/registry.test.ts b/lib/reg/registry.test.ts
deleted file mode 100644
index 95399f2..0000000
--- a/lib/reg/registry.test.ts
+++ /dev/null
@@ -1,86 +0,0 @@
-import { Registry } from "./registry.ts";
-import { assertEquals } from "../../deps/std/testing.ts";
-
-class Dummy {
- constructor(private id?: string) {}
- someCapability(x: string): string {
- return x;
- }
-}
-
-Deno.test("Parses registry ID", () => {
- const actual = Registry.parseId("a");
- const expected = ["a"];
- assertEquals(actual, expected);
-});
-
-Deno.test("Parses registry ID with 3 segments", () => {
- const actual = Registry.parseId("a.b.c");
- const expected = ["a", "b", "c"];
- assertEquals(actual, expected);
-});
-
-Deno.test("Registers class instance successfully", () => {
- const dummy = new Dummy();
- const reg = new Registry("_", dummy);
- const actual = reg.value;
- const expected = dummy;
- assertEquals(actual, expected);
-});
-
-Deno.test("Registers class instance recursively", () => {
- const dummy = new Dummy();
- const reg = new Registry("_");
- reg.set("a.b.c", dummy);
- const actual = reg.vendor("a.b.c");
- const expected = dummy;
- assertEquals(actual, expected);
-});
-
-Deno.test("Gets class instance with default keyword", () => {
- const dummy = new Dummy();
- const reg = new Registry("_");
- reg.set("a.b.c", dummy);
- const actual = reg.vendor("a.b.c.default");
- const expected = dummy;
- assertEquals(actual, expected);
-});
-
-Deno.test("Registers class instance recursively via chaining", () => {
- const dummy = new Dummy();
- const reg = new Registry("_");
- reg.set("a.b.c", dummy);
- const actual = reg.get("a")?.get("b")?.get("c")?.vendor();
- const expected = dummy;
- assertEquals(actual, expected);
-});
-
-Deno.test("Vendors successfully from sample registry", () => {
- const dummy = new Dummy();
- const targetDummy = new Dummy("target");
- const baseRegistry = new Registry("_");
- const tsRegistry = new Registry("ts", dummy);
- tsRegistry.set("deno", dummy);
- tsRegistry.set("deno.api", targetDummy);
- baseRegistry.include(tsRegistry);
- const actual = baseRegistry.vendor("ts.deno.api");
- const expected = targetDummy;
- assertEquals(actual, expected);
-});
-
-Deno.test("Successfully vendors 4 sample registries", () => {
- const dummy = new Dummy();
- const dummyA = new Dummy("A");
- const dummyB = new Dummy("B");
- const dummyC = new Dummy("C");
- const baseRegistry = new Registry("_");
- const tsRegistry = new Registry("ts", dummy);
- tsRegistry.set("deno", dummy);
- tsRegistry.set("deno.api", dummyA);
- tsRegistry.set("deno.cli", dummyB);
- tsRegistry.set("deno.web", dummyC);
- baseRegistry.include(tsRegistry);
- assertEquals(baseRegistry.vendor("ts.deno.api"), dummyA);
- assertEquals(baseRegistry.vendor("ts.deno.cli"), dummyB);
- assertEquals(baseRegistry.vendor("ts.deno.web"), dummyC);
-});
diff --git a/lib/reg/registry.ts b/lib/reg/registry.ts
deleted file mode 100644
index 361783f..0000000
--- a/lib/reg/registry.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-export const INDEX = "default";
-
-export type RegistryKey = string | string[];
-
-export class Registry {
- private registries: Map> = new Map([]);
-
- constructor(
- public id: string,
- public value?: T,
- ) {}
-
- set(id: RegistryKey, value: T) {
- const [currentId, ...segments] = Registry.parseId(id);
- if (currentId !== undefined) {
- const nextRegistry = this.registries.get(currentId) ??
- new Registry(currentId, segments.length === 0 ? value : undefined);
- nextRegistry.set(segments, value);
- this.registries.set(currentId, nextRegistry);
- }
- }
-
- get(id: RegistryKey = INDEX): Registry | undefined {
- const [currentId, ...segments] = Registry.parseId(id);
- if (currentId === undefined) return;
- if (currentId === INDEX) return this;
- const currentRegistry = this.registries.get(currentId);
- if (currentRegistry === undefined) return;
- if (segments.length > 0) {
- return currentRegistry.get(segments);
- }
- return currentRegistry;
- }
-
- vendor(id?: RegistryKey): T | undefined {
- if (id === undefined) return this.value;
- return this.get(id)?.value;
- }
-
- has(id: RegistryKey): boolean {
- return this.vendor(id) !== undefined;
- }
-
- include(registry: Registry) {
- return this.registries.set(registry.id, registry);
- }
-
- static parseId(id: RegistryKey): string[] {
- if (Array.isArray(id)) {
- return id;
- }
- const segments = id.split(".").map((segment) =>
- segment.toLowerCase().replace(/[^a-z0-9]/g, "")
- );
- while (segments[segments.length - 1] === INDEX) {
- segments.pop();
- }
- return segments;
- }
-}
diff --git a/lib/registry/mod.ts b/lib/registry/mod.ts
new file mode 100644
index 0000000..8ce8f76
--- /dev/null
+++ b/lib/registry/mod.ts
@@ -0,0 +1,6 @@
+/**
+ * @todo @ethanthatonekid refactor
+ */
+export class Registry {
+ constructor() {}
+}
diff --git a/lib/tokenize/alias.ts b/lib/tokenize/alias.ts
deleted file mode 100644
index fa27a97..0000000
--- a/lib/tokenize/alias.ts
+++ /dev/null
@@ -1,65 +0,0 @@
-import { LEXICON, Lexicon } from "../consts/lexicon.ts";
-import { Token } from "./token.ts";
-
-interface LexiconUtil {
- "id": (raw: string, line: number, column: number) => Token;
- "string_literal": (raw: string, line: number, column: number) => Token;
- "nester": (line: number, column: number) => Token;
- "denester": (line: number, column: number) => Token;
- "opening_angle": (line: number, column: number) => Token;
- "closing_angle": (line: number, column: number) => Token;
- "setter": (line: number, column: number) => Token;
- "required_setter": (line: number, column: number) => Token;
- "type_definer": (line: number, column: number) => Token;
- "depo_definer": (line: number, column: number) => Token;
- "load_definer": (line: number, column: number) => Token;
- "commenter": (line: number, column: number) => Token;
- "separator": (line: number, column: number) => Token;
- "spacer": (line: number, column: number) => Token;
- "line_breaker": (line: number, column: number) => Token;
- "line_breaker2": (line: number, column: number) => Token;
- "modifier": (line: number, column: number) => Token;
- "string_marker": (line: number, column: number) => Token;
- "string_marker2": (line: number, column: number) => Token;
- "string_marker3": (line: number, column: number) => Token;
- "eof": (line: number, column: number) => Token;
-}
-
-export const T: LexiconUtil = {
- "id": (raw, line, column) => new Token(raw, line, column),
- "string_literal": (raw, line, column) => new Token(raw, line, column),
- "nester": (line, column) => new Token(LEXICON[Lexicon.Nester], line, column),
- "denester": (line, column) =>
- new Token(LEXICON[Lexicon.Denester], line, column),
- "opening_angle": (line, column) =>
- new Token(LEXICON[Lexicon.OpeningAngle], line, column),
- "closing_angle": (line, column) =>
- new Token(LEXICON[Lexicon.ClosingAngle], line, column),
- "setter": (line, column) => new Token(LEXICON[Lexicon.Setter], line, column),
- "required_setter": (line, column) =>
- new Token(LEXICON[Lexicon.RequiredSetter], line, column),
- "type_definer": (line, column) =>
- new Token(LEXICON[Lexicon.TypeDefiner], line, column),
- "depo_definer": (line, column) =>
- new Token(LEXICON[Lexicon.DepoDefiner], line, column),
- "load_definer": (line, column) =>
- new Token(LEXICON[Lexicon.LoadDefiner], line, column),
- "commenter": (line, column) =>
- new Token(LEXICON[Lexicon.Commenter], line, column),
- "separator": (line, column) =>
- new Token(LEXICON[Lexicon.Separator], line, column),
- "spacer": (line, column) => new Token(LEXICON[Lexicon.Spacer], line, column),
- "line_breaker": (line, column) =>
- new Token(LEXICON[Lexicon.LineBreaker], line, column),
- "line_breaker2": (line, column) =>
- new Token(LEXICON[Lexicon.LineBreaker2], line, column),
- "modifier": (line, column) =>
- new Token(LEXICON[Lexicon.Modifier], line, column),
- "string_marker": (line, column) =>
- new Token(LEXICON[Lexicon.StringMarker], line, column),
- "string_marker2": (line, column) =>
- new Token(LEXICON[Lexicon.StringMarker2], line, column),
- "string_marker3": (line, column) =>
- new Token(LEXICON[Lexicon.StringMarker3], line, column),
- "eof": (line, column) => new Token(LEXICON[Lexicon.EOF], line, column),
-};
diff --git a/lib/tokenize/common.ts b/lib/tokenize/common.ts
deleted file mode 100644
index dbf3dcf..0000000
--- a/lib/tokenize/common.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-// TODO(ethanthatonekid): Allow for inclusive period characters in an "identifier".
-export const validateIdentifier = (candidate: string): boolean =>
- /^[a-zA-Z_$][a-zA-Z_$0-9]*$/g.test(candidate);
-
-export const validateStringLiteral = (candidate: string): boolean =>
- /^\`(.*?)\`$/g.test(candidate) ||
- /^\'(.*?)\'$/g.test(candidate) ||
- /^\"(.*?)\"$/g.test(candidate);
diff --git a/lib/tokenize/mod.ts b/lib/tokenize/mod.ts
deleted file mode 100644
index 689cf84..0000000
--- a/lib/tokenize/mod.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-export { T } from "./alias.ts";
-export { Token } from "./token.ts";
-export { tokenize } from "./tokenize.ts";
diff --git a/lib/tokenize/token.ts b/lib/tokenize/token.ts
deleted file mode 100644
index 85a359c..0000000
--- a/lib/tokenize/token.ts
+++ /dev/null
@@ -1,91 +0,0 @@
-import { LEXICON, Lexicon } from "../consts/lexicon.ts";
-import { validateIdentifier, validateStringLiteral } from "./common.ts";
-
-export class Token {
- public kind: Lexicon | null;
- constructor(
- private raw: string,
- public line: number,
- public column: number,
- noCheck = false,
- ) {
- this.kind = noCheck ? Lexicon.Identifier : Token.getKindOf(raw);
- }
-
- // deno-lint-ignore getter-return
- get value(): string {
- switch (this.kind) {
- case Lexicon.Identifier:
- return this.raw;
- case Lexicon.StringMarker:
- case Lexicon.StringMarker2:
- case Lexicon.StringMarker3:
- case Lexicon.StringLiteral: {
- const clean = (stringLiteral: string): string => {
- const marker = LEXICON[Lexicon.StringMarker];
- const pattern = new RegExp(`^\\${marker}|\\${marker}$`, "g");
- return stringLiteral.replace(pattern, "");
- };
- return clean(this.raw);
- }
- default: {
- if (this.kind !== null && LEXICON[this.kind] !== undefined) {
- return LEXICON[this.kind];
- }
- throw new Error(`Invalid token`);
- }
- }
- }
-
- is(kind: Lexicon | null): boolean {
- return this.kind === kind;
- }
-
- toString() {
- return this.value;
- }
-
- static getKindOf(raw: string): Lexicon | null {
- switch (raw) {
- case LEXICON[Lexicon.Nester]:
- return Lexicon.Nester;
- case LEXICON[Lexicon.Denester]:
- return Lexicon.Denester;
- case LEXICON[Lexicon.OpeningAngle]:
- return Lexicon.OpeningAngle;
- case LEXICON[Lexicon.ClosingAngle]:
- return Lexicon.ClosingAngle;
- case LEXICON[Lexicon.RequiredMarker]:
- return Lexicon.RequiredMarker;
- case LEXICON[Lexicon.Setter]:
- return Lexicon.Setter;
- case LEXICON[Lexicon.RequiredSetter]:
- return Lexicon.RequiredSetter;
- case LEXICON[Lexicon.TypeDefiner]:
- return Lexicon.TypeDefiner;
- case LEXICON[Lexicon.DepoDefiner]:
- return Lexicon.DepoDefiner;
- case LEXICON[Lexicon.LoadDefiner]:
- return Lexicon.LoadDefiner;
- case LEXICON[Lexicon.Commenter]:
- return Lexicon.Commenter;
- case LEXICON[Lexicon.Separator]:
- return Lexicon.Separator;
- case LEXICON[Lexicon.Spacer]:
- return Lexicon.Spacer;
- case LEXICON[Lexicon.LineBreaker]:
- return Lexicon.LineBreaker;
- case LEXICON[Lexicon.LineBreaker2]:
- return Lexicon.LineBreaker2;
- case LEXICON[Lexicon.Modifier]:
- return Lexicon.Modifier;
- case LEXICON[Lexicon.EOF]:
- return Lexicon.EOF;
- default: {
- if (validateIdentifier(raw)) return Lexicon.Identifier;
- else if (validateStringLiteral(raw)) return Lexicon.StringLiteral;
- else return null;
- }
- }
- }
-}
diff --git a/lib/tokenize/tokenize.test.ts b/lib/tokenize/tokenize.test.ts
deleted file mode 100644
index 67ba093..0000000
--- a/lib/tokenize/tokenize.test.ts
+++ /dev/null
@@ -1,257 +0,0 @@
-import { Token } from "./token.ts";
-import { tokenize } from "./tokenize.ts";
-import { T } from "./alias.ts";
-import { assert, assertEquals } from "../../deps/std/testing.ts";
-import { Lexicon } from "../consts/lexicon.ts";
-
-const assertTokensEqual = (
- actual: Generator<Token, Token>,
- expected: Token[],
-) => assertEquals([...actual], expected);
-
-Deno.test("Successfully creates identifier token", () => {
- const {
- kind: actualKind,
- value: actualValue,
- } = new Token("abc123ABC", 0, 0);
- const expectedKind = Lexicon.Identifier;
- const expectedValue = "abc123ABC";
- assertEquals(actualKind, expectedKind);
- assertEquals(actualValue, expectedValue);
-});
-
-Deno.test("Successfully creates string literal token", () => {
- const { kind: actualKind, value: actualValue } = new Token("\`abc\`", 0, 0);
- const expectedKind = Lexicon.StringLiteral;
- const expectedValue = "abc";
- assertEquals(actualKind, expectedKind);
- assertEquals(actualValue, expectedValue);
-});
-
-Deno.test("An empty raw value has a kind of EOF", () => {
- const { kind: actualKind } = new Token("", 0, 0);
- const expectedKind = Lexicon.EOF;
- assertEquals(actualKind, expectedKind);
-});
-
-Deno.test("Empty input results in empty output", () => {
- const { done } = tokenize("").next();
- assert(done);
-});
-
-Deno.test("Successfully tokenizes given syntax", () => {
- const actual = tokenize(`type Thing {
- foo: number
- bar: string
-}`);
- const expected = [
- T.type_definer(1, 1),
- T.id("Thing", 1, 6),
- T.nester(1, 12),
- T.id("foo", 2, 3),
- T.setter(2, 6),
- T.id("number", 2, 8),
- T.id("bar", 3, 3),
- T.setter(3, 6),
- T.id("string", 3, 8),
- T.denester(4, 1),
- ];
- assertTokensEqual(actual, expected);
-});
-
-Deno.test("Successfully tokenizes nested syntax", () => {
- const actual = tokenize(`type Thing {
- abc: {
- def: {
- ghi: number
- }
- }
-}`);
- const expected = [
- T.type_definer(1, 1),
- T.id("Thing", 1, 6),
- T.nester(1, 12),
- T.id("abc", 2, 3),
- T.setter(2, 6),
- T.nester(2, 8),
- T.id("def", 3, 5),
- T.setter(3, 8),
- T.nester(3, 10),
- T.id("ghi", 4, 7),
- T.setter(4, 10),
- T.id("number", 4, 12),
- T.denester(5, 5),
- T.denester(6, 3),
- T.denester(7, 1),
- ];
- assertTokensEqual(actual, expected);
-});
-
-Deno.test("Omits comments from results", () => {
- const actual = tokenize(`type Thing {
- foo: number; This is a comment
-}`);
- const expected = [
- T.type_definer(1, 1),
- T.id("Thing", 1, 6),
- T.nester(1, 12),
- T.id("foo", 2, 3),
- T.setter(2, 6),
- T.id("number", 2, 8),
- T.denester(3, 1),
- ];
- assertTokensEqual(actual, expected);
-});
-
-Deno.test("Omits valid code comments from results", () => {
- const actual = [...tokenize(`type Thing {
- foo: number; bar: string
-}`)];
- const expected = [
- T.type_definer(1, 1),
- T.id("Thing", 1, 6),
- T.nester(1, 12),
- T.id("foo", 2, 3),
- T.setter(2, 6),
- T.id("number", 2, 8),
- T.denester(3, 1),
- ];
- assertEquals(actual, expected);
-});
-
-Deno.test("Tokenizes a string literal", () => {
- const actual = tokenize(`load './path/to/types' {
- Thing1, Thing2
-}`);
- const expected = [
- T.load_definer(1, 1),
- T.string_literal("'./path/to/types'", 1, 5),
- T.nester(1, 24),
- T.id("Thing1", 2, 3),
- T.separator(2, 9),
- T.id("Thing2", 2, 11),
- T.denester(3, 1),
- ];
- assertTokensEqual(actual, expected);
-});
-
-Deno.test("Tokenizes a required setter", () => {
- const actual = tokenize(`type Thing {
- foobar*: number
-}`);
- const expected = [
- T.type_definer(1, 1),
- T.id("Thing", 1, 6),
- T.nester(1, 12),
- T.id("foobar", 2, 3),
- T.required_setter(2, 9),
- T.id("number", 2, 12),
- T.denester(3, 1),
- ];
- assertTokensEqual(actual, expected);
-});
-
-Deno.test("Tokenizes a method definition", () => {
- const actual = tokenize(`type Thing {
- getSomething:
-}`);
- const expected = [
- T.type_definer(1, 1),
- T.id("Thing", 1, 6),
- T.nester(1, 12),
- T.id("getSomething", 2, 3),
- T.setter(2, 15),
- T.opening_angle(2, 17),
- T.id("ThingInput", 2, 18),
- T.separator(2, 28),
- T.id("ThingOutput", 2, 30),
- T.closing_angle(2, 41),
- T.denester(3, 1),
- ];
- assertTokensEqual(actual, expected);
-});
-
-Deno.test("Tokenizes Pokemon-themed structs", () => {
- const actual = tokenize(`type Pokeball {
- id*: string
- used*: boolean
-
- catch*:
-}
-
-type Pokemon {
- name*: string
- ball: Pokeball
- types*: { type1*: string
- type2: string }
-
- obtain*:
-}`);
- const expected = [
- T.type_definer(1, 1),
- T.id("Pokeball", 1, 6),
- T.nester(1, 15),
- T.id("id", 2, 3),
- T.required_setter(2, 5),
- T.id("string", 2, 8),
- T.id("used", 3, 3),
- T.required_setter(3, 7),
- T.id("boolean", 3, 10),
- T.id("catch", 5, 3),
- T.required_setter(5, 8),
- T.opening_angle(5, 11),
- T.id("string", 5, 12),
- T.separator(5, 18),
- T.id("boolean", 5, 20),
- T.closing_angle(5, 27),
- T.denester(6, 1),
- T.type_definer(8, 1),
- T.id("Pokemon", 8, 6),
- T.nester(8, 14),
- T.id("name", 9, 3),
- T.required_setter(9, 7),
- T.id("string", 9, 10),
- T.id("ball", 10, 3),
- T.setter(10, 7),
- T.id("Pokeball", 10, 9),
- T.id("types", 11, 3),
- T.required_setter(11, 8),
- T.nester(11, 11),
- T.id("type1", 11, 13),
- T.required_setter(11, 18),
- T.id("string", 11, 21),
- T.id("type2", 12, 13),
- T.setter(12, 18),
- T.id("string", 12, 21),
- T.denester(12, 28),
- T.id("obtain", 14, 3),
- T.required_setter(14, 9),
- T.opening_angle(14, 12),
- T.id("Pokeball", 14, 13),
- T.closing_angle(14, 21),
- T.denester(15, 1),
- ];
- assertTokensEqual(actual, expected);
-});
-
-Deno.test("Tokenizes a modified definition", () => {
- const actual = tokenize(`type Thing {
- getSomething: fn %
-}`);
- const expected = [
- T.type_definer(1, 1),
- T.id("Thing", 1, 6),
- T.nester(1, 12),
- T.id("getSomething", 2, 3),
- T.setter(2, 15),
- T.id("fn", 2, 17),
- T.modifier(2, 20),
- T.opening_angle(2, 22),
- T.id("ThingInput", 2, 23),
- T.separator(2, 33),
- T.id("ThingOutput", 2, 35),
- T.closing_angle(2, 46),
- T.denester(3, 1),
- ];
- assertTokensEqual(actual, expected);
-});
diff --git a/lib/tokenize/tokenize.ts b/lib/tokenize/tokenize.ts
deleted file mode 100644
index 73610e2..0000000
--- a/lib/tokenize/tokenize.ts
+++ /dev/null
@@ -1,169 +0,0 @@
-import { LEXICON, Lexicon } from "../consts/lexicon.ts";
-import { validateIdentifier, validateStringLiteral } from "./common.ts";
-import { Token } from "./token.ts";
-
-export function* tokenize(
- content: string,
-): Generator<Token, Token> {
- let currentToken = "";
- let commentMode = false;
- let stringLiteralMode:
- | Lexicon.StringMarker
- | Lexicon.StringMarker2
- | Lexicon.StringMarker3
- | null = null;
- let lineCount = 1;
- let columnCount = 0;
- const makeToken = (
- raw: string,
- lineOffset = 0,
- columnOffset = 0,
- ) => {
- const tokenLine = lineCount + lineOffset;
- const tokenColumn =
- (raw.length === 1 ? columnCount : columnCount - raw.length) +
- columnOffset;
- return new Token(raw, tokenLine, tokenColumn);
- };
- const breakLine = (
- breaker: (
- | typeof LEXICON[Lexicon.LineBreaker]
- | typeof LEXICON[Lexicon.LineBreaker2]
- ),
- ) => {
- if (breaker === LEXICON[Lexicon.LineBreaker]) {
- lineCount++;
- columnCount = 0;
- commentMode = false;
- }
- };
- const closeCurrentToken = (
- currentCharacter: string | null = null,
- ): Token | null => {
- if (currentToken.length === 0 || commentMode) return null;
- let nextToken: string | null = currentCharacter;
- switch (currentToken) {
- case LEXICON[Lexicon.Spacer]:
- case LEXICON[Lexicon.LineBreaker]:
- case LEXICON[Lexicon.LineBreaker2]: {
- break;
- }
- case LEXICON[Lexicon.TypeDefiner]:
- case LEXICON[Lexicon.DepoDefiner]:
- case LEXICON[Lexicon.LoadDefiner]:
- case LEXICON[Lexicon.RequiredMarker]:
- case LEXICON[Lexicon.Setter]: {
- nextToken = currentToken;
- break;
- }
- default: {
- if (
- validateIdentifier(currentToken) ||
- validateStringLiteral(currentToken)
- ) {
- nextToken = currentToken;
- } else {
- // TODO: Throw a syntax error here (expected identifier).
- }
- }
- }
- currentToken = "";
- if (nextToken !== null) {
- return makeToken(nextToken);
- }
- return null;
- };
- for (const character of content) {
- columnCount++;
- let nextToken: Token | null;
- if (
- character === LEXICON[Lexicon.LineBreaker] ||
- character === LEXICON[Lexicon.LineBreaker2]
- ) {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- breakLine(character);
- continue;
- }
- if (commentMode) continue;
- if (stringLiteralMode !== null) {
- currentToken += character;
- if (character === LEXICON[stringLiteralMode]) {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- stringLiteralMode = null;
- }
- continue;
- }
- switch (character) {
- case LEXICON[Lexicon.Commenter]: {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- commentMode = true;
- break;
- }
- case LEXICON[Lexicon.StringMarker]: {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- stringLiteralMode = Lexicon.StringMarker;
- currentToken += character;
- break;
- }
- case LEXICON[Lexicon.StringMarker2]: {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- stringLiteralMode = Lexicon.StringMarker2;
- currentToken += character;
- break;
- }
- case LEXICON[Lexicon.StringMarker3]: {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- stringLiteralMode = Lexicon.StringMarker3;
- currentToken += character;
- break;
- }
- case LEXICON[Lexicon.Nester]:
- case LEXICON[Lexicon.Denester]:
- case LEXICON[Lexicon.OpeningAngle]:
- case LEXICON[Lexicon.ClosingAngle]:
- case LEXICON[Lexicon.Modifier]:
- case LEXICON[Lexicon.Separator]: {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- yield makeToken(character);
- break;
- }
- case LEXICON[Lexicon.RequiredMarker]: {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- currentToken += character;
- break;
- }
- case LEXICON[Lexicon.Setter]: {
- nextToken = closeCurrentToken(character);
- if (nextToken !== null) {
- if (nextToken.is(Lexicon.RequiredMarker)) {
- yield makeToken(LEXICON[Lexicon.RequiredSetter], 0, 1);
- } else {
- yield nextToken;
- yield makeToken(character);
- }
- }
- break;
- }
- case LEXICON[Lexicon.Spacer]:
- case LEXICON[Lexicon.LineBreaker]:
- case LEXICON[Lexicon.LineBreaker2]: {
- nextToken = closeCurrentToken();
- if (nextToken !== null) yield nextToken;
- break;
- }
- default: {
- if (!commentMode) currentToken += character;
- break;
- }
- }
- }
- return makeToken(LEXICON[Lexicon.EOF]);
-}
diff --git a/lib/transpile/cartridge/cartridge.test.ts b/lib/transpile/cartridge/cartridge.test.ts
new file mode 100644
index 0000000..7201dd5
--- /dev/null
+++ b/lib/transpile/cartridge/cartridge.test.ts
@@ -0,0 +1,202 @@
+import { assertEquals } from "../../../deps/std/testing.ts";
+import { Cartridge, CartridgeEvent } from "./cartridge.ts";
+import { CodeBlock } from "../code_block/mod.ts";
+
+Deno.test("event 'file_start' makes a successful dispatch", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.FileStart, (event) => {
+ assertEquals(event.type, CartridgeEvent.FileStart, "matches event name");
+ assertEquals(event.tokens.length, 0, "expects 0 tokens");
+ assertEquals(event.data, null, "always null");
+ return "ABC";
+ });
+ const result = await cartridge.dispatch(CartridgeEvent.FileStart, {
+ type: CartridgeEvent.FileStart,
+ code: new CodeBlock(),
+ data: null,
+ tokens: [],
+ });
+ assertEquals(result, "ABC");
+});
+
+Deno.test("event 'inline_comment' makes a successful dispatch", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(
+ CartridgeEvent.InlineComment,
+ (event) => {
+ assertEquals(
+ event.type,
+ CartridgeEvent.InlineComment,
+ "matches event name",
+ );
+ assertEquals(event.data.comments.length, 1, "expects 1 comment");
+ return event.data.comments.map((comment) => `// ${comment}`).join("\n");
+ },
+ );
+ const expectation = "// ABC";
+ const reality = await cartridge.dispatch(CartridgeEvent.InlineComment, {
+ type: CartridgeEvent.InlineComment,
+ code: new CodeBlock(),
+ data: { comments: ["ABC"] },
+ tokens: [],
+ });
+ assertEquals(expectation, reality);
+});
+
+Deno.test("event 'multiline_comment' makes a successful dispatch", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(
+ CartridgeEvent.MultilineComment,
+ (event) => {
+ assertEquals(
+ event.type,
+ CartridgeEvent.MultilineComment,
+ "matches event name",
+ );
+ assertEquals(event.data.comments.length, 3, "expects 3 comment lines");
+ return event.data.comments.map((comment) => `// ${comment}`).join("\n");
+ },
+ );
+ const expectation = `// ABC
+// DEF
+// GEH`;
+ const reality = await cartridge.dispatch(CartridgeEvent.MultilineComment, {
+ type: CartridgeEvent.MultilineComment,
+ code: new CodeBlock(),
+ data: { comments: ["ABC", "DEF", "GEH"] },
+ tokens: [],
+ });
+ assertEquals(expectation, reality);
+});
+
+Deno.test("event 'load' makes a successful dispatch", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(
+ CartridgeEvent.Load,
+ (event) => {
+ assertEquals(
+ event.type,
+ CartridgeEvent.Load,
+ "matches event name",
+ );
+ assertEquals(event.data.source, "example.fart", "matches source");
+ assertEquals(event.data.dependencies, [
+ "Example1",
+ "Example2",
+ "Example3",
+ ]);
+ return `import { ${
+ event.data.dependencies.join(", ")
+ } } from "${event.data.source}";`;
+ },
+ );
+ const expectation =
+ `import { Example1, Example2, Example3 } from "example.fart";`;
+ const reality = await cartridge.dispatch(CartridgeEvent.Load, {
+ type: CartridgeEvent.Load,
+ code: new CodeBlock(),
+ data: {
+ comments: [],
+ source: "example.fart",
+ dependencies: ["Example1", "Example2", "Example3"],
+ },
+ tokens: [],
+ });
+ assertEquals(expectation, reality);
+});
+
+Deno.test("event 'struct_open' makes a successful dispatch (with comment)", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(
+ CartridgeEvent.StructOpen,
+ (event) => {
+ assertEquals(
+ event.type,
+ CartridgeEvent.StructOpen,
+ "matches event name",
+ );
+ assertEquals(event.data.name, "Example", "matches name");
+ assertEquals(event.data.comments.length, 1, "expects 1 comment");
+ return `// ${event.data.comments[0]}
+interface ${event.data.name} {`;
+ },
+ );
+ const expectation = `// ABC
+interface Example {`;
+ const reality = await cartridge.dispatch(CartridgeEvent.StructOpen, {
+ type: CartridgeEvent.StructOpen,
+ code: new CodeBlock(),
+ data: { name: "Example", comments: ["ABC"] },
+ tokens: [],
+ });
+ assertEquals(expectation, reality);
+});
+
+Deno.test("event 'set_property' makes a successful dispatch", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(
+ CartridgeEvent.SetProperty,
+ (event) => {
+ assertEquals(
+ event.type,
+ CartridgeEvent.SetProperty,
+ "matches event name",
+ );
+ return `${event.data.name}: ${event.data.definition.value};`;
+ },
+ );
+ const expectation = `example: string;`;
+ const reality = await cartridge.dispatch(CartridgeEvent.SetProperty, {
+ type: CartridgeEvent.SetProperty,
+ code: new CodeBlock(),
+ data: { name: "example", definition: { value: "string" }, comments: [] },
+ tokens: [],
+ });
+ assertEquals(expectation, reality);
+});
+
+Deno.test("event 'struct_close' makes a successful dispatch", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(
+ CartridgeEvent.StructClose,
+ (event) => {
+ assertEquals(
+ event.type,
+ CartridgeEvent.StructClose,
+ "matches event name",
+ );
+ return "}";
+ },
+ );
+ const expectation = `}`;
+ const reality = await cartridge.dispatch(CartridgeEvent.StructClose, {
+ type: CartridgeEvent.StructClose,
+ code: new CodeBlock(),
+ data: null,
+ tokens: [],
+ });
+ assertEquals(expectation, reality);
+});
+
+Deno.test("event 'file_end' makes a successful dispatch", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(
+ CartridgeEvent.FileEnd,
+ (event) => {
+ assertEquals(
+ event.type,
+ CartridgeEvent.FileEnd,
+ "matches event name",
+ );
+ return `XYZ`;
+ },
+ );
+ const expectation = `XYZ`;
+ const reality = await cartridge.dispatch(CartridgeEvent.FileEnd, {
+ type: CartridgeEvent.FileEnd,
+ code: new CodeBlock(),
+ data: null,
+ tokens: [],
+ });
+ assertEquals(expectation, reality);
+});
diff --git a/lib/transpile/cartridge/cartridge.ts b/lib/transpile/cartridge/cartridge.ts
new file mode 100644
index 0000000..104dc38
--- /dev/null
+++ b/lib/transpile/cartridge/cartridge.ts
@@ -0,0 +1,197 @@
+import type { Token } from "../tokenize/mod.ts";
+
+export enum CartridgeEvent {
+ FileStart = "file_start",
+ InlineComment = "inline_comment",
+ MultilineComment = "multiline_comment",
+ Load = "load",
+ StructOpen = "struct_open",
+ SetProperty = "set_property",
+ StructClose = "struct_close",
+ FileEnd = "file_end",
+}
+
+export enum ReservedType {
+ Omit = "_",
+ Number = "number",
+ String = "string",
+ Boolean = "boolean",
+ Default = "any",
+}
+
+export enum Modifier {
+ Array = "array", // Modifies anything.
+ Async = "async", // Modifies anything.
+ Dictionary = "dict", // Modifies length-2 tuples.
+ Function = "fn", // Modifies length-2 tuples.
+}
+
+export type CartridgeEventReturnType = (
+ | void
+ | Promise<void>
+ | string
+ | Promise<string>
+ | null
+);
+
+// TODO: Refactor PropertyDefinition interface to be more strict using the
+// list of possible definitions as a guide.
+// Possible Property Definitions
+// - example: string; data = { id: "example", optional: false, value: "string" }
+// - example?: string
+// - example: { abc?: string }; data = { id: "example", optional: false, value: { id: "abc", optional: true, value: "string" } }
+// - example: async % string; Promise<string>
+// - example: fn % async % string; () => Promise<string>
+// - example: fn % (a: string, async % string); (a: string) => Promise<string>
+// - example: fn % (cb: fn % (async % _), number); (cb: () => Promise<void>) => number
+// ; data = { id: "example", value: { mods: [{ name: "fn" }] } }
+
+export interface PropertyDefinition {
+ optional?: boolean;
+ modifier?: string;
+ struct?: Record<string, PropertyDefinition>;
+ tuple?: Array<{
+ label?: string;
+ value: PropertyDefinition;
+ }>;
+ value?: string;
+}
+
+export interface CartridgeEventContext<T extends CartridgeEvent> {
+ type: T;
+ code: { append: (code: string) => CartridgeEventReturnType };
+ tokens: Token[];
+ data: T extends CartridgeEvent.InlineComment ? { comments: string[] }
+ : T extends CartridgeEvent.MultilineComment ? { comments: string[] }
+ : T extends CartridgeEvent.Load
+ ? { comments: string[]; dependencies: string[]; source: string }
+ : T extends CartridgeEvent.StructOpen
+ ? { comments: string[]; name?: string } // undefined name implies anonymous struct
+ : T extends CartridgeEvent.SetProperty ? ({
+ comments: string[];
+ name: string;
+ definition: PropertyDefinition;
+ })
+ : null;
+}
+
+/**
+ * If a code generation function returns null, that means that the
+ * target language omits the requested generated code. A null return
+ * value will prevent the requested line from being appended to the result.
+ */
+export type CartridgeHandler<T extends CartridgeEvent> = (
+ event: CartridgeEventContext<T>,
+) => CartridgeEventReturnType;
+
+export interface CartridgeHandlerMap {
+ [CartridgeEvent.FileStart]?: CartridgeHandler<CartridgeEvent.FileStart>;
+ [CartridgeEvent.InlineComment]?: CartridgeHandler<
+ CartridgeEvent.InlineComment
+ >;
+ [CartridgeEvent.MultilineComment]?: CartridgeHandler<
+ CartridgeEvent.MultilineComment
+ >;
+ [CartridgeEvent.Load]?: CartridgeHandler<CartridgeEvent.Load>;
+ [CartridgeEvent.StructOpen]?: CartridgeHandler<CartridgeEvent.StructOpen>;
+ [CartridgeEvent.SetProperty]?: CartridgeHandler<CartridgeEvent.SetProperty>;
+ [CartridgeEvent.StructClose]?: CartridgeHandler<CartridgeEvent.StructClose>;
+ [CartridgeEvent.FileEnd]?: CartridgeHandler<CartridgeEvent.FileEnd>;
+}
+
+/**
+ * Returns a type composed into plain text (e.g. `number`,
+ * `Array<number>`, `(a: number, b: number) => number`, etc.).
+ */
+export type ModHandler = (...inner: string[]) => string;
+
+/**
+ * The TypeMap API is designed to delegate the generation of
+ * syntax for various programming languages.
+ */
+export interface CartridgeTypeMap {
+ [ReservedType.Omit]?: string;
+ [ReservedType.Number]?: string;
+ [ReservedType.String]?: string;
+ [ReservedType.Boolean]?: string;
+ [ReservedType.Default]?: string;
+
+ // Modifiers are not required for all languages.
+ [Modifier.Array]?: ModHandler;
+ [Modifier.Async]?: ModHandler;
+ [Modifier.Dictionary]?: ModHandler;
+ [Modifier.Function]?: ModHandler;
+}
+
+export class Cartridge {
+ constructor(
+ private typemap: CartridgeTypeMap = {},
+ private handlers: CartridgeHandlerMap = {},
+ ) {}
+
+ public addEventListener(
+ name: CartridgeEvent.FileStart,
+ handler: CartridgeHandler<CartridgeEvent.FileStart>,
+ ): void;
+ public addEventListener(
+ name: CartridgeEvent.InlineComment,
+ handler: CartridgeHandler<CartridgeEvent.InlineComment>,
+ ): void;
+ public addEventListener(
+ name: CartridgeEvent.MultilineComment,
+ handler: CartridgeHandler<CartridgeEvent.MultilineComment>,
+ ): void;
+ public addEventListener(
+ name: CartridgeEvent.Load,
+ handler: CartridgeHandler<CartridgeEvent.Load>,
+ ): void;
+ public addEventListener(
+ name: CartridgeEvent.StructOpen,
+ handler: CartridgeHandler<CartridgeEvent.StructOpen>,
+ ): void;
+ public addEventListener(
+ name: CartridgeEvent.SetProperty,
+ handler: CartridgeHandler<CartridgeEvent.SetProperty>,
+ ): void;
+ public addEventListener(
+ name: CartridgeEvent.StructClose,
+ handler: CartridgeHandler<CartridgeEvent.StructClose>,
+ ): void;
+ public addEventListener(
+ name: CartridgeEvent.FileEnd,
+ handler: CartridgeHandler<CartridgeEvent.FileEnd>,
+ ): void;
+ public addEventListener(
+ name: CartridgeEvent,
+ // deno-lint-ignore no-explicit-any
+ handler: any,
+ ) {
+ this.handlers[name] = handler;
+ }
+
+ /** `on` is an alias for `addEventListener` */
+ public on = this.addEventListener.bind(this);
+
+ public removeEventListener(name: CartridgeEvent) {
+ delete this.handlers[name];
+ }
+
+ public async dispatch(
+ name: CartridgeEvent,
+ ctx: CartridgeEventContext<CartridgeEvent>,
+ ): Promise<string | null> {
+ const handleEvent = this.handlers[name] as CartridgeHandler<CartridgeEvent>;
+ if (handleEvent === undefined) return null;
+ const result = await handleEvent(ctx);
+ if (typeof result === "string") return result;
+ return null;
+ }
+
+ public getType(type?: string): string | undefined {
+ return this.typemap[type as ReservedType];
+ }
+
+ public getMod(mod?: string): ModHandler | undefined {
+ return this.typemap[mod as Modifier];
+ }
+}
diff --git a/lib/transpile/cartridge/mod.ts b/lib/transpile/cartridge/mod.ts
new file mode 100644
index 0000000..ded76b9
--- /dev/null
+++ b/lib/transpile/cartridge/mod.ts
@@ -0,0 +1,9 @@
+export { Cartridge, CartridgeEvent } from "./cartridge.ts";
+export type {
+ CartridgeEventContext,
+ CartridgeHandler,
+ CartridgeHandlerMap,
+ ModHandler,
+ Modifier,
+ PropertyDefinition,
+} from "./cartridge.ts";
diff --git a/lib/transpile/code_block/code_block.test.ts b/lib/transpile/code_block/code_block.test.ts
new file mode 100644
index 0000000..24c67d2
--- /dev/null
+++ b/lib/transpile/code_block/code_block.test.ts
@@ -0,0 +1,54 @@
+import { assertEquals } from "../../../deps/std/testing.ts";
+import { CodeBlock } from "./code_block.ts";
+
+Deno.test("new code block is empty", () => {
+ assertEquals(new CodeBlock().export(), "");
+});
+
+Deno.test("add 3 lines of code to the block", () => {
+ const block = new CodeBlock();
+ block.append("a");
+ block.append("b");
+ block.append("c");
+ const expectation = "a\nb\nc";
+ const reality = block.export();
+ assertEquals(expectation, reality);
+});
+
+Deno.test("add 3 lines of code to the block (indented)", () => {
+ const block = new CodeBlock();
+ block.append("a", 0);
+ block.append("b", 1);
+ block.append("c", 2);
+ const expectation = "a\n b\n c";
+ const reality = block.export();
+ assertEquals(expectation, reality);
+});
+
+Deno.test("join 3 code blocks", () => {
+ const block1 = new CodeBlock();
+ block1.append("a", 0);
+ block1.append("b", 1);
+ block1.append("c", 2);
+ const block2 = new CodeBlock();
+ block2.append("d", 1);
+ block2.append("e", 0);
+ block2.append("f", 1);
+ const block3 = new CodeBlock();
+ block3.append("g", 2);
+ block3.append("h", 1);
+ block3.append("i", 0);
+ const expectation = `a
+ b
+ c
+
+ d
+e
+ f
+
+ g
+ h
+i`;
+ const reality = CodeBlock.join(block1, block2, block3);
+ assertEquals(expectation, reality);
+});
diff --git a/lib/transpile/code_block/code_block.ts b/lib/transpile/code_block/code_block.ts
new file mode 100644
index 0000000..8aa68c3
--- /dev/null
+++ b/lib/transpile/code_block/code_block.ts
@@ -0,0 +1,58 @@
+import { getIndent, Indent, IndentOption } from "../indent/mod.ts";
+
+export interface LineOfCode {
+ content: string;
+ indentLevel: number;
+}
+
+/**
+ * Represents a block of code.
+ */
+export class CodeBlock {
+ public code: LineOfCode[] = [];
+
+ /**
+ * @param content string that is split up by line break
+ * @param indentLevel depth of nesting; defaults to 0
+ */
+ append(content: string, indentLevel = 0): void {
+ this.code.push(
+ ...content.split("\n").map((line) => ({ content: line, indentLevel })),
+ );
+ }
+
+ export(indent: IndentOption = Indent.Space2): string {
+ return this.code
+ .map(({ content, indentLevel }) =>
+ getIndent(indent, indentLevel) + content
+ )
+ .join("\n");
+ }
+
+ /**
+ * `toString` is an alias for `CodeBlock.export`.
+ */
+ toString = this.export.bind(this);
+
+ static join(
+ indentOrFirstBlock: IndentOption | CodeBlock,
+ ...blocks: CodeBlock[]
+ ): string {
+ const blockPadding = 2; // lines between each code block
+ const blockSeparator = "\n".repeat(blockPadding);
+ const indentSpecified = !(indentOrFirstBlock instanceof CodeBlock);
+ if (!indentSpecified) blocks = [indentOrFirstBlock, ...blocks];
+ return blocks
+ .filter((block) => block !== null)
+ .reduce(
+ (file, block, i) => {
+ const exportedCode = indentSpecified
+ ? block.export(indentOrFirstBlock)
+ : block.export();
+ const isLast = blocks.length - 1 <= i;
+ return file + exportedCode + (isLast ? "" : blockSeparator);
+ },
+ "",
+ );
+ }
+}
diff --git a/lib/transpile/code_block/mod.ts b/lib/transpile/code_block/mod.ts
new file mode 100644
index 0000000..933711f
--- /dev/null
+++ b/lib/transpile/code_block/mod.ts
@@ -0,0 +1 @@
+export { CodeBlock } from "./code_block.ts";
diff --git a/lib/transpile/indent/indent.test.ts b/lib/transpile/indent/indent.test.ts
new file mode 100644
index 0000000..a504510
--- /dev/null
+++ b/lib/transpile/indent/indent.test.ts
@@ -0,0 +1,214 @@
+import { assertEquals } from "../../../deps/std/testing.ts";
+import { INDENT, Indent } from "./indent.ts";
+
+Deno.test("cache of Indent.Tab1 equals 1 tab", () => {
+ assertEquals(INDENT[Indent.Tab1], "\t".repeat(1));
+});
+
+Deno.test("cache of Indent.Tab2 equals 2 tabs", () => {
+ assertEquals(INDENT[Indent.Tab2], "\t".repeat(2));
+});
+
+Deno.test("cache of Indent.Tab3 equals 3 tabs", () => {
+ assertEquals(INDENT[Indent.Tab3], "\t".repeat(3));
+});
+
+Deno.test("cache of Indent.Tab4 equals 4 tabs", () => {
+ assertEquals(INDENT[Indent.Tab4], "\t".repeat(4));
+});
+
+Deno.test("cache of Indent.Tab5 equals 5 tabs", () => {
+ assertEquals(INDENT[Indent.Tab5], "\t".repeat(5));
+});
+
+Deno.test("cache of Indent.Tab6 equals 6 tabs", () => {
+ assertEquals(INDENT[Indent.Tab6], "\t".repeat(6));
+});
+
+Deno.test("cache of Indent.Tab7 equals 7 tabs", () => {
+ assertEquals(INDENT[Indent.Tab7], "\t".repeat(7));
+});
+
+Deno.test("cache of Indent.Tab8 equals 8 tabs", () => {
+ assertEquals(INDENT[Indent.Tab8], "\t".repeat(8));
+});
+
+Deno.test("cache of Indent.Tab9 equals 9 tabs", () => {
+ assertEquals(INDENT[Indent.Tab9], "\t".repeat(9));
+});
+
+Deno.test("cache of Indent.Tab10 equals 10 tabs", () => {
+ assertEquals(INDENT[Indent.Tab10], "\t".repeat(10));
+});
+
+Deno.test("cache of Indent.Tab11 equals 11 tabs", () => {
+ assertEquals(INDENT[Indent.Tab11], "\t".repeat(11));
+});
+
+Deno.test("cache of Indent.Tab12 equals 12 tabs", () => {
+ assertEquals(INDENT[Indent.Tab12], "\t".repeat(12));
+});
+
+Deno.test("cache of Indent.Tab13 equals 13 tabs", () => {
+ assertEquals(INDENT[Indent.Tab13], "\t".repeat(13));
+});
+
+Deno.test("cache of Indent.Tab14 equals 14 tabs", () => {
+ assertEquals(INDENT[Indent.Tab14], "\t".repeat(14));
+});
+
+Deno.test("cache of Indent.Tab15 equals 15 tabs", () => {
+ assertEquals(INDENT[Indent.Tab15], "\t".repeat(15));
+});
+
+Deno.test("cache of Indent.Tab16 equals 16 tabs", () => {
+ assertEquals(INDENT[Indent.Tab16], "\t".repeat(16));
+});
+
+Deno.test("cache of Indent.Space1 equals 1 spaces", () => {
+ assertEquals(INDENT[Indent.Space1], " ".repeat(1));
+});
+
+Deno.test("cache of Indent.Space2 equals 2 spaces", () => {
+ assertEquals(INDENT[Indent.Space2], " ".repeat(2));
+});
+
+Deno.test("cache of Indent.Space3 equals 3 spaces", () => {
+ assertEquals(INDENT[Indent.Space3], " ".repeat(3));
+});
+
+Deno.test("cache of Indent.Space4 equals 4 spaces", () => {
+ assertEquals(INDENT[Indent.Space4], " ".repeat(4));
+});
+
+Deno.test("cache of Indent.Space5 equals 5 spaces", () => {
+ assertEquals(INDENT[Indent.Space5], " ".repeat(5));
+});
+
+Deno.test("cache of Indent.Space6 equals 6 spaces", () => {
+ assertEquals(INDENT[Indent.Space6], " ".repeat(6));
+});
+
+Deno.test("cache of Indent.Space7 equals 7 spaces", () => {
+ assertEquals(INDENT[Indent.Space7], " ".repeat(7));
+});
+
+Deno.test("cache of Indent.Space8 equals 8 spaces", () => {
+ assertEquals(INDENT[Indent.Space8], " ".repeat(8));
+});
+
+Deno.test("cache of Indent.Space9 equals 9 spaces", () => {
+ assertEquals(INDENT[Indent.Space9], " ".repeat(9));
+});
+
+Deno.test("cache of Indent.Space10 equals 10 spaces", () => {
+ assertEquals(INDENT[Indent.Space10], " ".repeat(10));
+});
+
+Deno.test("cache of Indent.Space11 equals 11 spaces", () => {
+ assertEquals(INDENT[Indent.Space11], " ".repeat(11));
+});
+
+Deno.test("cache of Indent.Space12 equals 12 spaces", () => {
+ assertEquals(INDENT[Indent.Space12], " ".repeat(12));
+});
+
+Deno.test("cache of Indent.Space13 equals 13 spaces", () => {
+ assertEquals(INDENT[Indent.Space13], " ".repeat(13));
+});
+
+Deno.test("cache of Indent.Space14 equals 14 spaces", () => {
+ assertEquals(INDENT[Indent.Space14], " ".repeat(14));
+});
+
+Deno.test("cache of Indent.Space15 equals 15 spaces", () => {
+ assertEquals(INDENT[Indent.Space15], " ".repeat(15));
+});
+
+Deno.test("cache of Indent.Space16 equals 16 spaces", () => {
+ assertEquals(INDENT[Indent.Space16], " ".repeat(16));
+});
+
+Deno.test("cache of Indent.Space18 equals 18 spaces", () => {
+ assertEquals(INDENT[Indent.Space18], " ".repeat(18));
+});
+
+Deno.test("cache of Indent.Space20 equals 20 spaces", () => {
+ assertEquals(INDENT[Indent.Space20], " ".repeat(20));
+});
+
+Deno.test("cache of Indent.Space21 equals 21 spaces", () => {
+ assertEquals(INDENT[Indent.Space21], " ".repeat(21));
+});
+
+Deno.test("cache of Indent.Space22 equals 22 spaces", () => {
+ assertEquals(INDENT[Indent.Space22], " ".repeat(22));
+});
+
+Deno.test("cache of Indent.Space24 equals 24 spaces", () => {
+ assertEquals(INDENT[Indent.Space24], " ".repeat(24));
+});
+
+Deno.test("cache of Indent.Space26 equals 26 spaces", () => {
+ assertEquals(INDENT[Indent.Space26], " ".repeat(26));
+});
+
+Deno.test("cache of Indent.Space27 equals 27 spaces", () => {
+ assertEquals(INDENT[Indent.Space27], " ".repeat(27));
+});
+
+Deno.test("cache of Indent.Space28 equals 28 spaces", () => {
+ assertEquals(INDENT[Indent.Space28], " ".repeat(28));
+});
+
+Deno.test("cache of Indent.Space30 equals 30 spaces", () => {
+ assertEquals(INDENT[Indent.Space30], " ".repeat(30));
+});
+
+Deno.test("cache of Indent.Space32 equals 32 spaces", () => {
+ assertEquals(INDENT[Indent.Space32], " ".repeat(32));
+});
+
+Deno.test("cache of Indent.Space33 equals 33 spaces", () => {
+ assertEquals(INDENT[Indent.Space33], " ".repeat(33));
+});
+
+Deno.test("cache of Indent.Space36 equals 36 spaces", () => {
+ assertEquals(INDENT[Indent.Space36], " ".repeat(36));
+});
+
+Deno.test("cache of Indent.Space39 equals 39 spaces", () => {
+ assertEquals(INDENT[Indent.Space39], " ".repeat(39));
+});
+
+Deno.test("cache of Indent.Space40 equals 40 spaces", () => {
+ assertEquals(INDENT[Indent.Space40], " ".repeat(40));
+});
+
+Deno.test("cache of Indent.Space42 equals 42 spaces", () => {
+ assertEquals(INDENT[Indent.Space42], " ".repeat(42));
+});
+
+Deno.test("cache of Indent.Space44 equals 44 spaces", () => {
+ assertEquals(INDENT[Indent.Space44], " ".repeat(44));
+});
+
+Deno.test("cache of Indent.Space45 equals 45 spaces", () => {
+ assertEquals(INDENT[Indent.Space45], " ".repeat(45));
+});
+
+Deno.test("cache of Indent.Space48 equals 48 spaces", () => {
+ assertEquals(INDENT[Indent.Space48], " ".repeat(48));
+});
+
+Deno.test("cache of Indent.Space52 equals 52 spaces", () => {
+ assertEquals(INDENT[Indent.Space52], " ".repeat(52));
+});
+
+Deno.test("cache of Indent.Space56 equals 56 spaces", () => {
+ assertEquals(INDENT[Indent.Space56], " ".repeat(56));
+});
+
+Deno.test("cache of Indent.Space60 equals 60 spaces", () => {
+ assertEquals(INDENT[Indent.Space60], " ".repeat(60));
+});
diff --git a/lib/consts/indent.ts b/lib/transpile/indent/indent.ts
similarity index 96%
rename from lib/consts/indent.ts
rename to lib/transpile/indent/indent.ts
index 9232e9b..42fce34 100644
--- a/lib/consts/indent.ts
+++ b/lib/transpile/indent/indent.ts
@@ -1,127 +1,128 @@
-/**
- * Enum containing all possible combinations of tabbed, single-spaced,
- * double-spaced, triple-spaced, and quadruple-spaced indentations.
- */
-export enum Indent {
- Tab1 = -1,
- Tab2 = -2,
- Tab3 = -3,
- Tab4 = -4,
- Tab5 = -5,
- Tab6 = -6,
- Tab7 = -7,
- Tab8 = -8,
- Tab9 = -9,
- Tab10 = -10,
- Tab11 = -11,
- Tab12 = -12,
- Tab13 = -13,
- Tab14 = -14,
- Tab15 = -15,
- Tab16 = -16,
- Space0 = 0,
- Space1 = 1,
- Space2 = 2,
- Space3 = 3,
- Space4 = 4,
- Space5 = 5,
- Space6 = 6,
- Space7 = 7,
- Space8 = 8,
- Space9 = 9,
- Space10 = 10,
- Space11 = 11,
- Space12 = 12,
- Space13 = 13,
- Space14 = 14,
- Space15 = 15,
- Space16 = 16,
- Space18 = 18,
- Space20 = 20,
- Space21 = 21,
- Space22 = 22,
- Space24 = 24,
- Space26 = 26,
- Space27 = 27,
- Space28 = 28,
- Space30 = 30,
- Space32 = 32,
- Space33 = 33,
- Space36 = 36,
- Space39 = 39,
- Space40 = 40,
- Space42 = 42,
- Space44 = 44,
- Space45 = 45,
- Space48 = 48,
- Space52 = 52,
- Space56 = 56,
- Space60 = 60,
-}
-
-export type IndentOption =
- | Indent.Tab1
- | Indent.Space1
- | Indent.Space2
- | Indent.Space3
- | Indent.Space4;
-
-export const INDENT = {
- [Indent.Tab1]: "\t",
- [Indent.Tab2]: "\t\t",
- [Indent.Tab3]: "\t\t\t",
- [Indent.Tab4]: "\t\t\t\t",
- [Indent.Tab5]: "\t\t\t\t\t",
- [Indent.Tab6]: "\t\t\t\t\t\t",
- [Indent.Tab7]: "\t\t\t\t\t\t\t",
- [Indent.Tab8]: "\t\t\t\t\t\t\t\t",
- [Indent.Tab9]: "\t\t\t\t\t\t\t\t\t",
- [Indent.Tab10]: "\t\t\t\t\t\t\t\t\t\t",
- [Indent.Tab11]: "\t\t\t\t\t\t\t\t\t\t\t",
- [Indent.Tab12]: "\t\t\t\t\t\t\t\t\t\t\t\t",
- [Indent.Tab13]: "\t\t\t\t\t\t\t\t\t\t\t\t\t",
- [Indent.Tab14]: "\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
- [Indent.Tab15]: "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
- [Indent.Tab16]: "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
- [Indent.Space0]: "",
- [Indent.Space1]: " ",
- [Indent.Space2]: " ",
- [Indent.Space3]: " ",
- [Indent.Space4]: " ",
- [Indent.Space5]: " ",
- [Indent.Space6]: " ",
- [Indent.Space7]: " ",
- [Indent.Space8]: " ",
- [Indent.Space9]: " ",
- [Indent.Space10]: " ",
- [Indent.Space11]: " ",
- [Indent.Space12]: " ",
- [Indent.Space13]: " ",
- [Indent.Space14]: " ",
- [Indent.Space15]: " ",
- [Indent.Space16]: " ",
- [Indent.Space18]: " ",
- [Indent.Space20]: " ",
- [Indent.Space21]: " ",
- [Indent.Space22]: " ",
- [Indent.Space24]: " ",
- [Indent.Space26]: " ",
- [Indent.Space27]: " ",
- [Indent.Space28]: " ",
- [Indent.Space30]: " ",
- [Indent.Space32]: " ",
- [Indent.Space33]: " ",
- [Indent.Space36]: " ",
- [Indent.Space39]: " ",
- [Indent.Space40]: " ",
- [Indent.Space42]: " ",
- [Indent.Space44]: " ",
- [Indent.Space45]: " ",
- [Indent.Space48]: " ",
- [Indent.Space52]: " ",
- [Indent.Space56]: " ",
- [Indent.Space60]:
- " ",
-} as const;
-
-export type IndentCacheIndex = keyof typeof INDENT;
+/**
+ * Enum containing all possible combinations of tabbed, single-spaced,
+ * double-spaced, triple-spaced, and quadruple-spaced indentations.
+ */
+export enum Indent {
+ Tab1 = -1,
+ Tab2 = -2,
+ Tab3 = -3,
+ Tab4 = -4,
+ Tab5 = -5,
+ Tab6 = -6,
+ Tab7 = -7,
+ Tab8 = -8,
+ Tab9 = -9,
+ Tab10 = -10,
+ Tab11 = -11,
+ Tab12 = -12,
+ Tab13 = -13,
+ Tab14 = -14,
+ Tab15 = -15,
+ Tab16 = -16,
+ Space0 = 0,
+ Space1 = 1,
+ Space2 = 2,
+ Space3 = 3,
+ Space4 = 4,
+ Space5 = 5,
+ Space6 = 6,
+ Space7 = 7,
+ Space8 = 8,
+ Space9 = 9,
+ Space10 = 10,
+ Space11 = 11,
+ Space12 = 12,
+ Space13 = 13,
+ Space14 = 14,
+ Space15 = 15,
+ Space16 = 16,
+ Space18 = 18,
+ Space20 = 20,
+ Space21 = 21,
+ Space22 = 22,
+ Space24 = 24,
+ Space26 = 26,
+ Space27 = 27,
+ Space28 = 28,
+ Space30 = 30,
+ Space32 = 32,
+ Space33 = 33,
+ Space36 = 36,
+ Space39 = 39,
+ Space40 = 40,
+ Space42 = 42,
+ Space44 = 44,
+ Space45 = 45,
+ Space48 = 48,
+ Space52 = 52,
+ Space56 = 56,
+ Space60 = 60,
+}
+
+export type IndentOption =
+ | Indent.Tab1
+ | Indent.Tab2
+ | Indent.Space1
+ | Indent.Space2
+ | Indent.Space3
+ | Indent.Space4;
+
+export const INDENT = {
+ [Indent.Tab1]: "\t",
+ [Indent.Tab2]: "\t\t",
+ [Indent.Tab3]: "\t\t\t",
+ [Indent.Tab4]: "\t\t\t\t",
+ [Indent.Tab5]: "\t\t\t\t\t",
+ [Indent.Tab6]: "\t\t\t\t\t\t",
+ [Indent.Tab7]: "\t\t\t\t\t\t\t",
+ [Indent.Tab8]: "\t\t\t\t\t\t\t\t",
+ [Indent.Tab9]: "\t\t\t\t\t\t\t\t\t",
+ [Indent.Tab10]: "\t\t\t\t\t\t\t\t\t\t",
+ [Indent.Tab11]: "\t\t\t\t\t\t\t\t\t\t\t",
+ [Indent.Tab12]: "\t\t\t\t\t\t\t\t\t\t\t\t",
+ [Indent.Tab13]: "\t\t\t\t\t\t\t\t\t\t\t\t\t",
+ [Indent.Tab14]: "\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
+ [Indent.Tab15]: "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
+ [Indent.Tab16]: "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t",
+ [Indent.Space0]: "",
+ [Indent.Space1]: " ",
+ [Indent.Space2]: " ",
+ [Indent.Space3]: " ",
+ [Indent.Space4]: " ",
+ [Indent.Space5]: " ",
+ [Indent.Space6]: " ",
+ [Indent.Space7]: " ",
+ [Indent.Space8]: " ",
+ [Indent.Space9]: " ",
+ [Indent.Space10]: " ",
+ [Indent.Space11]: " ",
+ [Indent.Space12]: " ",
+ [Indent.Space13]: " ",
+ [Indent.Space14]: " ",
+ [Indent.Space15]: " ",
+ [Indent.Space16]: " ",
+ [Indent.Space18]: " ",
+ [Indent.Space20]: " ",
+ [Indent.Space21]: " ",
+ [Indent.Space22]: " ",
+ [Indent.Space24]: " ",
+ [Indent.Space26]: " ",
+ [Indent.Space27]: " ",
+ [Indent.Space28]: " ",
+ [Indent.Space30]: " ",
+ [Indent.Space32]: " ",
+ [Indent.Space33]: " ",
+ [Indent.Space36]: " ",
+ [Indent.Space39]: " ",
+ [Indent.Space40]: " ",
+ [Indent.Space42]: " ",
+ [Indent.Space44]: " ",
+ [Indent.Space45]: " ",
+ [Indent.Space48]: " ",
+ [Indent.Space52]: " ",
+ [Indent.Space56]: " ",
+ [Indent.Space60]:
+ " ",
+} as const;
+
+export type IndentCacheIndex = keyof typeof INDENT;
diff --git a/lib/transpile/indent/mod.ts b/lib/transpile/indent/mod.ts
new file mode 100644
index 0000000..6ef33f0
--- /dev/null
+++ b/lib/transpile/indent/mod.ts
@@ -0,0 +1,3 @@
+export { INDENT, Indent } from "./indent.ts";
+export type { IndentCacheIndex, IndentOption } from "./indent.ts";
+export { getCachedIndent, getIndent, getIndentOption } from "./utils.ts";
diff --git a/lib/transpile/indent/utils.test.ts b/lib/transpile/indent/utils.test.ts
new file mode 100644
index 0000000..5b0ff5f
--- /dev/null
+++ b/lib/transpile/indent/utils.test.ts
@@ -0,0 +1,56 @@
+import {
+ bench,
+ BenchmarkTimer,
+ runBenchmarks,
+} from "../../../deps/std/testing.ts";
+import { INDENT, Indent } from "./indent.ts";
+import { getCachedIndent } from "./utils.ts";
+
+const CACHE_BENCH_ID = "CACHE_TEST";
+const COMPUTED_BENCH_ID = "COMPUTED_TEST";
+const BENCH_RUNS = 1e6; // the higher the number, the more accurate the benchmark results
+
+/**
+ * @see https://deno.land/std@0.119.0/testing#benching
+ */
+bench({
+ name: CACHE_BENCH_ID,
+ runs: BENCH_RUNS, // averaging execution time over multiple runs
+ func: (timer: BenchmarkTimer): void => {
+ const store: string[] = [];
+ timer.start();
+ for (let i = 1; i <= 16; i++) store.push(getCachedIndent(Indent.Tab1, i));
+ for (let i = 1; i <= 16; i++) store.push(getCachedIndent(Indent.Tab2, i));
+ for (let i = 1; i <= 16; i++) store.push(getCachedIndent(Indent.Space1, i));
+ for (let i = 1; i <= 16; i++) store.push(getCachedIndent(Indent.Space2, i));
+ for (let i = 1; i <= 16; i++) store.push(getCachedIndent(Indent.Space3, i));
+ for (let i = 1; i <= 16; i++) store.push(getCachedIndent(Indent.Space4, i));
+ timer.stop();
+ },
+});
+
+bench({
+ name: COMPUTED_BENCH_ID,
+ runs: BENCH_RUNS, // averaging execution time over multiple runs
+ func: (timer: BenchmarkTimer): void => {
+ const store: string[] = [];
+ timer.start();
+ for (let i = 1; i <= 16; i++) store.push(INDENT[Indent.Tab1].repeat(i));
+ for (let i = 1; i <= 16; i++) store.push(INDENT[Indent.Tab2].repeat(i));
+ for (let i = 1; i <= 16; i++) store.push(INDENT[Indent.Space1].repeat(i));
+ for (let i = 1; i <= 16; i++) store.push(INDENT[Indent.Space2].repeat(i));
+ for (let i = 1; i <= 16; i++) store.push(INDENT[Indent.Space3].repeat(i));
+ for (let i = 1; i <= 16; i++) store.push(INDENT[Indent.Space4].repeat(i));
+ timer.stop();
+ },
+});
+
+if (import.meta.main) {
+ const { results: [cache, computed] } = await runBenchmarks();
+ const speedBoostPercentage = 100 * computed.measuredRunsAvgMs /
+ cache.measuredRunsAvgMs;
+ const finalMessage = `the cache algorithm is ${
+ speedBoostPercentage.toFixed(2)
+ }% the speed of the \`repeat\` algorithm`;
+ console.log(finalMessage);
+}
diff --git a/lib/transpile/indent/utils.ts b/lib/transpile/indent/utils.ts
new file mode 100644
index 0000000..78a79d6
--- /dev/null
+++ b/lib/transpile/indent/utils.ts
@@ -0,0 +1,89 @@
+import { INDENT, Indent, IndentCacheIndex, IndentOption } from "./indent.ts";
+
+export function getIndentOption(
+ indentOption: IndentOption | string,
+): IndentOption | null {
+ let option: IndentOption | null = null;
+ switch (indentOption) {
+ case Indent.Tab1: {
+ option = Indent.Tab1;
+ break;
+ }
+ case Indent.Tab2: {
+ option = Indent.Tab2;
+ break;
+ }
+ case Indent.Space1: {
+ option = Indent.Space1;
+ break;
+ }
+ case Indent.Space2: {
+ option = Indent.Space2;
+ break;
+ }
+ case Indent.Space3: {
+ option = Indent.Space3;
+ break;
+ }
+ case Indent.Space4: {
+ option = Indent.Space4;
+ break;
+ }
+ }
+ return option;
+}
+
+export function getCachedIndent(
+ indentOption: IndentOption,
+ indentLevel: number,
+): string {
+ const indentCacheIndex = indentOption *
+ Math.floor(indentLevel) as IndentCacheIndex;
+ return INDENT[indentCacheIndex];
+}
+
+/**
+ * This function returns either a cached indent string from
+ * `lib/transpile/indent/indent.ts` or a freshly computed one.
+ *
+ * ## Usage
+ *
+ * ```ts
+ * // Tab spacing is represented by -1 (double-tab spacing by -2).
+ * getIndent(-1, 1) // "\t"
+ * getIndent(-1, 3) // "\t\t\t"
+ *
+ * // Single, double, triple, and quadruple spaces are
+ * // represented by 1, 2, 3, and 4 respectively.
+ * getIndent(1, 1) // " "
+ * getIndent(1, 3) // " "
+ * getIndent(2, 3) // " "
+ * getIndent(3, 3) // " "
+ * getIndent(4, 3) // " "
+ *
+ * // For non-cached indents, a string may be passed
+ * // instead and will be computed immediately.
+ * getIndent("#", 3) // "###"
+ * getIndent("_", 20) // "____________________"
+ *
+ * // Any invalid indentation options will result in the
+ * // return of an empty string.
+ * getIndent(5, 1) // ""
+ * getIndent(-3, 1) // ""
+ * ```
+ */
+export function getIndent(
+ indentOption: IndentOption | string,
+ indentLevel: number,
+): string {
+ const option = getIndentOption(indentOption);
+ indentLevel = Math.floor(Math.max(0, indentLevel)); // Assert indent level is a positive integer.
+ if (option !== null) {
+ const cachedIndent = getCachedIndent(option, indentLevel);
+ if (cachedIndent !== null) return cachedIndent;
+ }
+ if (typeof indentOption === "string") {
+ return indentOption.repeat(Math.max(indentLevel, 0));
+ }
+ return "";
+}
diff --git a/lib/transpile/mod.ts b/lib/transpile/mod.ts
new file mode 100644
index 0000000..57534ec
--- /dev/null
+++ b/lib/transpile/mod.ts
@@ -0,0 +1 @@
+export { transpile } from "./transpile.ts";
diff --git a/lib/transpile/text_builder/mod.ts b/lib/transpile/text_builder/mod.ts
new file mode 100644
index 0000000..e91e626
--- /dev/null
+++ b/lib/transpile/text_builder/mod.ts
@@ -0,0 +1 @@
+export { TextBuilder } from "./text_builder.ts";
diff --git a/lib/transpile/text_builder/text_builder.test.ts b/lib/transpile/text_builder/text_builder.test.ts
new file mode 100644
index 0000000..b4ce8d6
--- /dev/null
+++ b/lib/transpile/text_builder/text_builder.test.ts
@@ -0,0 +1,99 @@
+import { assertEquals } from "../../../deps/std/testing.ts";
+import { TextBuilder } from "./text_builder.ts";
+import { Cartridge, CartridgeEvent } from "../cartridge/mod.ts";
+import { T } from "../tokenize/mod.ts";
+
+Deno.test("text builder exports an empty string when nothing is appended", () => {
+ const cartridge = new Cartridge();
+ const builder = new TextBuilder(cartridge);
+ assertEquals(builder.export(), "");
+});
+
+Deno.test("text builder appends file_start event", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.FileStart, () => "ABC");
+ const builder = new TextBuilder(cartridge);
+ await builder.append(CartridgeEvent.FileStart);
+ assertEquals(builder.export(), "ABC");
+});
+
+Deno.test("text builder appends inline_comment event", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.InlineComment, () => "ABC");
+ const builder = new TextBuilder(cartridge);
+ await builder.append(
+ CartridgeEvent.InlineComment,
+ [T.comment("; Example", 1, 1)],
+ [],
+ );
+ assertEquals(builder.export(), "ABC");
+});
+
+Deno.test("text builder appends multiline_comment event", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.MultilineComment, () => "ABC");
+ const builder = new TextBuilder(cartridge);
+ await builder.append(
+ CartridgeEvent.MultilineComment,
+ [T.multiline_comment(
+ `/*
+ This is a multiline comment!
+*/`,
+ 1,
+ 1,
+ )],
+ [],
+ );
+ assertEquals(builder.export(), "ABC");
+});
+
+Deno.test("text builder appends load event", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.Load, () => "ABC");
+ const builder = new TextBuilder(cartridge);
+ await builder.append(
+ CartridgeEvent.Load,
+ /* tokens=*/ [],
+ /* comments=*/ [],
+ /* always undefined=*/ undefined,
+ /* src=*/ "",
+ /* dep1=*/ "",
+ );
+ assertEquals(builder.export(), "ABC");
+});
+
+Deno.test("text builder appends struct_open event", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.StructOpen, () => "ABC");
+ const builder = new TextBuilder(cartridge);
+ await builder.append(CartridgeEvent.StructOpen, [], []);
+ assertEquals(builder.export(), "ABC");
+});
+
+Deno.test("text builder appends set_property event", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.SetProperty, () => "ABC");
+ const builder = new TextBuilder(cartridge);
+ await builder.append(CartridgeEvent.SetProperty, [], []);
+ assertEquals(builder.export(), "ABC");
+});
+
+Deno.test("text builder appends struct_close event", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.StructClose, () => "ABC");
+ const builder = new TextBuilder(cartridge);
+ await builder.append(
+ CartridgeEvent.StructClose,
+ [T.denest(1, 1)],
+ [],
+ );
+ assertEquals(builder.export(), "ABC");
+});
+
+Deno.test("text builder appends file_end event", async () => {
+ const cartridge = new Cartridge();
+ cartridge.on(CartridgeEvent.FileEnd, () => "ABC");
+ const builder = new TextBuilder(cartridge);
+ await builder.append(CartridgeEvent.FileEnd, [], []);
+ assertEquals(builder.export(), "ABC");
+});
diff --git a/lib/transpile/text_builder/text_builder.ts b/lib/transpile/text_builder/text_builder.ts
new file mode 100644
index 0000000..e822708
--- /dev/null
+++ b/lib/transpile/text_builder/text_builder.ts
@@ -0,0 +1,194 @@
+import { CodeBlock } from "../code_block/mod.ts";
+import { Indent, IndentOption } from "../indent/mod.ts";
+import {
+ Cartridge,
+ CartridgeEvent,
+ PropertyDefinition,
+} from "../cartridge/mod.ts";
+import type { Token } from "../tokenize/mod.ts";
+import {
+ makeFileEndEventContext,
+ makeFileStartEventContext,
+ makeInlineCommentEventContext,
+ makeLoadEventContext,
+ makeMultilineCommentEventContext,
+ makeSetPropertyEventContext,
+ makeStructCloseEventContext,
+ makeStructOpenEventContext,
+} from "./utils.ts";
+// import { assertKind } from "../utils.ts";
+
+export class TextBuilder {
+ private blocks: CodeBlock[];
+ private currentBlock: CodeBlock;
+ private indentLevel: number;
+
+ constructor(private cartridge: Cartridge) {
+ this.blocks = [];
+ this.currentBlock = new CodeBlock();
+ this.indentLevel = 0;
+ }
+
+ /**
+ * _stash_ away the current code block into the list of
+ * code blocks ready to be exported.
+ */
+ private stash() {
+ if (this.currentBlock.code.length > 0) {
+ this.blocks.push(this.currentBlock);
+ this.currentBlock = new CodeBlock();
+ }
+ }
+
+ public async append(
+ event: CartridgeEvent.FileStart,
+ tokens?: Token[],
+ comments?: Token[],
+ ): Promise;
+ public async append(
+ event: CartridgeEvent.InlineComment,
+ tokens: [Token],
+ comments: Token[],
+ ): Promise;
+ public async append(
+ event: CartridgeEvent.MultilineComment,
+ tokens: [Token],
+ comments: Token[],
+ ): Promise;
+ public async append(
+ event: CartridgeEvent.Load,
+ tokens: Token[],
+ comments: Token[],
+ value: undefined,
+ source: string,
+ ...dependencies: string[]
+ ): Promise;
+ public async append(
+ event: CartridgeEvent.StructOpen,
+ tokens: Token[],
+ comments: Token[],
+ value?: PropertyDefinition,
+ ): Promise;
+ public async append(
+ event: CartridgeEvent.SetProperty,
+ tokens: Token[],
+ comments: Token[],
+ ): Promise;
+ public async append(
+ event: CartridgeEvent.StructClose,
+ tokens: [Token],
+ comments: Token[],
+ ): Promise;
+ public async append(
+ event: CartridgeEvent.FileEnd,
+ tokens?: Token[],
+ comments?: Token[],
+ ): Promise;
+ public async append(
+ event: CartridgeEvent,
+ tokens: Token[] = [],
+ comments: Token[] = [],
+ value?: PropertyDefinition,
+ ...rest: string[]
+ ): Promise {
+ let code: string | void | null;
+
+ switch (event) {
+ case CartridgeEvent.FileStart: {
+ code = await this.cartridge.dispatch(
+ CartridgeEvent.FileStart,
+ makeFileStartEventContext(this.currentBlock, tokens),
+ );
+ this.stash();
+ break;
+ }
+
+ case CartridgeEvent.InlineComment: {
+ code = await this.cartridge.dispatch(
+ CartridgeEvent.InlineComment,
+ makeInlineCommentEventContext(this.currentBlock, tokens),
+ );
+ break;
+ }
+
+ case CartridgeEvent.MultilineComment: {
+ code = await this.cartridge.dispatch(
+ CartridgeEvent.MultilineComment,
+ makeMultilineCommentEventContext(this.currentBlock, tokens),
+ );
+ break;
+ }
+
+ case CartridgeEvent.Load: {
+ const [source, ...dependencies] = rest;
+ code = await this.cartridge.dispatch(
+ CartridgeEvent.Load,
+ makeLoadEventContext(
+ this.currentBlock,
+ tokens,
+ comments,
+ source,
+ dependencies,
+ ),
+ );
+ break;
+ }
+
+ case CartridgeEvent.StructOpen: {
+ code = await this.cartridge.dispatch(
+ CartridgeEvent.StructOpen,
+ makeStructOpenEventContext(
+ this.currentBlock,
+ tokens,
+ comments,
+ value?.value,
+ ),
+ );
+ this.indentLevel++;
+ break;
+ }
+
+ case CartridgeEvent.SetProperty: {
+ const [name] = rest;
+ code = await this.cartridge.dispatch(
+ CartridgeEvent.SetProperty,
+ makeSetPropertyEventContext(
+ this.currentBlock,
+ tokens,
+ comments,
+ name,
+ value as PropertyDefinition,
+ ),
+ );
+ break;
+ }
+
+ case CartridgeEvent.StructClose: {
+ if (--this.indentLevel === 0) this.stash();
+ code = await this.cartridge.dispatch(
+ CartridgeEvent.StructClose,
+ makeStructCloseEventContext(this.currentBlock, tokens),
+ );
+ break;
+ }
+
+ case CartridgeEvent.FileEnd: {
+ this.stash();
+ code = await this.cartridge.dispatch(
+ CartridgeEvent.FileEnd,
+ makeFileEndEventContext(this.currentBlock, tokens),
+ );
+ break;
+ }
+ }
+
+ if (typeof code === "string") {
+ this.currentBlock.append(code);
+ }
+ }
+
+ export(indent: IndentOption = Indent.Space2): string {
+ this.stash();
+ return CodeBlock.join(indent, ...this.blocks);
+ }
+}
diff --git a/lib/transpile/text_builder/utils.test.ts b/lib/transpile/text_builder/utils.test.ts
new file mode 100644
index 0000000..b6f936c
--- /dev/null
+++ b/lib/transpile/text_builder/utils.test.ts
@@ -0,0 +1,193 @@
+import { assertEquals } from "../../../deps/std/testing.ts";
+import { T, Token } from "../tokenize/mod.ts";
+import { CodeBlock } from "../code_block/mod.ts";
+import {
+ cleanComment,
+ makeFileEndEventContext,
+ makeFileStartEventContext,
+ makeInlineCommentEventContext,
+ makeLoadEventContext,
+ makeMultilineCommentEventContext,
+ makeSetPropertyEventContext,
+ makeStructCloseEventContext,
+ makeStructOpenEventContext,
+} from "./utils.ts";
+import { CartridgeEvent } from "../cartridge/mod.ts";
+import type {
+ CartridgeEventContext,
+ PropertyDefinition,
+} from "../cartridge/mod.ts";
+
+Deno.test("cleans inlined comments to extract text content", () => {
+ const expectation = ["example"];
+ const reality = cleanComment(T.comment("; example", 1, 1));
+ assertEquals(expectation, reality);
+});
+
+Deno.test("cleans multi-inlined comments to extract text content", () => {
+ const expectation = ["example"];
+ const reality = cleanComment(T.multiline_comment("/* example */", 1, 1));
+ assertEquals(expectation, reality);
+});
+
+Deno.test("cleans multi-inlined comments to extract text content (omits whitespace on edges)", () => {
+ const expectation = ["example"];
+ const reality = cleanComment(T.multiline_comment(
+ `/*
+ example
+*/`,
+ 1,
+ 1,
+ ));
+ assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'file_end' event context object", () => {
+ const code = new CodeBlock();
+ const data = null;
+ const tokens: Token[] = [];
+ const expectation: CartridgeEventContext = {
+ type: CartridgeEvent.FileEnd,
+ code,
+ data,
+ tokens,
+ };
+ const reality = makeFileEndEventContext(code, tokens);
+ assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'file_start' event context object", () => {
+ const code = new CodeBlock();
+ const data = null;
+ const tokens: Token[] = [];
+ const expectation: CartridgeEventContext = {
+ type: CartridgeEvent.FileStart,
+ code,
+ data,
+ tokens,
+ };
+ const reality = makeFileStartEventContext(code, tokens);
+ assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'inline_comment' event context object", () => {
+ const code = new CodeBlock();
+ const tokens: Token[] = [T.comment("; example", 1, 1)];
+ const expectation: CartridgeEventContext = {
+ type: CartridgeEvent.InlineComment,
+ code,
+ data: {
+ comments: ["example"],
+ },
+ tokens,
+ };
+ const reality = makeInlineCommentEventContext(code, tokens);
+ assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'multiline_comment' event context object", () => {
+ const code = new CodeBlock();
+ const tokens: Token[] = [T.comment("; example", 1, 1)];
+ const expectation: CartridgeEventContext = {
+ type: CartridgeEvent.MultilineComment,
+ code,
+ tokens,
+ data: {
+ comments: ["example"],
+ },
+ };
+ const reality = makeMultilineCommentEventContext(code, tokens);
+ assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'load' event context object", () => {
+ const code = new CodeBlock();
+ const source = "./example.fart";
+ const dependencies = ["Example1", "Example2", "Example3"];
+ const tokens: Token[] = [
+ T.load(1, 1),
+ T.text_1(source, 1, 6),
+ T.nest(1, 23),
+ T.id("Example1", 2, 3),
+ T.separator(2, 11),
+ T.id("Example2", 3, 3),
+ T.separator(3, 11),
+ T.id("Example3", 4, 3),
+ T.separator(4, 11),
+ T.denest(5, 1),
+ ];
+ const expectation: CartridgeEventContext = {
+ type: CartridgeEvent.Load,
+ code,
+ tokens,
+ data: { source, dependencies, comments: [] },
+ };
+ const reality = makeLoadEventContext(
+ code,
+ tokens,
+ /*comments=*/ [],
+ source,
+ dependencies,
+ );
+ assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'set_property' event context object", () => {
+ const code = new CodeBlock();
+ const name = "property";
+ const definition: PropertyDefinition = { value: "number" };
+ const tokens: Token[] = [
+ T.id(name, 2, 3),
+ T.setter_1(2, 11),
+ T.id("number", 2, 13),
+ ];
+ const expectation: CartridgeEventContext = {
+ type: CartridgeEvent.SetProperty,
+ code,
+ tokens,
+ data: { name, definition, comments: [] },
+ };
+ const reality = makeSetPropertyEventContext(
+ code,
+ tokens,
+ /*comments=*/ [],
+ name,
+ definition,
+ );
+ assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'struct_close' event context object", () => {
+ const code = new CodeBlock();
+ const tokens: Token[] = [];
+ const expectation: CartridgeEventContext = {
+ type: CartridgeEvent.StructClose,
+ code,
+ tokens,
+ data: null,
+ };
+ const reality = makeStructCloseEventContext(code, tokens);
+ assertEquals(expectation, reality);
+});
+
+Deno.test("successfully makes a 'struct_open' event context object", () => {
+ const code = new CodeBlock();
+ const tokens: Token[] = [T.type(1, 1), T.id("Example", 1, 6), T.nest(1, 14)];
+ const name = "Example";
+ const expectation: CartridgeEventContext = {
+ type: CartridgeEvent.StructOpen,
+ code,
+ tokens,
+ data: {
+ name,
+ comments: [],
+ },
+ };
+ const reality = makeStructOpenEventContext(
+ code,
+ tokens,
+ /*comments=*/ [],
+ name,
+ );
+ assertEquals(expectation, reality);
+});
diff --git a/lib/transpile/text_builder/utils.ts b/lib/transpile/text_builder/utils.ts
new file mode 100644
index 0000000..34d1929
--- /dev/null
+++ b/lib/transpile/text_builder/utils.ts
@@ -0,0 +1,126 @@
+import type { CodeBlock } from "../code_block/mod.ts";
+import type { Token } from "../tokenize/mod.ts";
+import { Lexicon } from "../tokenize/mod.ts";
+import {
+ CartridgeEvent,
+ CartridgeEventContext,
+ PropertyDefinition,
+} from "../cartridge/mod.ts";
+
+/**
+ * Normalizes a comment token into an array of trimmed comment-text lines.
+ * Tokens of any kind other than Lexicon.InlineComment or
+ * Lexicon.MultilineComment produce an empty array.
+ *
+ * @param commentToken expects a comment token (Lexicon.InlineComment | Lexicon.MultilineComment)
+ * @returns an array of strings, each string is a line of the comment; intended to be used in
+ * conjunction with the `flatMap` method.
+ * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/flatMap
+ */
+export const cleanComment = (commentToken: Token): string[] => {
+  const trimmedCommentLines: string[] = [];
+  switch (commentToken.kind) {
+    case Lexicon.InlineComment: {
+      const rawComment = commentToken.value;
+      // Defensively keep only the text before a line break, should the
+      // token's value still contain one.
+      const lineBreakIndex = rawComment.indexOf("\n");
+      const inlineCommentContent = rawComment
+        .slice(0, lineBreakIndex > -1 ? lineBreakIndex : rawComment.length)
+        .trim();
+      trimmedCommentLines.push(inlineCommentContent);
+      break;
+    }
+    case Lexicon.MultilineComment: {
+      // Multiline comments contribute one (trimmed) entry per source line.
+      const rawCommentLines = commentToken.value.split("\n");
+      rawCommentLines.forEach((rawCommentLine) => {
+        // TODO: push only if the line is not a blank edge
+        trimmedCommentLines.push(rawCommentLine.trim());
+      });
+      break;
+    }
+  }
+  return trimmedCommentLines;
+};
+
+/** Makes the context for CartridgeEvent.FileStart; carries no payload data. */
+export const makeFileStartEventContext = (
+  code: CodeBlock,
+  tokens: Token[],
+): CartridgeEventContext => ({
+  type: CartridgeEvent.FileStart,
+  code,
+  tokens,
+  data: null,
+});
+
+/** Makes the context for CartridgeEvent.InlineComment; all tokens are cleaned as comments. */
+export const makeInlineCommentEventContext = (
+  code: CodeBlock,
+  tokens: Token[],
+): CartridgeEventContext => ({
+  type: CartridgeEvent.InlineComment,
+  code,
+  tokens,
+  data: { comments: tokens.flatMap(cleanComment) },
+});
+
+/** Makes the context for CartridgeEvent.MultilineComment; all tokens are cleaned as comments. */
+export const makeMultilineCommentEventContext = (
+  code: CodeBlock,
+  tokens: Token[],
+): CartridgeEventContext => ({
+  type: CartridgeEvent.MultilineComment,
+  code,
+  tokens,
+  data: { comments: tokens.flatMap(cleanComment) },
+});
+
+/** Makes the context for CartridgeEvent.Load with its source path and dependency names. */
+export const makeLoadEventContext = (
+  code: CodeBlock,
+  tokens: Token[],
+  comments: Token[],
+  source: string,
+  dependencies: string[],
+): CartridgeEventContext => ({
+  type: CartridgeEvent.Load,
+  code,
+  tokens,
+  data: { comments: comments.flatMap(cleanComment), source, dependencies },
+});
+
+/** Makes the context for CartridgeEvent.StructOpen; `name` is optional (anonymous structs). */
+export const makeStructOpenEventContext = (
+  code: CodeBlock,
+  tokens: Token[],
+  comments: Token[],
+  name?: string,
+): CartridgeEventContext => ({
+  type: CartridgeEvent.StructOpen,
+  code,
+  tokens,
+  data: { name, comments: comments.flatMap(cleanComment) },
+});
+
+/** Makes the context for CartridgeEvent.SetProperty with its name and definition. */
+export const makeSetPropertyEventContext = (
+  code: CodeBlock,
+  tokens: Token[],
+  comments: Token[],
+  name: string,
+  definition: PropertyDefinition,
+): CartridgeEventContext => ({
+  type: CartridgeEvent.SetProperty,
+  code,
+  tokens,
+  data: { name, comments: comments.flatMap(cleanComment), definition },
+});
+
+/** Makes the context for CartridgeEvent.StructClose; carries no payload data. */
+export const makeStructCloseEventContext = (
+  code: CodeBlock,
+  tokens: Token[],
+): CartridgeEventContext => ({
+  type: CartridgeEvent.StructClose,
+  code,
+  tokens,
+  data: null,
+});
+
+/** Makes the context for CartridgeEvent.FileEnd; carries no payload data. */
+export const makeFileEndEventContext = (
+  code: CodeBlock,
+  tokens: Token[],
+): CartridgeEventContext => ({
+  type: CartridgeEvent.FileEnd,
+  code,
+  tokens,
+  data: null,
+});
diff --git a/lib/transpile/tokenize/lexicon.test.ts b/lib/transpile/tokenize/lexicon.test.ts
new file mode 100644
index 0000000..58328d2
--- /dev/null
+++ b/lib/transpile/tokenize/lexicon.test.ts
@@ -0,0 +1,12 @@
+import { assertThrows } from "../../../deps/std/testing.ts";
+
+import { LEXICON } from "./lexicon.ts";
+
+// LEXICON is typed as a ReadonlyMap, so the mutating members are absent at
+// the type level (hence the @ts-expect-error directives); the runtime
+// overrides installed in lexicon.ts must also throw.
+Deno.test("LEXICON is a frozen map", () => {
+  // @ts-expect-error: set throws intentionally
+  assertThrows(() => LEXICON.set("a", "b"));
+  // @ts-expect-error: delete throws intentionally
+  assertThrows(() => LEXICON.delete("a"));
+  // @ts-expect-error: clear throws intentionally
+  assertThrows(() => LEXICON.clear()); // fixed: clear() takes no arguments
+});
diff --git a/lib/transpile/tokenize/lexicon.ts b/lib/transpile/tokenize/lexicon.ts
new file mode 100644
index 0000000..573565f
--- /dev/null
+++ b/lib/transpile/tokenize/lexicon.ts
@@ -0,0 +1,59 @@
+/** Every kind of token recognized by the Fart tokenizer. */
+export enum Lexicon {
+  Identifier,
+  Load,
+  StructOpener,
+  StructCloser,
+  TupleOpener,
+  TupleCloser,
+  TypeDefiner,
+  PropertyDefiner,
+  PropertyOptionalMarker,
+  PropertyOptionalDefiner,
+  Modifier,
+  TextWrapper,
+  TextLiteral,
+  InlineComment,
+  MultilineComment,
+  Separator,
+  Whitespace,
+  Unknown,
+  EOF,
+}
+
+// Fixed: `ReadonlyMap` requires its type arguments (bare `ReadonlyMap` does
+// not compile). A null lexeme means the kind is recognized by shape (e.g.
+// identifiers, literals, comments) rather than by a fixed string; an array
+// lists interchangeable aliases.
+export type LexiconType = ReadonlyMap<Lexicon, string | string[] | null>;
+
+export const LEXICON: LexiconType = new Map<
+  Lexicon,
+  string | string[] | null
+>([
+  [Lexicon.Identifier, null],
+  [Lexicon.Load, "load"],
+  [Lexicon.StructOpener, "{"],
+  [Lexicon.StructCloser, "}"],
+  [Lexicon.TupleOpener, "("],
+  [Lexicon.TupleCloser, ")"],
+  [Lexicon.TypeDefiner, ["type", "spec"]],
+  [Lexicon.PropertyDefiner, ":"],
+  [Lexicon.PropertyOptionalMarker, "?"],
+  [Lexicon.PropertyOptionalDefiner, "?:"],
+  [Lexicon.Modifier, "%"],
+  [Lexicon.TextWrapper, ['"', "'", "`"]],
+  [Lexicon.TextLiteral, null],
+  [Lexicon.InlineComment, null],
+  [Lexicon.MultilineComment, null],
+  [Lexicon.Separator, ","],
+  [Lexicon.Whitespace, " "],
+  [Lexicon.Unknown, null],
+  [Lexicon.EOF, "\n"],
+]);
+
+// force-freezing LEXICON map into place, courtesy of https://stackoverflow.com/a/35776333
+// Fixed: the `Map` casts below also require explicit type arguments.
+(LEXICON as Map<Lexicon, string | string[] | null>).set = function (key) {
+  throw new Error(`Can't add property ${key}, map is not extensible`);
+};
+(LEXICON as Map<Lexicon, string | string[] | null>).delete = function (key) {
+  throw new Error(`Can't delete property ${key}, map is frozen`);
+};
+(LEXICON as Map<Lexicon, string | string[] | null>).clear = function () {
+  throw new Error("Can't clear map, map is frozen");
+};
diff --git a/lib/transpile/tokenize/mod.ts b/lib/transpile/tokenize/mod.ts
new file mode 100644
index 0000000..d8959f8
--- /dev/null
+++ b/lib/transpile/tokenize/mod.ts
@@ -0,0 +1,6 @@
+// Public surface of the tokenize module (values and type-only exports).
+export type { FartTokenGenerator } from "./tokenize.ts";
+export { tokenize } from "./tokenize.ts";
+export { Token } from "./token.ts";
+export { LEXICON, Lexicon } from "./lexicon.ts";
+export { T } from "./t.ts";
+export type { LexiconAliasLayer } from "./t.ts";
diff --git a/lib/transpile/tokenize/t.test.ts b/lib/transpile/tokenize/t.test.ts
new file mode 100644
index 0000000..b89f705
--- /dev/null
+++ b/lib/transpile/tokenize/t.test.ts
@@ -0,0 +1,8 @@
+import { assertEquals } from "../../../deps/std/testing.ts";
+
+import { T } from "./t.ts";
+
+// TODO: Generate tests for each LexiconAliasLayer method.
+// NOTE(review): placeholder so this file registers with the test runner;
+// the assertion below is trivially true by identity and exercises nothing.
+Deno.test("TODO", () => {
+  assertEquals(T, T);
+});
diff --git a/lib/transpile/tokenize/t.ts b/lib/transpile/tokenize/t.ts
new file mode 100644
index 0000000..dde33cb
--- /dev/null
+++ b/lib/transpile/tokenize/t.ts
@@ -0,0 +1,92 @@
+// deno-lint-ignore-file camelcase
+// This file simply exports an object which contains lightweight
+// functions for creating Token instances with fewer keystrokes;
+// used primarily for testing-purposes.
+
+import { LEXICON, Lexicon } from "./lexicon.ts";
+import { Token } from "./token.ts";
+
+type SimpleTokenMaker = (line: number, col: number) => Token;
+type SpecialTokenMaker = (raw: string, line: number, col: number) => Token;
+
+export interface LexiconAliasLayer {
+  /** `___` — identifier */
+  id: SpecialTokenMaker;
+  /** `load` — load statement keyword */
+  load: SimpleTokenMaker;
+  /** `{` — struct opener */
+  nest: SimpleTokenMaker;
+  /** `}` — struct closer */
+  denest: SimpleTokenMaker;
+  /** `(` — tuple opener */
+  open_tuple: SimpleTokenMaker;
+  /** `)` — tuple closer */
+  close_tuple: SimpleTokenMaker;
+  /** `type` — type definer */
+  type: SimpleTokenMaker;
+  /** `spec` — type definer alias */
+  spec: SimpleTokenMaker;
+  /** `?` — optional marker */
+  optional: SimpleTokenMaker;
+  /** `:` — required setter */
+  setter_1: SimpleTokenMaker;
+  /** `?:` — optional setter */
+  setter_2: SimpleTokenMaker;
+  /** `%` — modifier */
+  mod: SimpleTokenMaker;
+  /** `,` — separator */
+  separator: SimpleTokenMaker;
+  /** `"___"` — text literal (do not include quotes) */
+  text_1: SpecialTokenMaker;
+  /** `'___'` — text literal (do not include quotes) */
+  text_2: SpecialTokenMaker;
+  /** ```
+   * `___`
+   * ``` — text literal (do not include quotes) */
+  text_3: SpecialTokenMaker;
+  /** `;___` — comment (include semicolon) */
+  comment: SpecialTokenMaker;
+  /** `/* ___ *\/` — comment (include edges) */
+  multiline_comment: SpecialTokenMaker;
+  /** unknown */
+  unknown: SpecialTokenMaker;
+}
+
+// Builds a token straight from its raw text; used for kinds whose raw
+// value varies (identifiers, comments, unknowns).
+const makeSpecialToken: SpecialTokenMaker = (raw, line, col) =>
+  new Token(raw, line, col);
+
+// Fixed lexeme strings looked up once from LEXICON so the makers in `T`
+// below avoid repeating the lookup per call.
+const LOAD = LEXICON.get(Lexicon.Load) as string;
+const NEST = LEXICON.get(Lexicon.StructOpener) as string;
+const DENEST = LEXICON.get(Lexicon.StructCloser) as string;
+const OPEN_TUPLE = LEXICON.get(Lexicon.TupleOpener) as string;
+const CLOSE_TUPLE = LEXICON.get(Lexicon.TupleCloser) as string;
+const [TYPE, SPEC] = LEXICON.get(Lexicon.TypeDefiner) as [string, string];
+const OPTIONAL = LEXICON.get(Lexicon.PropertyOptionalMarker) as string;
+const SETTER_1 = LEXICON.get(Lexicon.PropertyDefiner) as string;
+const SETTER_2 = LEXICON.get(Lexicon.PropertyOptionalDefiner) as string;
+const MODIFIER = LEXICON.get(Lexicon.Modifier) as string;
+const SEPARATOR = LEXICON.get(Lexicon.Separator) as string;
+
+/**
+ * Shorthand Token factories, one per LexiconAliasLayer member; primarily a
+ * convenience for writing terse tokenizer tests.
+ *
+ * The redundant trailing `as const` was dropped: the explicit
+ * `LexiconAliasLayer` annotation already fixes the object's type, so the
+ * assertion added nothing (declarations over assertions).
+ */
+export const T: LexiconAliasLayer = {
+  id: makeSpecialToken,
+  load: (line, col) => new Token(LOAD, line, col),
+  nest: (line, col) => new Token(NEST, line, col),
+  denest: (line, col) => new Token(DENEST, line, col),
+  open_tuple: (line, col) => new Token(OPEN_TUPLE, line, col),
+  close_tuple: (line, col) => new Token(CLOSE_TUPLE, line, col),
+  type: (line, col) => new Token(TYPE, line, col),
+  spec: (line, col) => new Token(SPEC, line, col),
+  optional: (line, col) => new Token(OPTIONAL, line, col),
+  setter_1: (line, col) => new Token(SETTER_1, line, col),
+  setter_2: (line, col) => new Token(SETTER_2, line, col),
+  mod: (line, col) => new Token(MODIFIER, line, col),
+  separator: (line, col) => new Token(SEPARATOR, line, col),
+  text_1: (raw, line, col) => new Token(`"${raw}"`, line, col),
+  text_2: (raw, line, col) => new Token(`'${raw}'`, line, col),
+  text_3: (raw, line, col) => new Token(`\`${raw}\``, line, col),
+  comment: makeSpecialToken,
+  multiline_comment: makeSpecialToken,
+  unknown: makeSpecialToken,
+};
+
+export default T;
diff --git a/lib/transpile/tokenize/token.test.ts b/lib/transpile/tokenize/token.test.ts
new file mode 100644
index 0000000..32097af
--- /dev/null
+++ b/lib/transpile/tokenize/token.test.ts
@@ -0,0 +1,23 @@
+import { assert, assertEquals } from "../../../deps/std/testing.ts";
+import { Token } from "./token.ts";
+import { Lexicon } from "./lexicon.ts";
+
+// Smoke tests for Token construction, classification, and stringification.
+Deno.test("creates a token with an empty string without crashing", () => {
+  assertEquals(new Token("").kind, Lexicon.Unknown);
+});
+
+Deno.test("tokens can be accurately classified", () => {
+  assert(new Token("").is(Lexicon.Unknown));
+});
+
+Deno.test("raw strings can be accurately classified as a kind of token", () => {
+  assertEquals(Token.getKindOf(""), Lexicon.Unknown);
+});
+
+Deno.test("inherits the value of a token from its raw value", () => {
+  assertEquals(new Token("type").value, "type");
+});
+
+Deno.test("tokens are stringified based on their computed value property", () => {
+  assertEquals(new Token("type").toString(), "type");
+});
diff --git a/lib/transpile/tokenize/token.ts b/lib/transpile/tokenize/token.ts
new file mode 100644
index 0000000..9c83569
--- /dev/null
+++ b/lib/transpile/tokenize/token.ts
@@ -0,0 +1,57 @@
+import { LEXICON, Lexicon } from "./lexicon.ts";
+import {
+ checkIsIdentifier,
+ checkIsInlineComment,
+ checkIsMultilineComment,
+ checkIsTextLiteral,
+ findInLexicon,
+} from "./utils.ts";
+
+/**
+ * A single lexical unit of Fart source text, tagged with its Lexicon kind
+ * and its 1-based line/column position (-1 when the position is unknown).
+ */
+export class Token {
+  public kind: Lexicon | null = null;
+
+  constructor(
+    private raw: string,
+    public line = -1,
+    public column = -1,
+    noCheck = false,
+  ) {
+    // `noCheck` skips classification and assumes the raw text is an
+    // identifier (presumably a fast path for pre-classified input —
+    // TODO(review): confirm with callers).
+    this.kind = noCheck ? Lexicon.Identifier : Token.getKindOf(raw);
+  }
+
+  /** Whether this token was classified as the given kind. */
+  is(kind: Lexicon | null): boolean {
+    return this.kind === kind;
+  }
+
+  toString() {
+    return this.value;
+  }
+
+  /**
+   * The semantic value of the token: the raw text with surrounding
+   * text/comment markers stripped, or the raw text itself otherwise.
+   */
+  get value(): string {
+    switch (this.kind) {
+      case Lexicon.TextLiteral: {
+        // strips expected text markers from beginning and end of input string
+        return this.raw.slice(1, this.raw.length - 1);
+      }
+      case Lexicon.InlineComment: {
+        // drop the leading ';' and surrounding whitespace
+        return this.raw.slice(1).trim();
+      }
+      case Lexicon.MultilineComment: {
+        // drop the '/*' and '*/' edges and surrounding whitespace
+        return this.raw.slice(2, this.raw.length - 2).trim();
+      }
+      default: {
+        return this.raw;
+      }
+    }
+  }
+
+  /** Classifies raw text first against LEXICON, then by shape checks. */
+  static getKindOf(raw: string): Lexicon {
+    const matchingKind = findInLexicon(raw, LEXICON);
+    if (matchingKind !== null) return matchingKind;
+    if (checkIsIdentifier(raw)) return Lexicon.Identifier;
+    if (checkIsTextLiteral(raw)) return Lexicon.TextLiteral;
+    if (checkIsInlineComment(raw)) return Lexicon.InlineComment;
+    if (checkIsMultilineComment(raw)) return Lexicon.MultilineComment;
+    return Lexicon.Unknown;
+  }
+}
diff --git a/lib/transpile/tokenize/tokenize.test.ts b/lib/transpile/tokenize/tokenize.test.ts
new file mode 100644
index 0000000..1256762
--- /dev/null
+++ b/lib/transpile/tokenize/tokenize.test.ts
@@ -0,0 +1,333 @@
+import { T } from "./t.ts";
+import { Token } from "./token.ts";
+import { tokenize } from "./tokenize.ts";
+import { assertEquals } from "../../../deps/std/testing.ts";
+
+// NOTE: the tests in this file are data-driven — each feeds a source snippet
+// to `tokenize` and compares the yielded tokens (including exact line/column
+// positions) against hand-built expectations via the `T` alias layer.
+Deno.test("yields no tokens given an empty string", () => {
+  const input = "";
+  const expectation: Token[] = [];
+  const reality = [...tokenize(input)];
+  assertEquals(expectation, reality);
+});
+
+Deno.test("yields a single token `type`", () => {
+  const input = "type";
+  const expectation = [T.type(1, 1)];
+  const reality = [...tokenize(input)];
+  assertEquals(expectation, reality);
+});
+
+Deno.test("yields a `type` definition (one property)", () => {
+ const input = `type Example {
+ property: string
+}`;
+ const expectation = [
+ T.type(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 14),
+ T.id("property", 2, 3),
+ T.setter_1(2, 11),
+ T.id("string", 2, 13),
+ T.denest(3, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields a `type` definition (multiple properties)", () => {
+ const input = `type Example {
+ property1: string
+ property2: number
+ property3: boolean
+}`;
+ const expectation = [
+ T.type(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 14),
+ T.id("property1", 2, 3),
+ T.setter_1(2, 12),
+ T.id("string", 2, 14),
+ T.id("property2", 3, 3),
+ T.setter_1(3, 12),
+ T.id("number", 3, 14),
+ T.id("property3", 4, 3),
+ T.setter_1(4, 12),
+ T.id("boolean", 4, 14),
+ T.denest(5, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields a `type` definition (with optional setter)", () => {
+ const input = `type Example {
+ optionalProperty?: string
+}`;
+ const expectation = [
+ T.type(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 14),
+ T.id("optionalProperty", 2, 3),
+ T.setter_2(2, 19),
+ T.id("string", 2, 22),
+ T.denest(3, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields a `type` definition (with array modifier)", () => {
+ const input = `type Example {
+ property: array % boolean
+}`;
+ const expectation = [
+ T.type(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 14),
+ T.id("property", 2, 3),
+ T.setter_1(2, 11),
+ T.id("array", 2, 13),
+ T.mod(2, 19),
+ T.id("boolean", 2, 21),
+ T.denest(3, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields a `type` definition (with array modifier and tuple)", () => {
+ const input = `type Example {
+ property: array % (boolean)
+}`;
+ const expectation = [
+ T.type(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 14),
+ T.id("property", 2, 3),
+ T.setter_1(2, 11),
+ T.id("array", 2, 13),
+ T.mod(2, 19),
+ T.open_tuple(2, 21),
+ T.id("boolean", 2, 22),
+ T.close_tuple(2, 29),
+ T.denest(3, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields a method expecting an unnamed boolean and returning void", () => {
+ const input = `type Example {
+ method: fn % (boolean)
+}`;
+ const expectation = [
+ T.type(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 14),
+ T.id("method", 2, 3),
+ T.setter_1(2, 9),
+ T.id("fn", 2, 11),
+ T.mod(2, 14),
+ T.open_tuple(2, 16),
+ T.id("boolean", 2, 17),
+ T.close_tuple(2, 24),
+ T.denest(3, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("`spec` can be used as an alias for keyword `type`", () => {
+ const input = `spec Example {
+ method: fn % (boolean)
+}`;
+ const expectation = [
+ T.spec(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 14),
+ T.id("method", 2, 3),
+ T.setter_1(2, 9),
+ T.id("fn", 2, 11),
+ T.mod(2, 14),
+ T.open_tuple(2, 16),
+ T.id("boolean", 2, 17),
+ T.close_tuple(2, 24),
+ T.denest(3, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields a method expecting a named input and returning a boolean", () => {
+ const input = `spec Example {
+ method: fn % (input: string, boolean)
+}`;
+ const expectation = [
+ T.spec(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 14),
+ T.id("method", 2, 3),
+ T.setter_1(2, 9),
+ T.id("fn", 2, 11),
+ T.mod(2, 14),
+ T.open_tuple(2, 16),
+ T.id("input", 2, 17),
+ T.setter_1(2, 22),
+ T.id("string", 2, 24),
+ T.separator(2, 30),
+ T.id("boolean", 2, 32),
+ T.close_tuple(2, 39),
+ T.denest(3, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields each comment as a special token", () => {
+ const input = `; this comment is above \`Example\`
+spec Example {
+ ; this comment is above \`method\`
+ method: fn % (boolean)
+}`;
+ const expectation = [
+ T.comment("; this comment is above `Example`", 1, 1),
+ T.spec(2, 1),
+ T.id("Example", 2, 6),
+ T.nest(2, 14),
+ T.comment("; this comment is above `method`", 3, 3),
+ T.id("method", 4, 3),
+ T.setter_1(4, 9),
+ T.id("fn", 4, 11),
+ T.mod(4, 14),
+ T.open_tuple(4, 16),
+ T.id("boolean", 4, 17),
+ T.close_tuple(4, 24),
+ T.denest(5, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields each multiline comment as a special token", () => {
+ const input = `/**
+ * this comment is above \`Example\`
+ */
+spec Example {
+ /**
+ * this comment is above \`method\`
+ */
+ method: fn % (boolean)
+}`;
+ const expectation = [
+ T.multiline_comment(
+ `/**
+ * this comment is above \`Example\`
+ */`,
+ 1,
+ 1,
+ ),
+ T.spec(4, 1),
+ T.id("Example", 4, 6),
+ T.nest(4, 14),
+ T.multiline_comment(
+ `/**
+ * this comment is above \`method\`
+ */`,
+ 5,
+ 3,
+ ),
+ T.id("method", 8, 3),
+ T.setter_1(8, 9),
+ T.id("fn", 8, 11),
+ T.mod(8, 14),
+ T.open_tuple(8, 16),
+ T.id("boolean", 8, 17),
+ T.close_tuple(8, 24),
+ T.denest(9, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("tokenizes type definition successfully given minimized input", () => {
+ const input = `spec Example{method:fn%(input:string,boolean)}`;
+ const expectation = [
+ T.spec(1, 1),
+ T.id("Example", 1, 6),
+ T.nest(1, 13),
+ T.id("method", 1, 14),
+ T.setter_1(1, 20),
+ T.id("fn", 1, 21),
+ T.mod(1, 23),
+ T.open_tuple(1, 24),
+ T.id("input", 1, 25),
+ T.setter_1(1, 30),
+ T.id("string", 1, 31),
+ T.separator(1, 37),
+ T.id("boolean", 1, 38),
+ T.close_tuple(1, 45),
+ T.denest(1, 46),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("tokenizes type definition successfully given expanded input", () => {
+ const input = `; here we will define a method with an argument 'input'
+; of type string a and return type of type boolean array
+spec Example
+{
+ method: fn % (
+ input: string, ; here is the argument type
+ array % boolean, ; here is the return type
+ )
+}`;
+ const expectation = [
+ T.comment("; here we will define a method with an argument 'input'", 1, 1),
+ T.comment("; of type string a and return type of type boolean array", 2, 1),
+ T.spec(3, 1),
+ T.id("Example", 3, 6),
+ T.nest(4, 1),
+ T.id("method", 5, 3),
+ T.setter_1(5, 9),
+ T.id("fn", 5, 11),
+ T.mod(5, 14),
+ T.open_tuple(5, 16),
+ T.id("input", 6, 5),
+ T.setter_1(6, 10),
+ T.id("string", 6, 12),
+ T.separator(6, 18),
+ T.comment("; here is the argument type", 6, 22),
+ T.id("array", 7, 5),
+ T.mod(7, 11),
+ T.id("boolean", 7, 13),
+ T.separator(7, 20),
+ T.comment("; here is the return type", 7, 22),
+ T.close_tuple(8, 3),
+ T.denest(9, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
+
+Deno.test("yields tokens of proper `load` statement", () => {
+ const input = `load "./example.fart" {
+ Example1,
+ Example2,
+ Example3,
+}`;
+ const expectation = [
+ T.load(1, 1),
+ T.text_1("./example.fart", 1, 6),
+ T.nest(1, 23),
+ T.id("Example1", 2, 3),
+ T.separator(2, 11),
+ T.id("Example2", 3, 3),
+ T.separator(3, 11),
+ T.id("Example3", 4, 3),
+ T.separator(4, 11),
+ T.denest(5, 1),
+ ];
+ const reality = [...tokenize(input)];
+ assertEquals(expectation, reality);
+});
diff --git a/lib/transpile/tokenize/tokenize.ts b/lib/transpile/tokenize/tokenize.ts
new file mode 100644
index 0000000..7b424ab
--- /dev/null
+++ b/lib/transpile/tokenize/tokenize.ts
@@ -0,0 +1,179 @@
+import { LEXICON, Lexicon, LexiconType } from "./lexicon.ts";
+import { Token } from "./token.ts";
+import { findInLexicon } from "./utils.ts";
+
+/**
+ * Object used to memoize the process of properly tokenizing
+ * Fart syntax.
+ */
+interface TokenizationState {
+  char: null | string;
+  prevChar: null | string;
+  substr: string; // contains the current keyword or identifier being tokenized
+  prevSubstr: string;
+  line: number;
+  column: number;
+  oldColumn: number | null;
+  yieldingChar: boolean; // if true, yields character as token at end of iteration
+  yieldingSubstr: boolean; // if true, yields substring as token at end of iteration
+  yieldingInlineComment: boolean; // if true, yields substring as comment at end of line
+  yieldingMultilineComment: boolean; // if true, yields substring as comment at end of comment (*/)
+  breakingLine: boolean; // if true, updates line and column counts at end of iteration
+}
+
+export type FartTokenGenerator = Generator<
+  Token,
+  undefined,
+  string | undefined
+>;
+
+// Fixed: the bare `Readonly` annotation was missing its required type
+// argument; it now names the state shape being frozen.
+const INITIAL_TOKENIZATION_STATE: Readonly<TokenizationState> = Object.freeze({
+  char: null,
+  prevChar: null,
+  substr: "",
+  prevSubstr: "",
+  line: 1,
+  column: 1,
+  oldColumn: null,
+  yieldingChar: false,
+  yieldingSubstr: false,
+  yieldingInlineComment: false,
+  yieldingMultilineComment: false,
+  breakingLine: false,
+});
+
+/**
+ * Lazily yields `Token`s parsed from Fart source text, tracking 1-based
+ * line/column positions as it consumes the input one character at a time.
+ *
+ * @param input the Fart source text to tokenize
+ * @param lex the lexicon used to classify single characters (defaults to LEXICON)
+ */
+export function* tokenize(
+  input: string,
+  lex: LexiconType = LEXICON,
+): FartTokenGenerator {
+  const memo = { ...INITIAL_TOKENIZATION_STATE };
+
+  while (input.length > 0) {
+    memo.char = input[0];
+    memo.yieldingChar = INITIAL_TOKENIZATION_STATE.yieldingChar;
+    memo.yieldingSubstr = INITIAL_TOKENIZATION_STATE.yieldingSubstr;
+    memo.breakingLine = INITIAL_TOKENIZATION_STATE.breakingLine;
+
+    // this variable keeps track of whether or not all characters are
+    // included when building the substring or not.
+    const catchAllChars = memo.yieldingInlineComment ||
+      memo.yieldingMultilineComment;
+
+    switch (findInLexicon(memo.char, lex)) {
+      // when a line break occurs, increment the line count, set column back to initial,
+      // and the current substring should become a token.
+      case Lexicon.EOF: {
+        memo.breakingLine = true;
+        memo.yieldingSubstr = true;
+        break;
+      }
+      case Lexicon.StructOpener:
+      case Lexicon.StructCloser:
+      case Lexicon.TupleOpener:
+      case Lexicon.TupleCloser:
+      case Lexicon.PropertyDefiner:
+      case Lexicon.Modifier:
+      case Lexicon.Separator: {
+        memo.yieldingChar = true;
+        memo.yieldingSubstr = true;
+        break;
+      }
+      case Lexicon.PropertyOptionalMarker:
+      case Lexicon.Whitespace: {
+        memo.yieldingSubstr = true;
+        break;
+      }
+      default: {
+        if (!catchAllChars) memo.substr += memo.char;
+        break;
+      }
+    }
+
+    // yield and reset substring if substring is to be yielded
+    if (memo.yieldingSubstr && memo.substr.length > 0 && !catchAllChars) {
+      yield new Token(memo.substr, memo.line, memo.column - memo.substr.length);
+      memo.prevSubstr = memo.substr;
+      memo.substr = INITIAL_TOKENIZATION_STATE.substr;
+    }
+
+    // if the current character is to be yielded, it must be yielded
+    // _after_ the substring
+    if (memo.yieldingChar && memo.char !== null && !catchAllChars) {
+      // if a '?' comes before a ':', then they are combined and yielded as a `?:`
+      if (
+        findInLexicon(memo.prevChar, lex) === Lexicon.PropertyOptionalMarker &&
+        findInLexicon(memo.char, lex) === Lexicon.PropertyDefiner
+      ) {
+        yield new Token(memo.prevChar + memo.char, memo.line, memo.column - 1);
+      } else {
+        yield new Token(memo.char, memo.line, memo.column);
+      }
+    }
+
+    // if a '/*' occurs, then multiline comment mode is enabled
+    if (memo.prevChar === "/" && memo.char === "*") {
+      memo.yieldingMultilineComment = true;
+      memo.oldColumn = memo.column;
+      memo.substr = memo.substr.slice(0, memo.substr.length - 1); // offset substring
+      // if a '*/' occurs, then multiline comment mode is disabled
+    } else if (
+      memo.yieldingMultilineComment && memo.prevChar === "*" &&
+      memo.char === "/"
+    ) {
+      memo.substr += memo.char;
+      // the token is stamped with the line/column where the comment opened
+      const commentLines = memo.substr.split("\n").length - 1;
+      yield new Token(
+        memo.substr,
+        memo.line - commentLines,
+        (memo.oldColumn ?? 2) - 1,
+      );
+      memo.prevSubstr = memo.substr;
+      memo.substr = INITIAL_TOKENIZATION_STATE.substr;
+      memo.oldColumn = null;
+      memo.yieldingMultilineComment = false;
+      // if a ';' occurs, then inline comment mode is enabled
+    } else if (memo.char === ";") {
+      memo.yieldingInlineComment = true;
+      memo.substr = memo.substr.slice(0, memo.substr.length - 1); // offset substring
+    }
+
+    // when a line is broken, set the column count to it's initial
+    // value and increment the line count by one
+    if (memo.breakingLine) {
+      // if a line is broken in inline comment mode, then the comment
+      // is yielded
+      if (memo.yieldingInlineComment) {
+        yield new Token(
+          memo.substr,
+          memo.line,
+          memo.column - memo.substr.length,
+        );
+        memo.prevSubstr = memo.substr;
+        memo.substr = INITIAL_TOKENIZATION_STATE.substr;
+        memo.yieldingInlineComment = false;
+      }
+      memo.column = INITIAL_TOKENIZATION_STATE.column - 1;
+      memo.line++;
+    }
+
+    // if in inline/multiline comment mode or string literal mode, all
+    // characters are unconditionally included into the substring
+    if (memo.yieldingInlineComment || memo.yieldingMultilineComment) {
+      memo.substr += memo.char;
+    }
+
+    // column count is incremented per iteration
+    memo.column++;
+
+    // current character is discarded but set as previous.
+    memo.prevChar = memo.char;
+    input = input.slice(1);
+  }
+
+  // yield substring if one is left unresolved
+  if (memo.substr.length > 0) {
+    yield new Token(memo.substr, memo.line, memo.column - memo.substr.length);
+  }
+
+  return;
+}
diff --git a/lib/transpile/tokenize/utils.test.ts b/lib/transpile/tokenize/utils.test.ts
new file mode 100644
index 0000000..36c8fa7
--- /dev/null
+++ b/lib/transpile/tokenize/utils.test.ts
@@ -0,0 +1,116 @@
+import { assert, assertEquals } from "../../../deps/std/testing.ts";
+import {
+ checkIsIdentifier,
+ checkIsInlineComment,
+ checkIsMultilineComment,
+ checkIsTextLiteral,
+ findInLexicon,
+} from "./utils.ts";
+import { LEXICON, Lexicon } from "./lexicon.ts";
+
+Deno.test("finds correct index in lexicon", () => {
+ const expectation = Lexicon.TypeDefiner;
+ const reality = findInLexicon("type", LEXICON);
+ assertEquals(expectation, reality);
+});
+
+Deno.test("returns null when not found (or null) in lexicon", () => {
+ const expectation = null;
+ const reality = findInLexicon("not_in_LEXICON", LEXICON);
+ assertEquals(expectation, reality);
+ assertEquals(expectation, findInLexicon(null, LEXICON));
+});
+
+Deno.test("correctly checks identifier", () => {
+ assert(checkIsIdentifier("good"));
+});
+
+Deno.test("correctly checks identifier (inner dots are good)", () => {
+ assert(checkIsIdentifier("inner.dots.are.good"));
+});
+
+Deno.test("correctly checks identifier ('_' is good)", () => {
+ assert(checkIsIdentifier("_underscores_are_chill_anywhere_"));
+});
+
+Deno.test("correctly checks identifier ('$' is good)", () => {
+ assert(checkIsIdentifier("$_is_good_anywhere_$_$"));
+});
+
+Deno.test("correctly checks identifier (caps are good)", () => {
+ assert(checkIsIdentifier("CAPS_are_good_ANYWHERE"));
+});
+
+Deno.test("correctly checks identifier (emojis are good)", () => {
+  // NOTE(review): this is a copy of the CAPS case above — the input contains
+  // no emoji, and checkIsIdentifier's ASCII-only regex would reject one;
+  // confirm the intended behavior and update the input or the title.
+  assert(checkIsIdentifier("CAPS_are_good_ANYWHERE"));
+});
+
+Deno.test("correctly checks identifier (numbers are good)", () => {
+ assert(checkIsIdentifier("nums_are_good1234567890"));
+});
+
+Deno.test("correctly checks identifier (leading numbers are bad)", () => {
+ assert(!checkIsIdentifier("1leading_number_is_bad"));
+});
+
+Deno.test("correctly checks identifier (symbols are bad)", () => {
+ assert(!checkIsIdentifier("symbols_are_bad_Δ"));
+});
+
+Deno.test("correctly checks identifier (some special characters are bad)", () => {
+ assert(!checkIsIdentifier("bad!")); // contains '!'
+ assert(!checkIsIdentifier("bad@")); // contains '@'
+ assert(!checkIsIdentifier("bad#")); // contains '#'
+ assert(!checkIsIdentifier("bad^")); // contains '^'
+ assert(!checkIsIdentifier("bad&")); // contains '&'
+ assert(!checkIsIdentifier("bad*")); // contains '*'
+ assert(!checkIsIdentifier("bad|")); // contains '|'
+ assert(!checkIsIdentifier("bad+")); // contains '+'
+ assert(!checkIsIdentifier("bad=")); // contains '='
+});
+
+Deno.test("correctly checks identifier (outer dots are bad)", () => {
+ assert(!checkIsIdentifier(".outer.dots.are.bad."));
+});
+
+Deno.test("correctly checks identifier (hyphens are bad)", () => {
+ assert(!checkIsIdentifier("hyphens-are-bad"));
+});
+
+Deno.test("tokenizes text literal wrapped in backtick (`)", () => {
+ assert(checkIsTextLiteral("`example`"));
+});
+
+Deno.test("tokenizes text literal wrapped in single-quotes (')", () => {
+ assert(checkIsTextLiteral("'example'"));
+});
+
+Deno.test('tokenizes text literal wrapped in double-quotes (")', () => {
+ assert(checkIsTextLiteral('"example"'));
+});
+
+Deno.test("tokenizes multiline text literal", () => {
+ assert(
+ checkIsTextLiteral(`"example
+example
+example"`),
+ );
+});
+
+Deno.test("correctly checks text literal (non-matching quotes)", () => {
+ assert(!checkIsTextLiteral('"example`'));
+});
+
+Deno.test("correctly checks inline comment", () => {
+ assert(checkIsInlineComment("; example"));
+});
+
+Deno.test("correctly checks inline comment (not a comment)", () => {
+ assert(!checkIsInlineComment("example"));
+});
+
+Deno.test("correctly checks multiline comment", () => {
+ assert(checkIsMultilineComment(`/**
+ * example
+ */`));
+});
diff --git a/lib/transpile/tokenize/utils.ts b/lib/transpile/tokenize/utils.ts
new file mode 100644
index 0000000..4f0c1aa
--- /dev/null
+++ b/lib/transpile/tokenize/utils.ts
@@ -0,0 +1,30 @@
+import { Lexicon, LexiconType } from "./lexicon.ts";
+
+/**
+ * Finds the Lexicon kind whose lexeme (or one of its lexeme aliases)
+ * matches `raw`; returns null when `raw` is null or matches nothing.
+ */
+export const findInLexicon = (
+  raw: string | null,
+  lex: LexiconType,
+): Lexicon | null => {
+  if (raw === null) return null;
+  for (const [kind, value] of lex) {
+    if (Array.isArray(value) && value.includes(raw) || (raw === value)) {
+      return kind;
+    }
+  }
+  return null;
+};
+
+/**
+ * Whether `candidate` is a valid identifier: dot-separated segments of ASCII
+ * letters, digits, '_' and '$', where the first character is not a digit.
+ *
+ * Fixed: the previous pattern (`[a-zA-Z0-9._$]*` after the first character)
+ * accepted trailing and consecutive dots (e.g. "a.", "a..b"), contradicting
+ * the "outer dots are bad" contract in utils.test.ts; segments are now
+ * matched explicitly.
+ */
+export const checkIsIdentifier = (candidate: string): boolean =>
+  /^[a-zA-Z_$][a-zA-Z0-9_$]*(\.[a-zA-Z0-9_$]+)*$/.test(candidate);
+
+/** Whether `candidate` is wrapped in matching backticks or quotes. */
+export const checkIsTextLiteral = (candidate: string): boolean => {
+  // Require at least an opening and a closing mark so a lone quote
+  // character is not classified as a complete literal.
+  if (candidate.length < 2) return false;
+  return (candidate.startsWith("`") && candidate.endsWith("`")) ||
+    (candidate.startsWith("'") && candidate.endsWith("'")) ||
+    (candidate.startsWith('"') && candidate.endsWith('"'));
+};
+
+/** Whether `candidate` begins with the inline-comment marker ';'. */
+export const checkIsInlineComment = (candidate: string): boolean =>
+  candidate.startsWith(";");
+
+/** Whether `candidate` is wrapped in non-overlapping '/*' and '*\/' edges. */
+export const checkIsMultilineComment = (candidate: string): boolean => {
+  // Require length >= 4 so the '/*' and '*/' edges cannot overlap ("/*/").
+  return candidate.length >= 4 &&
+    candidate.startsWith("/*") && candidate.endsWith("*/");
+};
diff --git a/lib/transpile/transpile.test.ts b/lib/transpile/transpile.test.ts
new file mode 100644
index 0000000..58a298f
--- /dev/null
+++ b/lib/transpile/transpile.test.ts
@@ -0,0 +1,129 @@
+import { assertEquals } from "../../deps/std/testing.ts";
+import { TranspilationContext, transpile } from "./transpile.ts";
+import { Cartridge, CartridgeEvent } from "./cartridge/mod.ts";
+import type { CartridgeEventContext } from "./cartridge/mod.ts";
+import { tokenize } from "./tokenize/mod.ts";
+
+// Tests for transpile(): each case registers a fake Cartridge handler for
+// one CartridgeEvent and asserts that the handler's return value is what
+// the transpiler emits.
+// NOTE(review): `CartridgeEventContext` is used below without a type
+// argument; its generic parameter appears to have been stripped during
+// extraction — confirm against cartridge/mod.ts.
+Deno.test("create transpilation context without crashing", () => {
+ const iterator = tokenize("");
+ const cartridge = new Cartridge();
+ const ctx = new TranspilationContext(iterator, cartridge);
+ assertEquals(ctx.started, false);
+});
+
+Deno.test("empty input only fires file_start event and then file_end event", async () => {
+ const fakeCart = new Cartridge();
+ fakeCart.on(CartridgeEvent.FileStart, () => "ABC");
+ fakeCart.on(CartridgeEvent.FileEnd, () => "XYZ");
+ const result = await transpile("", fakeCart);
+ assertEquals(result, "ABC\n\nXYZ");
+});
+
+Deno.test("transpiles inline_comment event", async () => {
+ const fakeCart = new Cartridge();
+ fakeCart.on(
+ CartridgeEvent.InlineComment,
+ (event: CartridgeEventContext) => {
+ assertEquals(event.data.comments, ["hello world"]);
+ return "ABC";
+ },
+ );
+ const result = await transpile("; hello world", fakeCart);
+ assertEquals(result, "ABC");
+});
+
+Deno.test("transpiles multiline_comment event", async () => {
+ const fakeCart = new Cartridge();
+ fakeCart.on(
+ CartridgeEvent.MultilineComment,
+ (event: CartridgeEventContext) => {
+ assertEquals(event.data.comments, ["example"]);
+ return "ABC";
+ },
+ );
+ const result = await transpile(
+ `/*
+ example
+*/`,
+ fakeCart,
+ );
+ assertEquals(result, "ABC");
+});
+
+Deno.test("transpiles load event", async () => {
+ const fakeCart = new Cartridge();
+ fakeCart.on(
+ CartridgeEvent.Load,
+ (event: CartridgeEventContext) => {
+ assertEquals(event.data.source, "./example.fart");
+ assertEquals(event.data.dependencies, ["Example1", "Example2"]);
+ return "ABC";
+ },
+ );
+ const result = await transpile(
+ "load './example.fart' ( Example1, Example2 )",
+ fakeCart,
+ );
+ assertEquals(result, "ABC");
+});
+
+Deno.test("transpiles struct_open event", async () => {
+ const fakeCart = new Cartridge();
+ fakeCart.on(
+ CartridgeEvent.StructOpen,
+ (event: CartridgeEventContext) => {
+ assertEquals(event.data.name, "Example");
+ assertEquals(event.data.comments, []);
+ return "ABC";
+ },
+ );
+ const result = await transpile(`type Example {`, fakeCart);
+ assertEquals(result, "ABC");
+});
+
+Deno.test("transpiles set_property event", async () => {
+ const fakeCart = new Cartridge();
+ fakeCart.on(
+ CartridgeEvent.SetProperty,
+ (event: CartridgeEventContext) => {
+ assertEquals(event.data.name, "example");
+ assertEquals(event.data.definition.optional, false);
+ assertEquals(event.data.comments, []);
+ return "ABC";
+ },
+ );
+ const result = await transpile(
+ `type Example { example: string }`,
+ fakeCart,
+ );
+ assertEquals(result, "ABC");
+});
+// TODO: the struct_close and file_end cases below are placeholders copied
+// from the struct_open test; replace their bodies before enabling them.
+// StructClose = "struct_close",
+// Deno.test("transpiles struct_open event", async () => {
+// const fakeCart = new Cartridge();
+// fakeCart.on(
+// CartridgeEvent.StructOpen,
+// (event: CartridgeEventContext) => {
+// assertEquals(event.data.name, "Example");
+// assertEquals(event.data.comments, []);
+// return "ABC";
+// },
+// );
+// const result = await transpile(`type Example {`, fakeCart);
+// assertEquals(result, "ABC");
+// });
+// FileEnd = "file_end",
+
+// Deno.test("transpiles struct_open event", async () => {
+// const fakeCart = new Cartridge();
+// fakeCart.on(
+// CartridgeEvent.StructOpen,
+// (event: CartridgeEventContext) => {
+// assertEquals(event.data.name, "Example");
+// assertEquals(event.data.comments, []);
+// return "ABC";
+// },
+// );
+// const result = await transpile(`type Example {`, fakeCart);
+// assertEquals(result, "ABC");
+// });
diff --git a/lib/transpile/transpile.ts b/lib/transpile/transpile.ts
new file mode 100644
index 0000000..71f6d32
--- /dev/null
+++ b/lib/transpile/transpile.ts
@@ -0,0 +1,270 @@
+import { Lexicon, Token, tokenize } from "./tokenize/mod.ts";
+import { Cartridge, CartridgeEvent } from "./cartridge/mod.ts";
+import type { ModHandler, PropertyDefinition } from "./cartridge/mod.ts";
+import { TextBuilder } from "./text_builder/mod.ts";
+import { assertKind } from "./utils.ts";
+import type { FartTokenGenerator } from "./tokenize/mod.ts";
+
+/**
+ * Options accepted by `transpile` as an alternative to a bare Cartridge.
+ *
+ * @todo rethink these options since the codeCart determines the
+ * target language. The source language will be one of few supported
+ * langs: Fart and Proto.
+ */
+export interface FartOptions {
+ targetLanguage: string; // "ts" | "go"
+ sourceLanguage: string; // "fart" | "fart-pb" | "fart-go"
+ codeCartridge: Cartridge; // TODO: allow for user to pass Cartridge[]
+ indentation: number; // indentation width for generated code (unimplemented)
+ preserveComments: boolean; // carry source comments into output (unimplemented)
+}
+
+export class TranspilationContext {
+ public started = false;
+ public done = false;
+ public prevTokens: Token[] = [];
+ public builder: TextBuilder;
+
+ constructor(
+ public tokenizer: FartTokenGenerator,
+ public cartridge: Cartridge,
+ ) {
+ this.builder = new TextBuilder(cartridge);
+ }
+
+ public nextToken(): Token | undefined {
+ if (this.done) return undefined;
+ this.started = true;
+ const curr = this.tokenizer.next();
+ if (curr.done) this.done = true;
+ return curr.value;
+ }
+
+ public nextMod(currentToken?: Token): PropertyDefinition["value"] {
+ const initialToken = currentToken ?? this.nextToken();
+ const mods: ModHandler[] = [];
+ let mod = this.cartridge.getMod(initialToken?.value);
+ while (mod !== undefined) {
+ mods.push(mod);
+ const modSymbol = assertKind(this.nextToken(), Lexicon.Modifier);
+ const wildToken = this.nextToken();
+
+ switch (wildToken?.kind) {
+ case Lexicon.Identifier: {
+ const result = mods.reduceRight(
+ (result: string, modify: ModHandler) => modify(result),
+ wildToken.value,
+ );
+ return result;
+ }
+
+ case Lexicon.TupleOpener: {
+ const results = this.nextTuple();
+ break;
+ }
+ }
+ mod = this.cartridge.getMod(this.nextToken()?.value);
+ }
+ }
+
+ // public computeMods(
+ // tokens: Token[],
+ // ...mods: ModHandler[]
+ // ): string | undefined {
+ // return mods.reduceRight(
+ // (result: string[], mod: ModHandler) => [mod(...result)],
+ // tokens.map(({ value }) => this.cartridge.getType(value) ?? value),
+ // ).pop();
+ // }
+
+ /**
+ * Consumes the next struct, tuple, or value.
+ */
+ public async nextLiteral(currentToken?: Token): Promise {
+ const def: PropertyDefinition = {};
+ const wildToken = currentToken ?? this.nextToken();
+
+ switch (wildToken?.kind) {
+ case Lexicon.StructOpener: {
+ def.struct = await this.nextStruct();
+ break;
+ }
+
+ case Lexicon.TupleOpener: {
+ def.tuple = await this.nextTuple();
+ break;
+ }
+
+ case Lexicon.Identifier: {
+ // const modifier = await this.nextModifier(wildToken);
+ // if (modifier !== undefined) {
+ // if ident is known modifier, await nextModifier();
+ // }
+
+ def.value = wildToken.value;
+ break;
+ }
+
+ case Lexicon.TextLiteral: {
+ def.value = wildToken.value;
+ break;
+ }
+
+ default: {
+ const errMessage =
+ `Expected struct opener, tuple opener, or type value, but got '${wildToken}'`;
+ throw new Error(errMessage);
+ }
+ }
+
+ return def;
+ }
+
+ public async nextStruct(): Promise {
+ const result: PropertyDefinition["struct"] = {};
+
+ while (true) {
+ // expects identifier or '}'
+ const ident = assertKind(
+ this.nextToken(),
+ Lexicon.Identifier,
+ Lexicon.StructCloser,
+ );
+
+ if (ident.is(Lexicon.StructCloser)) {
+ break;
+ }
+
+ // expects ':' or '?:'
+ const propertyDefiner = assertKind(
+ this.nextToken(),
+ Lexicon.PropertyDefiner,
+ Lexicon.PropertyOptionalDefiner,
+ );
+
+ // 1st token of right-hand expression (e.g. identifier, text literal, or
+ // '{').
+ const wildToken = await this.nextToken();
+
+ switch (wildToken?.kind) {
+ case Lexicon.StructOpener: {
+ await this.builder.append(
+ CartridgeEvent.StructOpen,
+ [ident, propertyDefiner, wildToken],
+ [],
+ );
+ result[ident.value] = await this.nextLiteral(wildToken);
+ break;
+ }
+
+ case Lexicon.Identifier:
+ case Lexicon.TextLiteral: {
+ result[ident.value] = await this.nextLiteral(wildToken);
+ break;
+ }
+
+ default: {
+ throw new Error(
+ `Expected struct opener or type value, but got ${wildToken}`,
+ );
+ }
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * @todo implement
+ */
+ public nextTuple(): PropertyDefinition["tuple"] {
+ return [];
+ }
+}
+
+/**
+ * @todo impl options
+ * - targetLanguage: string; // "ts" | "go"
+ * - sourceLanguage: string; // "fart" | "fart-pb" | "fart-go"
+ * - codeCartridge: Cartridge; // TODO: allow for user to pass Cartridge[]
+ * - indentation: number;
+ * - preserveComments: boolean;
+ */
+export async function transpile(
+ code: string,
+ options: Cartridge | FartOptions,
+): Promise {
+ // const srcLang = (options as FartOptions).sourceLanguage ?? Lang.Fart;
+ // const targetLang = (options as FartOptions).sourceLanguage ?? Lang.TypeScript;
+ // const indentation: number | undefined = (options as FartOptions).indentation;
+ // const preserveComments = (options as FartOptions).preserveComments ?? false;
+ const cartridge = options instanceof Cartridge
+ ? options
+ : options.codeCartridge;
+ const ctx = new TranspilationContext(tokenize(code), cartridge);
+
+ // dispatch the file_start event at the start of the transpilation
+ await ctx.builder.append(CartridgeEvent.FileStart);
+
+ for (let token = ctx.nextToken(); !ctx.done; token = ctx.nextToken()) {
+ switch (token?.kind) {
+ case Lexicon.InlineComment: {
+ const comment = assertKind(token, Lexicon.InlineComment);
+ await ctx.builder.append(
+ CartridgeEvent.InlineComment,
+ [comment],
+ [comment],
+ );
+ break;
+ }
+
+ case Lexicon.MultilineComment: {
+ const comment = assertKind(token, Lexicon.MultilineComment);
+ await ctx.builder.append(
+ CartridgeEvent.MultilineComment,
+ [comment],
+ [comment],
+ );
+ break;
+ }
+
+ case Lexicon.Load: {
+ const loader = assertKind(token, Lexicon.Load);
+ const source = assertKind(ctx.nextToken(), Lexicon.TextLiteral);
+ const opener = assertKind(ctx.nextToken(), Lexicon.TupleOpener);
+ const tuple = await ctx.nextTuple();
+ if (tuple === undefined) throw new Error("Expected tuple");
+ const dependencies = tuple
+ .filter(({ value: def }) => typeof def.value === "string")
+ .map(({ value: def }) => def.value as string);
+ await ctx.builder.append(
+ CartridgeEvent.Load,
+ [loader, source, opener],
+ [],
+ undefined,
+ source.value,
+ ...dependencies,
+ );
+ break;
+ }
+
+ case Lexicon.TypeDefiner: {
+ const definer = assertKind(token, Lexicon.TypeDefiner);
+ const ident = assertKind(ctx.nextToken(), Lexicon.Identifier);
+ const opener = assertKind(ctx.nextToken(), Lexicon.StructOpener);
+ await ctx.builder.append(
+ CartridgeEvent.StructOpen,
+ [definer, ident, opener],
+ /* comments=*/ [],
+ { value: ident.value }, // pass struct name to builder
+ );
+ await ctx.nextStruct();
+ break;
+ }
+ }
+ }
+
+ // dispatch the file_end event at the end of the transpilation
+ await ctx.builder.append(CartridgeEvent.FileEnd);
+
+ return ctx.builder.export();
+}
diff --git a/lib/transpile/utils.ts b/lib/transpile/utils.ts
new file mode 100644
index 0000000..49d0c06
--- /dev/null
+++ b/lib/transpile/utils.ts
@@ -0,0 +1,17 @@
+import { Lexicon, Token } from "./tokenize/mod.ts";
+
+/**
+ * Asserts that `token` exists and that its kind is one of `validLex`,
+ * returning the token on success and throwing otherwise. A missing token
+ * is treated as Lexicon.Unknown, so it fails unless Unknown is accepted.
+ *
+ * NOTE(review): the error message interpolates `${token}` — confirm Token
+ * implements toString(); otherwise this prints "[object Object]".
+ *
+ * @todo write tests in utils.test.ts
+ */
+export function assertKind(
+ token?: Token,
+ ...validLex: Lexicon[]
+): Token {
+ const isValidLexeme = validLex.includes(token?.kind ?? Lexicon.Unknown);
+ if (token === undefined || !isValidLexeme) {
+ throw new Error(
+ `Expected token kind ${validLex.join(" or ")}, but got ${token}`,
+ );
+ }
+ return token;
+}
diff --git a/std/carts/deno.api.cart.ts b/std/carts/deno.api.cart.ts
deleted file mode 100644
index 3cd0c5a..0000000
--- a/std/carts/deno.api.cart.ts
+++ /dev/null
@@ -1,66 +0,0 @@
-import { Cart, CartEvent, MethodDetails } from "../../lib/gen/cart.ts";
-import { convertFilenameToTargetFilename } from "../common.ts";
-
-const denoServiceCart = new Cart();
-
-denoServiceCart.addEventListener(
- CartEvent.Import,
- (source: string, dependencies: string[]) => {
- if (dependencies.length === 0) return null;
- const targetFilename = convertFilenameToTargetFilename(source);
- const serializedDeps = dependencies.join(", ");
- return `import type { ${serializedDeps} } from "${targetFilename}";`;
- },
-);
-
-denoServiceCart.addEventListener(
- CartEvent.StructOpen,
- (identifier: string) => [
- [`export class ${identifier} {`],
- ["", `private conn: Connection;`],
- ["", `constructor(private server_id: string) {`],
- [
- "",
- "",
- `if (server_id === undefined) throw new Error("Server ID must be passed to access external services").`,
- ],
- ["", `this.conn = new Connection(server_id);`],
- ["", `}`],
- ],
-);
-
-denoServiceCart.addEventListener(
- CartEvent.SetMethod,
- (identifier: string, detail?: MethodDetails) => {
- let signature: string | null = null;
- if (
- detail === undefined ||
- (detail.input === undefined && detail.output === undefined)
- ) {
- signature = `public async ${identifier}(): Promise {`;
- } else if (detail.input !== undefined && detail.output === undefined) {
- signature =
- `public async ${identifier}(input: ${detail.input}): Promise {`;
- } else if (detail.input === undefined && detail.output !== undefined) {
- const outputType = detail.output.startsWith("Promise")
- ? detail.output
- : `Promise<${detail.output}>`;
- signature = `public async ${identifier}(): ${outputType} {`;
- } else if (detail.input !== undefined && detail.output !== undefined) {
- const outputType = detail.output.startsWith("Promise")
- ? detail.output
- : `Promise<${detail.output}>`;
- signature =
- `public async ${identifier}(input: ${detail.input}): ${outputType} {`;
- } else if (signature === null) return null;
- return [
- ["", signature],
- ["", "", `return await this.conn.query("${identifier}");`],
- ["", `}`],
- ];
- },
-);
-
-denoServiceCart.addEventListener(CartEvent.StructClose, () => `}`);
-
-export default denoServiceCart;
diff --git a/std/carts/deno.cart.test.ts b/std/carts/deno.cart.test.ts
deleted file mode 100644
index cda6273..0000000
--- a/std/carts/deno.cart.test.ts
+++ /dev/null
@@ -1,88 +0,0 @@
-import { assertEquals } from "../../deps/std/testing.ts";
-import { CartEventName } from "../../lib/gen/cart.ts";
-import DenoCart from "./deno.cart.ts";
-
-Deno.test("Passes dependencies to import event", async () => {
- const code = await DenoCart.dispatch({
- type: CartEventName.Import,
- source: "./path/to/types.fart",
- dependencies: ["Thing1", "Thing2"],
- });
- const actual = code?.export();
- const expected = `import type { Thing1, Thing2 } from "./path/to/types.ts";`;
- assertEquals(actual, expected);
-});
-
-// Deno.test("Import handler returns null without dependencies", () => {
-// const actual = DenoCart.dispatch(CartEvent.Import, "./types", []);
-// assertEquals(actual, null);
-// });
-
-// Deno.test("Successfully handles struct open event", () => {
-// const actual = DenoCart.dispatch(CartEvent.StructOpen, "Thing");
-// const expected = `export interface Thing {`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Property is assumed to be optional by default", () => {
-// const actual = DenoCart.dispatch(
-// CartEvent.SetProperty,
-// "count",
-// undefined,
-// "number",
-// );
-// const expected = `count?: number;`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Property is required when told", () => {
-// const actual = DenoCart.dispatch(
-// CartEvent.SetProperty,
-// "count",
-// true,
-// "number",
-// );
-// const expected = `count: number;`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Method resolves successfully without input or output", () => {
-// const actual = DenoCart.dispatch(CartEvent.SetMethod, "ping");
-// const expected = `ping: () => void;`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Method resolves successfully without input", () => {
-// const actual = DenoCart.dispatch(
-// CartEvent.SetMethod,
-// "ping",
-// { output: "Pong" },
-// );
-// const expected = `ping?: () => Pong;`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Method resolves successfully without output", () => {
-// const actual = DenoCart.dispatch(
-// CartEvent.SetMethod,
-// "ping",
-// { input: "Ping" },
-// );
-// const expected = `ping?: (input: Ping) => void;`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Method resolves successfully with input and output", () => {
-// const actual = DenoCart.dispatch(CartEvent.SetMethod, "ping", {
-// input: "Ping",
-// output: "Pong",
-// required: true,
-// });
-// const expected = `ping: (input: Ping) => Pong;`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully handles struct close event", () => {
-// const actual = DenoCart.dispatch(CartEvent.StructClose);
-// assertEquals(actual, `}`);
-// });
diff --git a/std/carts/deno.cart.ts b/std/carts/deno.cart.ts
deleted file mode 100644
index c039339..0000000
--- a/std/carts/deno.cart.ts
+++ /dev/null
@@ -1,60 +0,0 @@
-import { Cart, CartEventName } from "../../lib/gen/cart.ts";
-import { convertFilenameToTargetFilename } from "../common.ts";
-
-const deno = new Cart();
-
-const makeComment = () => {
- const year = new Date().getFullYear();
- const comment = `// Generated by Fart © ${year}`;
- return comment;
-};
-
-deno.on(
- CartEventName.FileStart,
- (event) => {
- event.code.append(makeComment());
- },
-);
-
-deno.on(
- CartEventName.Import,
- (event) => {
- if (event.dependencies.length === 0) return event.code.skip();
- const targetFilename = convertFilenameToTargetFilename(event.source);
- const serializedDeps = event.dependencies.join(", ");
- const importation =
- `import type { ${serializedDeps} } from "${targetFilename}";`;
- event.code.append(importation);
- },
-);
-
-deno.on(
- CartEventName.StructOpen,
- (event) => {
- event.code.append(`export interface ${event.identifier} {`);
- },
-);
-
-deno.on(
- CartEventName.SetProperty,
- (event) => {
- const assignment = event.required ? ":" : "?:";
- const property = event.value === undefined
- ? event.identifier + assignment + " {"
- : event.identifier + assignment + " " + event.value + ";";
- event.code.append(property);
- },
-);
-
-deno.on(CartEventName.StructClose, (event) => {
- event.code.append("}");
-});
-
-deno.on(
- CartEventName.FileEnd,
- (event) => {
- event.code.append(makeComment());
- },
-);
-
-export default deno;
diff --git a/std/carts/deno.cli.cart.ts b/std/carts/deno.cli.cart.ts
deleted file mode 100644
index 6955b75..0000000
--- a/std/carts/deno.cli.cart.ts
+++ /dev/null
@@ -1,56 +0,0 @@
-import { Cart, CartEventName } from "../../lib/gen/cart.ts";
-
-const denoCli = new Cart();
-
-const makeComment = () => {
- const year = new Date().getFullYear();
- return `// Generated by Fart © ${year}`;
-};
-
-const subcommands: Set = new Set([]);
-
-denoCli.on(
- CartEventName.FileStart,
- (event) => {
- event.code.append(`${makeComment()}
-import { FartDepartment } from "https://github.com/EthanThatOneKid/fart/raw/main/std/fart/ts/fart_department.ts";
-import { IODepartment } from "https://github.com/EthanThatOneKid/fart/raw/main/std/io/ts/io_department.ts";
-import { parse } from "https://deno.land/std@0.112.0/flags/mod.ts";
-
-const fart = new FartDepartment(new IODepartment());
-`);
- },
-);
-
-denoCli.on(
- CartEventName.SetProperty,
- (event) => {
- if (event.method) {
- subcommands.add(event.identifier);
- }
- },
-);
-
-denoCli.on(
- CartEventName.FileEnd,
- (event) => {
- event.code.append(`const FLAGS = parse(Deno.args);
-const [subcommand] = FLAGS._;
-
-switch (subcommand) {
- ${
- [...subcommands].map((subcommand) =>
- `case "${subcommand}": {
- console.log({FLAGS})
- console.log(await (fart.${subcommand} as any)(FLAGS));
- break;
- }`
- ).join("\n")
- }
-}
-
-${makeComment()}`);
- },
-);
-
-export default denoCli;
diff --git a/std/carts/fake.cart.ts b/std/carts/fake.cart.ts
deleted file mode 100644
index d7737eb..0000000
--- a/std/carts/fake.cart.ts
+++ /dev/null
@@ -1,39 +0,0 @@
-import { Cart, CartEventName } from "../../lib/gen/cart.ts";
-
-const fake = new Cart();
-
-export const RESULTS = {
- [CartEventName.FileStart]: "// Hello World",
- [CartEventName.Import]: "import Something from './path/to/something.ts'",
- [CartEventName.StructOpen]: "interface Thing {",
- [CartEventName.SetProperty]: "foo: string;",
- [CartEventName.StructClose]: "}",
- [CartEventName.FileEnd]: "// End of File",
-};
-
-fake.on(
- CartEventName.FileStart,
- (event) => event.code.append(RESULTS.file_start),
-);
-
-fake.on(
- CartEventName.Import,
- (event) => event.code.append(RESULTS.import),
-);
-
-fake.on(
- CartEventName.StructOpen,
- (event) => event.code.append(RESULTS.struct_open),
-);
-
-fake.on(
- CartEventName.SetProperty,
- (event) => event.code.append(RESULTS.set_property),
-);
-
-fake.on(
- CartEventName.StructClose,
- (event) => event.code.append(RESULTS.struct_close),
-);
-
-export default fake;
diff --git a/std/carts/go.cart.test.ts b/std/carts/go.cart.test.ts
deleted file mode 100644
index db17ed8..0000000
--- a/std/carts/go.cart.test.ts
+++ /dev/null
@@ -1,66 +0,0 @@
-// import { assertEquals } from "../../deps/std/testing.ts";
-// import { CartEvent } from "../../lib/gen/cart.ts";
-// import GoCart from "./go.cart.ts";
-
-// Deno.test("Successfully resolves struct open event", () => {
-// const actual = GoCart.dispatch(CartEvent.StructOpen, "Thing");
-// const expected = `type Thing interface {`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully resolves set property event", () => {
-// const actual = GoCart.dispatch(
-// CartEvent.SetProperty,
-// "foo",
-// false,
-// "float64",
-// );
-// const expected = `foo float64`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully resolves struct close event", () => {
-// const actual = GoCart.dispatch(CartEvent.StructClose);
-// const expected = `}`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully resolves import event", () => {
-// const actual = GoCart.dispatch(
-// CartEvent.Import,
-// "whatever.com/types/date",
-// [],
-// );
-// assertEquals(actual, `import "whatever.com/types/date"`);
-// });
-
-// Deno.test("Successfully resolves set method event without input or output", () => {
-// const actual = GoCart.dispatch(CartEvent.SetMethod, "ping");
-// const expected = `ping()`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully resolves set method event without input", () => {
-// const actual = GoCart.dispatch(CartEvent.SetMethod, "ping", {
-// output: "Pong",
-// });
-// const expected = `ping() (Pong)`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully resolves set method event without output", () => {
-// const actual = GoCart.dispatch(CartEvent.SetMethod, "ping", {
-// input: "Ping",
-// });
-// const expected = `ping(p Ping)`;
-// assertEquals(actual, expected);
-// });
-
-// Deno.test("Successfully resolves full set method event", () => {
-// const actual = GoCart.dispatch(CartEvent.SetMethod, "ping", {
-// input: "Ping",
-// output: "Pong",
-// });
-// const expected = `ping(p Ping) (Pong)`;
-// assertEquals(actual, expected);
-// });
diff --git a/std/carts/go.cart.ts b/std/carts/go.cart.ts
deleted file mode 100644
index f4a5303..0000000
--- a/std/carts/go.cart.ts
+++ /dev/null
@@ -1,49 +0,0 @@
-import { Cart, CartEvent, MethodDetails } from "../../lib/gen/cart.ts";
-import { convertFilenameToTargetFilename } from "../common.ts";
-
-const goCart = new Cart();
-
-goCart.addEventListener(
- CartEvent.Import,
- (source: string, dependencies: string[]) => {
- if (dependencies.length > 0) return null;
- const targetFilename = convertFilenameToTargetFilename(source, "");
- return `import "${targetFilename}"`;
- },
-);
-
-goCart.addEventListener(
- CartEvent.StructOpen,
- (identifier: string, depo = true) =>
- `type ${identifier} ${depo ? "interface" : "struct"} {`,
-);
-
-goCart.addEventListener(
- CartEvent.SetProperty,
- (identifier: string, _, type?: string) => {
- if (type !== undefined) return `${identifier} ${type}`;
- return `${identifier} interface {`;
- },
-);
-
-goCart.addEventListener(
- CartEvent.SetMethod,
- (identifier: string, detail?: MethodDetails) => {
- if (detail !== undefined) {
- if (detail.input !== undefined && detail.output !== undefined) {
- return `${identifier}(p ${detail.input}) (${detail.output})`;
- }
- if (detail.input !== undefined) {
- return `${identifier}(p ${detail.input})`;
- }
- if (detail.output !== undefined) {
- return `${identifier}() (${detail.output})`;
- }
- }
- return `${identifier}()`;
- },
-);
-
-goCart.addEventListener(CartEvent.StructClose, () => `}`);
-
-export default goCart;
diff --git a/std/carts/mod.ts b/std/carts/mod.ts
deleted file mode 100644
index 31edea0..0000000
--- a/std/carts/mod.ts
+++ /dev/null
@@ -1,39 +0,0 @@
-import { Registry } from "../../lib/fart.ts";
-import type { Cart } from "../../lib/gen/cart.ts";
-import type { TypeMap } from "../../lib/gen/typemap.ts";
-import tsTypeMap from "../typemaps/typescript.ts";
-import { Mime } from "../common.ts";
-
-import denoCart from "./deno.cart.ts";
-import denoCliCart from "./deno.cli.cart.ts";
-// import denoApiCart from "./deno.api.cart.ts";
-// import goCart from "./go.cart.ts";
-
-const base = new Registry<{
- cartridge: Cart;
- typemap: TypeMap;
- mimetype: Mime;
-}>("cartridges");
-
-// TODO: Add ts.node.*
-// TODO: Add remaining go.*
-
-base.set("ts", {
- cartridge: denoCart,
- typemap: tsTypeMap,
- mimetype: Mime.TypeScript,
-});
-
-base.set("ts.deno", {
- cartridge: denoCart,
- typemap: tsTypeMap,
- mimetype: Mime.TypeScript,
-});
-
-base.set("ts.deno.cli", {
- cartridge: denoCliCart,
- typemap: tsTypeMap,
- mimetype: Mime.TypeScript,
-});
-
-export default base;
diff --git a/std/common.ts b/std/common.ts
deleted file mode 100644
index 3ac1f12..0000000
--- a/std/common.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import { format as formatPath, parse as parsePath } from "../deps/std/path.ts";
-
-export const convertFilenameToTargetFilename = (
- filename: string,
- targetExt = ".ts",
-) => {
- const info = parsePath(filename);
- return formatPath({
- root: info.root,
- dir: info.dir,
- ext: targetExt,
- name: info.name,
- }).replace("\\", "/");
-};
-
-export enum Mime {
- TypeScript = "application/typescript; charset=UTF-8",
- JSON = "application/json; charset=UTF-8",
- HTML = "text/html; charset=UTF-8",
- CSS = "text/css; charset=UTF-8",
- Plain = "text/plain; charset=UTF-8",
- PNG = "image/png",
-}
-
-export const getMimeType = (pathname: string): Mime =>
- pathname.endsWith(".css")
- ? Mime.CSS
- : pathname.endsWith(".html")
- ? Mime.HTML
- : pathname.endsWith(".json")
- ? Mime.JSON
- : pathname.endsWith(".ts")
- ? Mime.TypeScript
- : pathname.endsWith(".png")
- ? Mime.PNG
- : Mime.Plain;
diff --git a/std/fart/fart.fart b/std/fart/fart.fart
deleted file mode 100644
index 17082f4..0000000
--- a/std/fart/fart.fart
+++ /dev/null
@@ -1,10 +0,0 @@
-type CompilerSettings {
- filepath*: string
- cartridge_id*: string
-}
-
-type FartDepartment {
- ; args: filepath, code_cart_id
- ; returns: generated_code
- compile*: fn %
-}
\ No newline at end of file
diff --git a/std/fart/ts/fart_department.test.ts b/std/fart/ts/fart_department.test.ts
deleted file mode 100644
index 7baeb18..0000000
--- a/std/fart/ts/fart_department.test.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-import { FartDepartment } from "./fart_department.ts";
-import { FakeIODepartment } from "../../io/ts/fake_io_department.ts";
-import { assertEquals } from "../../../deps/std/testing.ts";
-
-Deno.test("Initializes successfully", () => {
- const fart = new FartDepartment(new FakeIODepartment());
- assertEquals(fart, fart);
-});
-
-Deno.test("Successfully compile Fart file to TypeScript", async () => {
- const fart = new FartDepartment(new FakeIODepartment());
- const result = await fart.compile({
- filepath: "ex/pokemon/mod.fart",
- cartridge_id: "ts",
- });
- assertEquals(
- result,
- "// Generated by Fart © 2021\n// Generated by Fart © 2021\n",
- );
-});
diff --git a/std/fart/ts/fart_department.ts b/std/fart/ts/fart_department.ts
deleted file mode 100644
index d29546d..0000000
--- a/std/fart/ts/fart_department.ts
+++ /dev/null
@@ -1,26 +0,0 @@
-import type {
- CompilerSettings,
- FartDepartment as fFartDepartment,
-} from "https://fart.tools/ts/EthanThatOneKid/fart/main/std/fart/fart.ts";
-import { IODepartment as fIODepartment } from "https://fart.deno.dev/ts/EthanThatOneKid/fart/main/std/io/io.ts";
-
-import { compile } from "../../../lib/fart.ts";
-import cartridges from "../../carts/mod.ts";
-
-export class FartDepartment implements fFartDepartment {
- constructor(private io: fIODepartment) {}
-
- async compile(settings: CompilerSettings): Promise {
- // deno-lint-ignore camelcase
- const { filepath, cartridge_id } = settings;
- const content = await this.io.readFile(filepath);
- const item = cartridges.vendor(cartridge_id);
- if (item === undefined) return "";
- const { cartridge, typemap } = item;
- const code = await compile(content, {
- cartridge,
- typemap,
- });
- return code;
- }
-}
diff --git a/std/gh/gh.fart b/std/gh/gh.fart
deleted file mode 100644
index cd5056a..0000000
--- a/std/gh/gh.fart
+++ /dev/null
@@ -1,28 +0,0 @@
-; All properties listed on:
-; https://docs.github.com/en/rest/reference/users
-type GitHubUser {
- id*: number
- avatar_url*: string
- html_url*: string
- name*: string
- company*: string
- blog*: url % string
- bio*: string
- public_repos*: number
- public_gists*: number
- followers*: number
- following*: number
- twitter_username: string
- created_at: date % string
- updated_at: date % string
-}
-
-type GitHubRepo {
- name*: string
-}
-
-type GitHubDepartment {
- authenticate*: fn %
- getUser*: fn %
- getRepos*: fn %
-}
\ No newline at end of file
diff --git a/std/gh/mod.ts b/std/gh/mod.ts
deleted file mode 100644
index d6cd23e..0000000
--- a/std/gh/mod.ts
+++ /dev/null
@@ -1 +0,0 @@
-export { GitHubDepartment } from "./ts/github_department.ts";
diff --git a/std/gh/ts/github_department.test.ts b/std/gh/ts/github_department.test.ts
deleted file mode 100644
index ca2718b..0000000
--- a/std/gh/ts/github_department.test.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import { GitHubDepartment } from "./github_department.ts";
-import { assertEquals } from "../../../deps/std/testing.ts";
-
-Deno.test("Initializes successfully", () => {
- const gh = new GitHubDepartment();
- assertEquals(gh.httpClient, undefined);
-});
diff --git a/std/gh/ts/github_department.ts b/std/gh/ts/github_department.ts
deleted file mode 100644
index aa9f6f4..0000000
--- a/std/gh/ts/github_department.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-import type {
- GitHubDepartment as fGitHubDepartment,
- GitHubRepo,
- GitHubUser,
-} from "https://fart.tools/ts/EthanThatOneKid/fart/main/std/gh/gh.ts";
-
-// Full Octokit docs: https://octokit.github.io/rest.js
-import { Octokit } from "../../../deps/third_party/octokit/rest.ts";
-
-/**
- * Library available via:
- * ```ts
- * import { GitHubDepartment } from "https://etok.codes/fart/raw/main/std/gh/mod.ts";
- * ```
- */
-export class GitHubDepartment implements fGitHubDepartment {
- httpClient?: unknown; // typeof Octokit
-
- authenticate(accessToken: string) {
- this.httpClient = new Octokit({
- auth: accessToken,
- baseUrl: "https://api.github.com",
- });
- }
-
- async getUser(username: string): Promise {
- console.log("USER", { username });
- return {} as GitHubUser;
- }
-
- async getRepos(user: GitHubUser): Promise {
- console.log("REPOS", { user });
- return [] as GitHubRepo[];
- }
-}
diff --git a/std/io/io.fart b/std/io/io.fart
deleted file mode 100644
index 0408a89..0000000
--- a/std/io/io.fart
+++ /dev/null
@@ -1,6 +0,0 @@
-type IODepartment {
- fetchIfValidURL*: fn %
- readIfExists*: fn %
- readFile*: fn %
- writeFile*: fn %
-}
\ No newline at end of file
diff --git a/std/io/testdata/pikachu.json b/std/io/testdata/pikachu.json
deleted file mode 100644
index 8ba363f..0000000
--- a/std/io/testdata/pikachu.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "name": "Pikachu",
- "num": 25,
- "img": "http://www.serebii.net/pokemongo/pokemon/025.png",
- "types": { "type1": "Electric" }
-}
diff --git a/std/io/ts/fake_io_department.test.ts b/std/io/ts/fake_io_department.test.ts
deleted file mode 100644
index aa7b08a..0000000
--- a/std/io/ts/fake_io_department.test.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import { assertEquals } from "../../../deps/std/testing.ts";
-import { FakeIODepartment } from "./fake_io_department.ts";
-
-const assertReturnsVoid = (value: unknown) => assertEquals(value, undefined);
-
-const TESTDATA_PATH = "std/io/testdata/pikachu.json";
-
-Deno.test("Returns the path.", async () => {
- const io = new FakeIODepartment();
- const file = await io.readFile(TESTDATA_PATH);
- assertEquals(file, TESTDATA_PATH);
-});
-
-Deno.test("Returns the path when asked to read if exists.", async () => {
- const io = new FakeIODepartment();
- const file = await io.readIfExists(TESTDATA_PATH);
- assertEquals(file, TESTDATA_PATH);
-});
-
-Deno.test("Returns the path when asked to fetch if valid.", async () => {
- const testdataPath = "https://example.com/pikachu.json";
- const io = new FakeIODepartment();
- const file = await io.fetchIfValidURL(testdataPath);
- assertEquals(file, testdataPath);
-});
-
-Deno.test("Does nothing when asked to write.", async () => {
- const io = new FakeIODepartment();
- assertReturnsVoid(await io.writeFile("/example.txt", "nothingness"));
-});
diff --git a/std/io/ts/fake_io_department.ts b/std/io/ts/fake_io_department.ts
deleted file mode 100644
index 82ea33e..0000000
--- a/std/io/ts/fake_io_department.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-// deno-lint-ignore-file
-import type {
- IODepartment as fIODepartment,
-} from "https://fart.tools/ts/EthanThatOneKid/fart/main/std/io/io.ts";
-
-export class FakeIODepartment implements fIODepartment {
- async readFile(path: string): Promise {
- return path;
- }
-
- async writeFile(path: string, content: string): Promise {}
-
- async fetchIfValidURL(url: string): Promise {
- return url;
- }
-
- async readIfExists(path: string): Promise {
- return path;
- }
-}
diff --git a/std/io/ts/io_department.test.ts b/std/io/ts/io_department.test.ts
deleted file mode 100644
index 24e8325..0000000
--- a/std/io/ts/io_department.test.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-import { assertEquals } from "../../../deps/std/testing.ts";
-import { IODepartment } from "./io_department.ts";
-
-const TESTDATA_PATH = "std/io/testdata/pikachu.json";
-
-Deno.test("Reads text file from file system successfully.", async () => {
- const io = new IODepartment();
- const file = await io.readFile(TESTDATA_PATH);
- const pikachu = JSON.parse(file);
- assertEquals(pikachu.name, "Pikachu");
-});
-
-Deno.test("Reads text file from Internet successfully.", async () => {
- const io = new IODepartment();
- const file = await io.readFile(
- "https://github.com/EthanThatOneKid/fart/raw/main/std/io/testdata/pikachu.json",
- );
- const pikachu = JSON.parse(file);
- assertEquals(pikachu.name, "Pikachu");
-});
-
-Deno.test("Returns empty string when no media can be found.", async () => {
- const io = new IODepartment();
- const file = await io.readFile("/where/in/the/flip/am/i.json");
- assertEquals(file, "");
-});
-
-Deno.test("Reads text file from file system successfully.", async () => {
- const io = new IODepartment();
- const file = await io.readIfExists(TESTDATA_PATH);
- if (file === undefined) return;
- const pikachu = JSON.parse(file);
- assertEquals(pikachu.name, "Pikachu");
-});
-
-Deno.test("Returns undefined when file does not exist.", async () => {
- const io = new IODepartment();
- const file = await io.readIfExists("/where/in/the/flip/am/i.json");
- assertEquals(file, "");
-});
-
-Deno.test("Reads text file from Internet successfully.", async () => {
- const io = new IODepartment();
- const file = await io.fetchIfValidURL(
- "https://github.com/EthanThatOneKid/fart/raw/main/std/io/testdata/pikachu.json",
- );
- if (file === undefined) return;
- const pikachu = JSON.parse(file);
- assertEquals(pikachu.name, "Pikachu");
-});
diff --git a/std/io/ts/io_department.ts b/std/io/ts/io_department.ts
deleted file mode 100644
index ddcc9ea..0000000
--- a/std/io/ts/io_department.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import type {
- IODepartment as fIODepartment,
-} from "https://fart.tools/ts/EthanThatOneKid/fart/main/std/io/io.ts";
-
-export class IODepartment implements fIODepartment {
- async fetchIfValidURL(path: string): Promise {
- try {
- const url = new URL(path);
- const response = await fetch(url);
- if (response.status === 200) {
- return await response.text();
- }
- // deno-lint-ignore no-empty
- } catch {}
- return "";
- }
-
- async readIfExists(path: string): Promise {
- try {
- return await Deno.readTextFile(path);
- // deno-lint-ignore no-empty
- } catch {}
- return "";
- }
-
- async readFile(path: string): Promise {
- const fetchedFile = await this.fetchIfValidURL(path);
- return fetchedFile.length > 0 ? fetchedFile : await this.readIfExists(path);
- }
-
- async writeFile(path: string, content: string): Promise {
- await Deno.writeTextFile(path, content);
- }
-}
diff --git a/std/legacy_cli/cli.ts b/std/legacy_cli/cli.ts
deleted file mode 100644
index 5c95312..0000000
--- a/std/legacy_cli/cli.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-import { parse as parseFlags } from "../../deps/std/flags.ts";
-import { compile } from "../../lib/fart.ts";
-import { getFlags } from "./common.ts";
-import cartridges from "../carts/mod.ts";
-
-export const cli = async (args: string[]) => {
- const flags = parseFlags(args);
- const { source, target, indentation, output, help } = getFlags(
- flags,
- cartridges,
- );
- if (help) {
- console.log(`Help coming soon!`);
- Deno.exit(0);
- }
- const content = await Deno.readTextFile(source);
- const item = cartridges.vendor(target);
- if (item === undefined) Deno.exit(1);
- const { cartridge, typemap } = item;
- const code = await compile(content, {
- cartridge,
- typemap,
- indentation,
- });
- if (output !== undefined) {
- await Deno.writeTextFile(output, code);
- Deno.exit(1);
- }
- console.log(code);
-};
diff --git a/std/legacy_cli/common.ts b/std/legacy_cli/common.ts
deleted file mode 100644
index 5eb5533..0000000
--- a/std/legacy_cli/common.ts
+++ /dev/null
@@ -1,46 +0,0 @@
-import type { Registry } from "../../lib/fart.ts";
-
-const getTarget = (
- // deno-lint-ignore no-explicit-any
- flags: any,
- // deno-lint-ignore no-explicit-any
- registry: Registry,
-): string | undefined => {
- const target = flags.target ?? flags.lang ?? flags.reg;
- const exists = registry.has(target);
- if (typeof target === "string" && exists) {
- return target;
- }
- return undefined;
-};
-
-// deno-lint-ignore no-explicit-any
-const getSource = (flags: any) => {
- const source = flags._[0];
- if (typeof source === "string") return source;
- console.log("Please include a source file.");
- Deno.exit();
-};
-
-// deno-lint-ignore no-explicit-any
-const getIndentation = (flags: any) => {
- const indent = flags.indent;
- if (typeof indent === "string") return indent;
- return undefined;
-};
-
-// deno-lint-ignore no-explicit-any
-const getOutput = (flags: any) => {
- const output = flags.output ?? flags.out ?? flags.o;
- if (typeof output === "string") return output;
- return undefined;
-};
-
-// deno-lint-ignore no-explicit-any
-export const getFlags = (flags: any, registry: Registry) => ({
- source: getSource(flags),
- target: getTarget(flags, registry),
- indentation: getIndentation(flags),
- help: Boolean(flags.help),
- output: getOutput(flags),
-});
diff --git a/std/legacy_cli/mod.ts b/std/legacy_cli/mod.ts
deleted file mode 100644
index d084de2..0000000
--- a/std/legacy_cli/mod.ts
+++ /dev/null
@@ -1 +0,0 @@
-export { cli } from "./cli.ts";
diff --git a/std/legacy_cli/run.ts b/std/legacy_cli/run.ts
deleted file mode 100644
index 6d15a96..0000000
--- a/std/legacy_cli/run.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import { cli } from "./mod.ts";
-
-if (import.meta.main) {
- await cli(Deno.args);
-}
diff --git a/std/local_storage/local_storage.fart b/std/local_storage/local_storage.fart
deleted file mode 100644
index c2676e9..0000000
--- a/std/local_storage/local_storage.fart
+++ /dev/null
@@ -1,6 +0,0 @@
-type LocalStorage {
- setItem*: fn %
- getItem*: fn %
- removeItem*: fn %
- clear*: fn % <_, _>
-}
\ No newline at end of file
diff --git a/std/local_storage/ts/local_storage_department.ts b/std/local_storage/ts/local_storage_department.ts
deleted file mode 100644
index 2740b5e..0000000
--- a/std/local_storage/ts/local_storage_department.ts
+++ /dev/null
@@ -1 +0,0 @@
-// Reference: https://deno.land/manual@v1.14.3/runtime/web_storage_api
diff --git a/std/server/README.md b/std/server/README.md
deleted file mode 100644
index f5260ec..0000000
--- a/std/server/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# Server Architecture
-
-Please refer to .
diff --git a/std/server/common.ts b/std/server/common.ts
deleted file mode 100644
index 658781c..0000000
--- a/std/server/common.ts
+++ /dev/null
@@ -1,53 +0,0 @@
-import { Mime } from "../common.ts";
-
-export const makeError = (message: string, status = 400): Response =>
- new Response(JSON.stringify({ error: message }), {
- status,
- headers: { "Content-Type": Mime.JSON },
- });
-
-const transformPathname = (pathname: string) =>
- pathname.replace(/\.[^.]*$/, "") + ".fart";
-
-export const fetchGitHubFile = async (pathname: string, transform = false) => {
- try {
- const GITHUB_BASE = "https://raw.githubusercontent.com/";
- const url = GITHUB_BASE +
- (transform ? transformPathname(pathname) : pathname);
- const response = await fetch(url);
- return await response.text();
- } catch {
- return undefined;
- }
-};
-
-// TODO: Allow user-defined `getSize` function to help calculate the size
-// of a given cache item. That way we can start removing some entries once
-// we reach a given threshold.
-export const makeCacheLayer = (
- download: (k: string) => Promise,
- expirationTimeout: number,
-) => {
- const cache = new Map<
- string,
- { value: T; lastUpdated: number }
- >([]);
- return async (key = "", currentTimestamp = new Date().valueOf()) => {
- const cacheEntry = cache.get(key);
- if (
- cacheEntry !== undefined &&
- cacheEntry.lastUpdated + expirationTimeout > currentTimestamp
- ) {
- return cacheEntry.value;
- }
- const updatedEntry = {
- value: await download(key),
- lastUpdated: currentTimestamp,
- };
- cache.set(key, updatedEntry);
- return updatedEntry.value;
- };
-};
-
-export const removeFrontmatter = (md: string) =>
- md.replace(/^\-\-\-[\r\n]*(.*?)[\r\n]*\-\-\-/g, "");
diff --git a/std/server/handle_request.ts b/std/server/handle_request.ts
deleted file mode 100644
index 82987f2..0000000
--- a/std/server/handle_request.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-import * as middleware from "./middleware/mod.ts";
-import { makeError } from "./common.ts";
-
-const getHash = (request: Request): string | undefined => {
- // The hash is not sent to the server, so we emulate the hash with searchParams :/
- const { searchParams } = new URL(request.url);
- const hash = [...searchParams].shift()?.shift();
- if (hash !== undefined) return "#" + hash;
-};
-
-export const handleRequest = async (request: Request): Promise => {
- const { pathname } = new URL(request.url);
- if (pathname === "/") return await middleware.home();
- if (request.method === "GET") {
- const staticFile = await middleware.static(pathname);
- if (staticFile !== undefined) return staticFile;
- const ghDoc = await middleware.gh_docs(pathname, getHash(request));
- if (ghDoc !== undefined) return ghDoc;
- return await middleware.compile(request);
- }
- return makeError("Requested an unknown resource.", 404);
-};
diff --git a/std/server/middleware/compile.ts b/std/server/middleware/compile.ts
deleted file mode 100644
index a7456d6..0000000
--- a/std/server/middleware/compile.ts
+++ /dev/null
@@ -1,31 +0,0 @@
-import { fetchGitHubFile, makeError } from "../common.ts";
-import cartridges from "../../carts/mod.ts";
-import { compile } from "../../../lib/fart.ts";
-
-const processRequest = async (request: Request) => {
- const { pathname } = new URL(request.url);
- const [id, ...path] = pathname.split("/").slice(1);
- const file = await fetchGitHubFile(path.join("/"), /*transform=*/ true);
- const content = file ?? await request.text();
- return { id, content };
-};
-
-export default async (request: Request): Promise => {
- const { id: cartridgeId, content } = await processRequest(request);
- const item = cartridges.vendor(cartridgeId);
- if (cartridgeId === undefined || item === undefined) {
- return makeError(`No such language target ${cartridgeId}.`);
- }
- const { cartridge, typemap, mimetype } = item;
- try {
- return new Response(
- await compile(content, { cartridge, typemap }),
- {
- status: 200,
- headers: { "Content-Type": mimetype },
- }
- );
- } catch (error) {
- return makeError(error.message, 500);
- }
-};
diff --git a/std/server/middleware/gh_docs.ts b/std/server/middleware/gh_docs.ts
deleted file mode 100644
index f5fcb06..0000000
--- a/std/server/middleware/gh_docs.ts
+++ /dev/null
@@ -1,75 +0,0 @@
-import {
- dirname,
- fromFileUrl,
- join,
- normalize,
-} from "../../../deps/std/path.ts";
-import { marked as parse } from "../../../deps/third_party/marked.ts";
-import { Time } from "../../../lib/consts/time.ts";
-import { makeCacheLayer, removeFrontmatter } from "../common.ts";
-import { convertFilenameToTargetFilename } from "../../common.ts";
-import { Mime } from "../../common.ts";
-
-const fetchDoc = async (pathname: string): Promise => {
- // TODO: Do not check if --allow-env is unspecified.
- const deployed = Deno.env.get("DENO_DEPLOYMENT_ID") !== undefined;
- const targetName = convertFilenameToTargetFilename(pathname, ".md");
- const docPath = deployed ? join("./docs/", targetName) : fromFileUrl(
- normalize(
- join(
- dirname(import.meta.url),
- "../../../docs/",
- targetName,
- ),
- ),
- );
- try {
- const decoder = new TextDecoder("utf-8");
- const docFile = await Deno.readFile(docPath);
- return removeFrontmatter(decoder.decode(docFile));
- // deno-lint-ignore no-empty
- } catch {}
-};
-
-const processUrl = (pathname: string, hash = "#readme"): string => {
- const BASE_URL = "https://etok.codes/fart/blob/main/docs/";
- const targetName = convertFilenameToTargetFilename(pathname, ".md");
- hash = hash !== undefined && hash.length > 0 ? hash : "#readme";
- return BASE_URL + targetName + hash;
-};
-
-const cache = makeCacheLayer(async (pathname: string) => {
- const doc = await fetchDoc(pathname);
- if (doc !== undefined) {
- const html = parse(doc);
- const ghLink = processUrl(pathname);
- return `
-
-
-
-
-
-
-
-
- ${html}
-
-
-`;
- }
-}, Time.Hour);
-
-export default async (
- pathname: string,
-): Promise => {
- try {
- const result = await cache(pathname);
- if (result !== undefined) {
- return new Response(result, {
- status: 200,
- headers: { "Content-Type": Mime.HTML },
- });
- }
- } // deno-lint-ignore no-empty
- catch {}
-};
diff --git a/std/server/middleware/home.ts b/std/server/middleware/home.ts
deleted file mode 100644
index a663b20..0000000
--- a/std/server/middleware/home.ts
+++ /dev/null
@@ -1,40 +0,0 @@
-import {
- fetchGitHubFile,
- makeCacheLayer,
- removeFrontmatter,
-} from "../common.ts";
-import { Mime } from "../../common.ts";
-import { Time } from "../../../lib/consts/time.ts";
-import { marked as parse } from "../../../deps/third_party/marked.ts";
-
-const fetchPageBody = async (): Promise => {
- const readmePath = "EthanThatOneKid/fart/main/README.md";
- const readmeText = await fetchGitHubFile(readmePath);
- if (readmeText === undefined) return "";
- const html = parse(removeFrontmatter(readmeText));
- return html;
-};
-
-const cache = makeCacheLayer(async () =>
- `
-
-
-
-
-
-
-
-
- ${await fetchPageBody()}
-
-
-`, Time.Hour);
-
-export default async (): Promise =>
- new Response(
- await cache(),
- {
- status: 200,
- headers: { "Content-Type": Mime.HTML },
- },
- );
diff --git a/std/server/middleware/mod.ts b/std/server/middleware/mod.ts
deleted file mode 100644
index db1c9cc..0000000
--- a/std/server/middleware/mod.ts
+++ /dev/null
@@ -1,4 +0,0 @@
-export { default as home } from "./home.ts";
-export { default as compile } from "./compile.ts";
-export { default as static } from "./static.ts";
-export { default as gh_docs } from "./gh_docs.ts";
diff --git a/std/server/middleware/static.ts b/std/server/middleware/static.ts
deleted file mode 100644
index 26524b7..0000000
--- a/std/server/middleware/static.ts
+++ /dev/null
@@ -1,37 +0,0 @@
-import { getMimeType } from "../../common.ts";
-import { makeCacheLayer } from "../common.ts";
-import { Time } from "../../../lib/consts/time.ts";
-import {
- dirname,
- fromFileUrl,
- join,
- normalize,
-} from "../../../deps/std/path.ts";
-
-const processPathname = (pathname: string): string => {
- // TODO: Do not check if --allow-env is unspecified.
- const deployed = Deno.env.get("DENO_DEPLOYMENT_ID") !== undefined;
- if (deployed) {
- return join("./std/server/static/", pathname);
- }
- return fromFileUrl(
- normalize(join(dirname(import.meta.url), "../static/", pathname)),
- );
-};
-
-const cache = makeCacheLayer(
- async (key: string) => await Deno.readFile(key),
- Time.Day,
-);
-
-export default async (pathname: string): Promise => {
- try {
- const filename = processPathname(pathname);
- return new Response(await cache(filename), {
- headers: {
- "Content-Type": getMimeType(pathname),
- },
- });
- // deno-lint-ignore no-empty
- } catch {}
-};
diff --git a/std/server/middleware/tuts.ts b/std/server/middleware/tuts.ts
deleted file mode 100644
index f3173e8..0000000
--- a/std/server/middleware/tuts.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-// Todo: Create UI with live code editor that shows result of all available output formats with how they connect to each line in the source Fart.
-// include a side bar with a brief explanation of the example code.
diff --git a/std/server/serve_http.ts b/std/server/serve_http.ts
deleted file mode 100644
index 31fd18c..0000000
--- a/std/server/serve_http.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import { handleRequest } from "./handle_request.ts";
-
-export const serveHttp = async () => {
- const server = Deno.listen({ port: 8080 });
- console.log(`HTTP webserver running. Access it at: http://localhost:8080/`);
- // Connections to the server will be yielded up as an async iterable.
- for await (const connection of server) {
- // In order to not be blocking, we need to handle each connection individually
- // without awaiting the function.
- for await (const event of Deno.serveHttp(connection)) {
- const { request, respondWith } = event;
- respondWith(await handleRequest(request));
- }
- }
-};
-
-if (import.meta.main) {
- await serveHttp();
-}
diff --git a/std/server/static/fart-logo.png b/std/server/static/fart-logo.png
deleted file mode 100644
index 4f46133..0000000
Binary files a/std/server/static/fart-logo.png and /dev/null differ
diff --git a/std/server/static/favicon.ico b/std/server/static/favicon.ico
deleted file mode 100644
index 1815e3f..0000000
Binary files a/std/server/static/favicon.ico and /dev/null differ
diff --git a/std/server/static/pokemon-example-stdout.png b/std/server/static/pokemon-example-stdout.png
deleted file mode 100644
index 22b6af3..0000000
Binary files a/std/server/static/pokemon-example-stdout.png and /dev/null differ
diff --git a/std/server/static/style.css b/std/server/static/style.css
deleted file mode 100644
index 4c68d34..0000000
--- a/std/server/static/style.css
+++ /dev/null
@@ -1,992 +0,0 @@
-/*
-This document has been created with Marked.app
-Content is property of the document author
-Please leave this notice in place, along with any additional credits below.
----------------------------------------------------------------
-Title: GitHub Updated
-Author: Luis Puerto
-Description: Github README style. Includes theme for Pygmentized code blocks.
----
-*/
-#wrapper {
- color: #24292e;
- font-family: Consolas, -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial,
- sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
- margin: 0 auto;
- text-size-adjust: 100%;
- max-width: 980px !important;
- padding: 40px 48px 48px !important;
-}
-#wrapper aside,
-#wrapper article,
-#wrapper details,
-#wrapper figcaption,
-#wrapper figure,
-#wrapper footer,
-#wrapper header,
-#wrapper hgroup,
-#wrapper main,
-#wrapper menu,
-#wrapper nav,
-#wrapper section,
-#wrapper summary {
- display: block;
-}
-#wrapper audio,
-#wrapper canvas,
-#wrapper progress,
-#wrapper video {
- display: inline-block;
- vertical-align: baseline;
-}
-#wrapper audio:not([controls]) {
- display: none;
- height: 0;
-}
-#wrapper [hidden],
-#wrapper template {
- display: none;
-}
-#wrapper a {
- background-color: transparent;
-}
-#wrapper a:active,
-#wrapper a:hover {
- outline: 0;
-}
-#wrapper abbr[title] {
- border-bottom: 1px dotted;
-}
-#wrapper b,
-#wrapper strong {
- font-weight: bold;
-}
-#wrapper dfn {
- font-style: italic;
-}
-#wrapper mark {
- background: #ff0;
- color: #000;
-}
-#wrapper small {
- font-size: 80%;
-}
-#wrapper sub,
-#wrapper sup {
- font-size: 75%;
- line-height: 0;
- position: relative;
- vertical-align: baseline;
-}
-#wrapper sup {
- top: -0.5em;
-}
-#wrapper sub {
- bottom: -0.25em;
-}
-#wrapper img {
- border: 0;
-}
-#wrapper svg:not(:root) {
- overflow: hidden;
-}
-#wrapper figure {
- margin: 1em 40px;
-}
-#wrapper hr {
- box-sizing: content-box;
- height: 0;
-}
-#wrapper pre {
- overflow: auto;
-}
-#wrapper code,
-#wrapper kbd,
-#wrapper pre,
-#wrapper samp {
- font-family: monospace, monospace;
- font-size: 1em;
-}
-#wrapper button,
-#wrapper input,
-#wrapper optgroup,
-#wrapper select,
-#wrapper textarea {
- color: inherit;
- font: inherit;
- margin: 0;
-}
-#wrapper button {
- overflow: visible;
-}
-#wrapper button,
-#wrapper select {
- text-transform: none;
-}
-#wrapper button,
-#wrapper input[type="button"],
-#wrapper input[type="reset"],
-#wrapper input[type="submit"] {
- -webkit-appearance: button;
- cursor: pointer;
-}
-#wrapper button[disabled] {
- cursor: default;
-}
-#wrapper button::-moz-focus-inner,
-#wrapper input::-moz-focus-inner {
- border: 0;
- padding: 0;
-}
-#wrapper input {
- line-height: normal;
-}
-#wrapper input[type="checkbox"],
-#wrapper input[type="radio"] {
- box-sizing: border-box;
- padding: 0;
-}
-#wrapper input[type="number"]::-webkit-inner-spin-button,
-#wrapper input[type="number"]::-webkit-outer-spin-button {
- height: auto;
-}
-#wrapper input[type="search"] {
- -webkit-appearance: textfield;
- box-sizing: content-box;
-}
-#wrapper input[type="search"]::-webkit-search-cancel-button,
-#wrapper input[type="search"]::-webkit-search-decoration {
- -webkit-appearance: none;
-}
-#wrapper fieldset {
- border: 1px solid #c0c0c0;
- margin: 0 2px;
- padding: 0.35em 0.625em 0.75em;
-}
-#wrapper legend {
- border: 0;
- padding: 0;
-}
-#wrapper textarea {
- overflow: auto;
-}
-#wrapper optgroup {
- font-weight: bold;
-}
-#wrapper table {
- border-collapse: collapse;
- border-spacing: 0;
-}
-#wrapper * {
- box-sizing: border-box;
-}
-#wrapper input,
-#wrapper select,
-#wrapper textarea,
-#wrapper button {
- font: 14px/21px -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica,
- Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
-}
-#wrapper body {
- font: 14px/21px -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica,
- Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
- color: #333;
- background-color: #fff;
-}
-#wrapper a {
- color: #4078c0;
- text-decoration: none;
-}
-#wrapper a:hover,
-#wrapper a:active {
- text-decoration: underline;
-}
-#wrapper hr,
-#wrapper .rule {
- background: transparent;
- border: 0;
- border-bottom: 1px solid #ddd;
- height: 0;
- margin: 15px 0;
- overflow: hidden;
-}
-#wrapper hr:before,
-#wrapper .rule:before {
- content: "";
- display: table;
-}
-#wrapper hr:after,
-#wrapper .rule:after {
- clear: both;
- content: "";
- display: table;
-}
-#wrapper h1,
-#wrapper h2,
-#wrapper h3,
-#wrapper h4,
-#wrapper h5,
-#wrapper h6 {
- font-weight: 600;
- line-height: 1.1;
- margin: 24px 0 16px;
- padding: 0;
-}
-
-#wrapper h1 code,
-#wrapper h2 code,
-#wrapper h3 code,
-#wrapper h4 code,
-#wrapper h5 code,
-#wrapper h6 code {
- font-size: inherit;
-}
-#wrapper h1,
-#wrapper h2 {
- border-bottom: 1px solid #eaecef;
-}
-#wrapper h1 {
- font-size: 32px;
- line-height: 40px;
- margin: 0 0 16px;
- padding: 0 0 9.600000381469727px;
-}
-#wrapper h2 {
- font-size: 24px;
- line-height: 30px;
- padding: 0 0 7.199999809265137px;
-}
-#wrapper h3 {
- font-size: 20px;
- line-height: 25px;
-}
-#wrapper h4 {
- font-size: 16px;
- line-height: 20px;
- margin: 24px 0 16px;
- padding: 0;
-}
-#wrapper h5 {
- font-size: 14px;
- line-height: 17px;
-}
-#wrapper h6 {
- font-size: 13.600000381469727px;
- line-height: 17px;
-}
-#wrapper small {
- font-size: 90%;
-}
-#wrapper blockquote {
- margin: 0;
-}
-#wrapper ol ol,
-#wrapper ul ol {
- list-style-type: lower-roman;
-}
-#wrapper ul ul ol,
-#wrapper ul ol ol,
-#wrapper ol ul ol,
-#wrapper ol ol ol {
- list-style-type: lower-alpha;
-}
-#wrapper dd {
- margin-left: 0;
-}
-#wrapper tt,
-#wrapper code {
- font-family: SFMono-Regular, Consolas, Liberation Mono, Menlo, monospace;
- font-size: 85%;
-}
-#wrapper pre {
- font: 100% SFMono-Regular, Consolas, Liberation Mono, Menlo, monospace;
- margin-bottom: 0;
- margin-top: 16px;
- line-height: 1.45;
- padding: 10px;
- border-radius: 3px;
-}
-
-#wrapper {
- -webkit-font-smoothing: antialiased;
- background: #fff;
- border: solid 1px #dddddd !important;
- border-radius: 3px;
- color: #333;
- font: 14px -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial,
- sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
- line-height: 1.6;
- padding: 3px;
-}
-
-p {
- margin: 1em 0;
-}
-
-a {
- color: #4183c4;
- text-decoration: none;
-}
-
-#wrapper {
- background-color: #fff;
- font-size: 16px;
- line-height: 1.6;
- margin: 15px;
- padding: 30px;
-}
-#wrapper > *:first-child {
- margin-top: 0 !important;
-}
-#wrapper > *:last-child {
- margin-bottom: 0 !important;
-}
-
-@media screen {
- #wrapper {
- border: solid 1px #ddd;
- }
-}
-p,
-blockquote,
-ul,
-ol,
-dl,
-table,
-pre {
- margin-bottom: 16px;
-}
-
-hr {
- height: 4px;
- padding: 0;
- margin: 16px 0;
- background-color: #e7e7e7;
- border: 0 none;
-}
-
-ul,
-ol {
- padding-left: 2em;
-}
-
-ul.no-list,
-ol.no-list {
- padding: 0;
- list-style-type: none;
-}
-
-ul ul,
-ul ol {
- margin-top: 0;
- margin-bottom: 0;
-}
-
-ol ol,
-ol ul {
- margin-top: 0;
- margin-bottom: 0;
-}
-
-li > p {
- margin-bottom: 0;
-}
-li p + p {
- margin-top: 16px;
-}
-
-dl {
- padding: 0;
-}
-dl dt {
- padding: 0;
- margin-top: 16px;
- font-size: 1em;
- font-style: italic;
- font-weight: 700;
-}
-dl dd {
- padding: 0 16px;
- margin-bottom: 16px;
-}
-
-blockquote {
- padding: 0 15px;
- margin-left: 0;
- color: #777;
- border-left: 4px solid #ddd;
-}
-blockquote > :first-child {
- margin-top: 0;
-}
-blockquote > :last-child {
- margin-bottom: 0;
-}
-
-table {
- display: block;
- width: 100%;
- overflow: auto;
-}
-table th {
- font-weight: 700;
- padding: 6px 13px;
- border: 1px solid #ddd;
-}
-table td {
- padding: 6px 13px;
- border: 1px solid #ddd;
-}
-table tr {
- background-color: #fff;
- border-top: 1px solid #ccc;
-}
-table tr:nth-child(2n) {
- background-color: #f8f8f8;
-}
-
-img {
- max-width: 100%;
- -moz-box-sizing: border-box;
- box-sizing: border-box;
-}
-
-span.frame {
- display: block;
- overflow: hidden;
-}
-span.frame > span {
- display: block;
- float: left;
- width: auto;
- padding: 7px;
- margin: 13px 0 0;
- overflow: hidden;
- border: 1px solid #ddd;
-}
-span.frame span img {
- display: block;
- float: left;
-}
-span.frame span span {
- display: block;
- padding: 5px 0 0;
- clear: both;
- color: #333;
-}
-span.align-center {
- display: block;
- overflow: hidden;
- clear: both;
-}
-span.align-center > span {
- display: block;
- margin: 13px auto 0;
- overflow: hidden;
- text-align: center;
-}
-span.align-center span img {
- margin: 0 auto;
- text-align: center;
-}
-span.align-right {
- display: block;
- overflow: hidden;
- clear: both;
-}
-span.align-right > span {
- display: block;
- margin: 13px 0 0;
- overflow: hidden;
- text-align: right;
-}
-span.align-right span img {
- margin: 0;
- text-align: right;
-}
-span.float-left {
- display: block;
- float: left;
- margin-right: 13px;
- overflow: hidden;
-}
-span.float-left span {
- margin: 13px 0 0;
-}
-span.float-right {
- display: block;
- float: right;
- margin-left: 13px;
- overflow: hidden;
-}
-span.float-right > span {
- display: block;
- margin: 13px auto 0;
- overflow: hidden;
- text-align: right;
-}
-
-code,
-tt {
- background-color: rgba(0, 0, 0, 0.04);
- border-radius: 3px;
- font-size: 85%;
- margin: 0;
- padding-bottom: 0.2em;
- padding-top: 0.2em;
- padding: 0.4 0.2;
-}
-
-code::before,
-code::after {
- content: "\00a0";
- letter-spacing: -0.2em;
-}
-
-tt:before,
-tt:after {
- content: "\00a0";
- letter-spacing: -0.2em;
-}
-
-code br,
-tt br {
- display: none;
-}
-
-del code {
- text-decoration: inherit;
- vertical-align: text-top;
-}
-
-pre > code {
- padding: 0;
- margin: 0;
- font-size: 100%;
- white-space: pre;
- background: transparent;
- border: 0;
-}
-
-.highlight {
- margin-bottom: 16px;
-}
-.highlight pre {
- padding: 16px;
- margin-bottom: 0;
- overflow: auto;
- font-size: 85%;
- line-height: 1.45;
- background-color: #f6f8fa;
- border-radius: 3px;
-}
-
-pre {
- padding: 16px;
- margin-bottom: 16px;
- overflow: auto;
- font-size: 85%;
- line-height: 1.45;
- background-color: #f7f7f7;
- border-radius: 3px;
- word-wrap: normal;
-}
-pre code,
-pre tt {
- display: inline;
- max-width: initial;
- padding: 0;
- margin: 0;
- overflow: initial;
- line-height: inherit;
- word-wrap: normal;
- background-color: transparent;
- border: 0;
-}
-pre code:before,
-pre code:after {
- content: normal;
-}
-pre tt:before,
-pre tt:after {
- content: normal;
-}
-
-.poetry pre {
- font-family: Georgia, Garamond, serif !important;
- font-style: italic;
- font-size: 110% !important;
- line-height: 1.6em;
- display: block;
- margin-left: 1em;
-}
-.poetry pre code {
- font-family: Georgia, Garamond, serif !important;
- word-break: break-all;
- word-break: break-word;
- /* Non standard for webkit */
- -webkit-hyphens: auto;
- -moz-hyphens: auto;
- hyphens: auto;
- white-space: pre-wrap;
-}
-
-sup,
-sub,
-a.footnote {
- font-size: 1.4ex;
- height: 0;
- line-height: 1;
- vertical-align: super;
- position: relative;
-}
-
-sub {
- vertical-align: sub;
- top: -1px;
-}
-
-@media print {
- body {
- background: #fff;
- }
-
- img,
- table,
- figure {
- page-break-inside: avoid;
- }
-
- #wrapper {
- background: #fff;
- border: none !important;
- font-size: 12px;
- }
-
- pre code {
- overflow: visible;
- }
-}
-@media screen {
- body.inverted {
- border-color: #555;
- box-shadow: none;
- color: #eee !important;
- }
-
- .inverted #wrapper,
- .inverted hr,
- .inverted p,
- .inverted td,
- .inverted li,
- .inverted h1,
- .inverted h2,
- .inverted h3,
- .inverted h4,
- .inverted h5,
- .inverted h6,
- .inverted th,
- .inverted .math,
- .inverted caption,
- .inverted dd,
- .inverted dt,
- .inverted blockquote {
- border-color: #555;
- box-shadow: none;
- color: #eee !important;
- }
- .inverted td,
- .inverted th {
- background: #333;
- }
- .inverted pre,
- .inverted code,
- .inverted tt {
- background: #eeeeee !important;
- color: #111;
- }
- .inverted h2 {
- border-color: #555555;
- }
- .inverted hr {
- border-color: #777;
- border-width: 1px !important;
- }
-
- ::selection {
- background: rgba(157, 193, 200, 0.5);
- }
-
- h1::selection {
- background-color: rgba(45, 156, 208, 0.3);
- }
-
- h2::selection {
- background-color: rgba(90, 182, 224, 0.3);
- }
-
- h3::selection,
- h4::selection,
- h5::selection,
- h6::selection,
- li::selection,
- ol::selection {
- background-color: rgba(133, 201, 232, 0.3);
- }
-
- code::selection {
- background-color: rgba(0, 0, 0, 0.7);
- color: #eeeeee;
- }
- code span::selection {
- background-color: rgba(0, 0, 0, 0.7) !important;
- color: #eeeeee !important;
- }
-
- a::selection {
- background-color: rgba(255, 230, 102, 0.2);
- }
-
- .inverted a::selection {
- background-color: rgba(255, 230, 102, 0.6);
- }
-
- td::selection,
- th::selection,
- caption::selection {
- background-color: rgba(180, 237, 95, 0.5);
- }
-
- .inverted {
- background: #0b2531;
- background: #252a2a;
- }
- .inverted #wrapper {
- background: #252a2a;
- }
- .inverted a {
- color: #acd1d5;
- }
-}
-.highlight {
- background: #fff;
-}
-.highlight .c {
- color: #998;
- font-style: italic;
-}
-.highlight .err {
- color: #a61717;
- background-color: #e3d2d2;
-}
-.highlight .k,
-.highlight .o {
- font-weight: 700;
-}
-.highlight .cm {
- color: #998;
- font-style: italic;
-}
-.highlight .cp {
- color: #999;
- font-weight: 700;
-}
-.highlight .c1 {
- color: #998;
- font-style: italic;
-}
-.highlight .cs {
- color: #999;
- font-weight: 700;
- font-style: italic;
-}
-.highlight .gd {
- color: #000;
- background-color: #fdd;
-}
-.highlight .gd .x {
- color: #000;
- background-color: #faa;
-}
-.highlight .ge {
- font-style: italic;
-}
-.highlight .gr {
- color: #a00;
-}
-.highlight .gh {
- color: #999;
-}
-.highlight .gi {
- color: #000;
- background-color: #dfd;
-}
-.highlight .gi .x {
- color: #000;
- background-color: #afa;
-}
-.highlight .go {
- color: #888;
-}
-.highlight .gp {
- color: #555;
-}
-.highlight .gs {
- font-weight: 700;
-}
-.highlight .gu {
- color: purple;
- font-weight: 700;
-}
-.highlight .gt {
- color: #a00;
-}
-.highlight .kc,
-.highlight .kd,
-.highlight .kn,
-.highlight .kp,
-.highlight .kr {
- font-weight: 700;
-}
-.highlight .kt {
- color: #458;
- font-weight: 700;
-}
-.highlight .m {
- color: #099;
-}
-.highlight .s {
- color: #d14;
-}
-.highlight .n {
- color: #333;
-}
-.highlight .na {
- color: teal;
-}
-.highlight .nb {
- color: #0086b3;
-}
-.highlight .nc {
- color: #458;
- font-weight: 700;
-}
-.highlight .no {
- color: teal;
-}
-.highlight .ni {
- color: purple;
-}
-.highlight .ne,
-.highlight .nf {
- color: #900;
- font-weight: 700;
-}
-.highlight .nn {
- color: #555;
-}
-.highlight .nt {
- color: navy;
-}
-.highlight .nv {
- color: teal;
-}
-.highlight .ow {
- font-weight: 700;
-}
-.highlight .w {
- color: #bbb;
-}
-.highlight .mf,
-.highlight .mh,
-.highlight .mi,
-.highlight .mo {
- color: #099;
-}
-.highlight .sb,
-.highlight .sc,
-.highlight .sd,
-.highlight .s2,
-.highlight .se,
-.highlight .sh,
-.highlight .si,
-.highlight .sx {
- color: #d14;
-}
-.highlight .sr {
- color: #009926;
-}
-.highlight .s1 {
- color: #d14;
-}
-.highlight .ss {
- color: #990073;
-}
-.highlight .bp {
- color: #999;
-}
-.highlight .vc,
-.highlight .vg,
-.highlight .vi {
- color: teal;
-}
-.highlight .il {
- color: #099;
-}
-.highlight .gc {
- color: #999;
- background-color: #eaf2f5;
-}
-
-.type-csharp .highlight .k,
-.type-csharp .highlight .kt {
- color: blue;
-}
-.type-csharp .highlight .nf {
- color: #000;
- font-weight: 400;
-}
-.type-csharp .highlight .nc {
- color: #2b91af;
-}
-.type-csharp .highlight .nn {
- color: #000;
-}
-.type-csharp .highlight .s,
-.type-csharp .highlight .sc {
- color: #a31515;
-}
-
-.type-csharp .highlight .k,
-.type-csharp .highlight .kt {
- color: #00f;
-}
-.type-csharp .highlight .nf {
- color: #000;
- font-weight: normal;
-}
-.type-csharp .highlight .nc {
- color: #2b91af;
-}
-.type-csharp .highlight .nn {
- color: #000;
-}
-.type-csharp .highlight .s,
-.type-csharp .highlight .sc {
- color: #a31515;
-}
-
-body {
- display: flex;
- justify-content: center;
-}
-
-body #wrapper {
- padding-bottom: 33vh;
- height: max-content !important;
-}
-
-body.dark #wrapper {
- background: transparent !important;
- box-shadow: none !important;
-}
diff --git a/std/server/worker.ts b/std/server/worker.ts
index f1128fd..27761cc 100644
--- a/std/server/worker.ts
+++ b/std/server/worker.ts
@@ -1,4 +1,7 @@
-import { handleRequest } from "./handle_request.ts";
+// TODO: Move to fart_server/worker.ts on
+// https://dash.deno.com/projects/fart/settings/git
+
+import { handleRequest } from "../../fart_server/handle_request.ts";
addEventListener("fetch", async (event) => {
const response = await handleRequest(event.request);
diff --git a/std/typemaps/common.ts b/std/typemaps/common.ts
deleted file mode 100644
index fda74dc..0000000
--- a/std/typemaps/common.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-// TODO: Create a more _infinite_ algorithm.
-export function* genUniqueNames() {
- let count = 0;
- while (count < 26) {
- yield String.fromCharCode(count + 97);
- count++;
- }
-}
diff --git a/std/typemaps/fake.ts b/std/typemaps/fake.ts
deleted file mode 100644
index 0faf20f..0000000
--- a/std/typemaps/fake.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { ReservedType, TypeMap } from "../../lib/gen/typemap.ts";
-
-const fake: TypeMap = {
- [ReservedType.Omit]: "_",
- [ReservedType.Default]: "any",
- [ReservedType.Number]: "num",
- [ReservedType.String]: "str",
- [ReservedType.Boolean]: "boo",
-};
-
-export default fake;
diff --git a/std/typemaps/typescript.ts b/std/typemaps/typescript.ts
deleted file mode 100644
index 2a3c7ad..0000000
--- a/std/typemaps/typescript.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-import {
- ModifierType,
- OMIT_PATTERN,
- ReservedType,
- TypeMap,
-} from "../../lib/gen/typemap.ts";
-import { genUniqueNames } from "./common.ts";
-
-const typescript: TypeMap = {
- [ReservedType.Omit]: "_",
- [ReservedType.Number]: "number",
- [ReservedType.String]: "string",
- [ReservedType.Boolean]: "boolean",
- [ReservedType.Default]: "any",
-
- [ModifierType.Array]: (t: string) => `Array<${t}>`, // foos: array % Foo
- [ModifierType.Async]: (t: string) => `Promise<${t}>`, // bar: async % Bar
- [ModifierType.Dictionary]: (t1: string, t2: string) => `Record<${t1}, ${t2}>`, // dex: dict %
- [ModifierType.Function]: (...t: string[]) => { // catch: func %
- let result = "(";
- const gimmeName = genUniqueNames();
- while (t.length > 1) {
- const { value: name } = gimmeName.next();
- if (!name) break; // break in case of emergency
- const argType = t.shift();
- const omitted = argType !== undefined && OMIT_PATTERN.test(argType);
- if (omitted) continue;
- const secondToLast = t.length === 1;
- result += `${name}: ${argType}` +
- (secondToLast ? "" : ", ");
- }
- const returnType = t.pop()?.replace(OMIT_PATTERN, "void") ?? "void";
- return result + `) => ${returnType}`;
- },
- [ModifierType.Date]: (t: string) =>
- // created_at*: date % string
- t === "string" || t === "number" ? "Date" : t,
- [ModifierType.URL]: (t: string) => t === "string" ? "URL" : t, // avatar_url*: url % string
-};
-
-export default typescript;