| author | Kai Stevenson <kai@kaistevenson.com> | 2025-11-02 18:08:16 -0800 |
|---|---|---|
| committer | Kai Stevenson <kai@kaistevenson.com> | 2025-11-02 18:09:14 -0800 |
| commit | e9f3c782bc10d4c5c44faf768aa60cd6bcc66574 (patch) | |
| tree | cb4e447a5ce5deffe989a65ff774e90f0e8ad518 /src/lib/core | |
| parent | f53622d63c74a1e2dd0397f4a26f31aa72dea60b (diff) | |
refactor
Diffstat (limited to 'src/lib/core')
| mode | path | lines deleted |
|---|---|---|
| -rw-r--r-- | src/lib/core/common.ts | 57 |
| -rw-r--r-- | src/lib/core/eval.ts | 126 |
| -rw-r--r-- | src/lib/core/index.ts | 0 |
| -rw-r--r-- | src/lib/core/lexer.ts | 62 |
| -rw-r--r-- | src/lib/core/parser.ts | 220 |
5 files changed, 0 insertions, 465 deletions
diff --git a/src/lib/core/common.ts b/src/lib/core/common.ts
deleted file mode 100644
index c1a1dc3..0000000
--- a/src/lib/core/common.ts
+++ /dev/null
@@ -1,57 +0,0 @@
-export enum TokenType {
-  OPEN_PAREN = "(",
-  CLOSE_PAREN = ")",
-  SPACE = " ",
-  SEMICOLON = ";",
-  COMMA = ",",
-  NAME = "NAME",
-}
-
-export enum TokenSubType {
-  NA = "NA",
-  LITERAL = "LITERAL",
-  REFERENCE = "REFERENCE",
-}
-
-export type Token<
-  Type extends TokenType = TokenType,
-  Name extends string = string
-> = {
-  type: Type;
-  name: Name;
-};
-
-export type LexerCtx = {
-  next: string;
-  nameCollection: string;
-  tokens: readonly Token[];
-};
-
-export enum NodeType {
-  INT = "INT",
-  EXT = "EXT",
-  PARSER_ERROR = "PARSER_ERROR",
-}
-
-export type ASTNode<
-  Type extends NodeType = NodeType,
-  Name extends string = string,
-  Value extends any = any,
-  Children extends readonly ASTNode[] = readonly ASTNode<
-    NodeType,
-    string,
-    any,
-    any
-  >[]
-> = {
-  type: Type;
-  name: Name;
-  value: Value;
-  children: Children;
-};
-
-export type ParserCtx = {
-  remainingTokens: readonly Token[];
-  lastToken: Token | null;
-  stack: readonly ASTNode[];
-};
diff --git a/src/lib/core/eval.ts b/src/lib/core/eval.ts
deleted file mode 100644
index 59cda1d..0000000
--- a/src/lib/core/eval.ts
+++ /dev/null
@@ -1,126 +0,0 @@
-import { ASTNode, NodeType } from "./common";
-import { Lex } from "./lexer";
-import { Parse } from "./parser";
-
-export type FnError<T extends string> = `Function execution error: ${T}`;
-
-export type ToStringInner<T, Carry extends string = ""> = T extends
-  | string
-  | number
-  ? `${T}`
-  : T extends readonly any[]
-  ? T extends readonly [infer Head, ...infer Tail]
-    ? `${ToStringInner<
-        Tail,
-        `${Carry extends "" ? "" : `${Carry}, `}${ToStringInner<Head>}`
-      >}`
-    : `[${Carry}]`
-  : FnError<`Can't stringify`>;
-
-export type UnarrayIfOnlyHead<T extends readonly any[]> = T extends [
-  infer Head,
-  infer Next
-]
-  ? T
-  : T extends [infer Head]
-  ? Head
-  : T;
-
-export type NumberToArray<
-  Number extends number,
-  Carry extends readonly any[] = []
-> = Number extends Carry["length"]
-  ? Carry
-  : NumberToArray<Number, [...Carry, any]>;
-
-export type NumbersToArray<
-  Numbers extends readonly number[],
-  Carry extends readonly any[] = []
-> = Numbers extends [
-  infer Head extends number,
-  ...infer Tail extends readonly number[]
-]
-  ? NumbersToArray<Tail, [...Carry, ...NumberToArray<Head>]>
-  : Carry;
-
-export type AddNumbers<Numbers extends readonly number[]> =
-  NumbersToArray<Numbers> extends infer T extends readonly any[]
-    ? T["length"]
-    : never;
-
-export type AddStrings<
-  Strings extends readonly string[],
-  Carry extends string = ""
-> = Strings extends [infer Head extends string, ...infer Tail extends string[]]
-  ? AddStrings<Tail, `${Carry}${Head}`>
-  : Carry;
-
-export type MultiplyInner<
-  N extends number,
-  MS extends readonly any[],
-  Carry extends number = 0
-> = MS extends [infer Head extends number, ...infer Tail extends readonly any[]]
-  ? MultiplyInner<N, Tail, AddNumbers<[Carry, N]>>
-  : Carry;
-export type Multiply<M extends number, N extends number> = MultiplyInner<
-  M,
-  NumberToArray<N>
->;
-
-export type BUILTIN_Arr<Args extends readonly any[]> = Args;
-
-export type BUILTIN_ToString<Args extends readonly any[]> = ToStringInner<
-  UnarrayIfOnlyHead<{
-    [Idx in keyof Args]: ToStringInner<Args[Idx]>;
-  }>
->;
-
-export type BUILTIN_Add<Args extends readonly any[]> =
-  Args extends readonly string[]
-    ? AddStrings<Args>
-    : Args extends readonly number[]
-    ? AddNumbers<Args>
-    : FnError<`Cannot add operands ${ToStringInner<Args>}`>;
-
-export type BUILTIN_Mul<Args extends readonly any[]> = Args extends [
-  infer A,
-  infer B,
-  infer C
-]
-  ? FnError<`Can only multiply [number, number], but got ${ToStringInner<Args>}`>
-  : Args extends [infer M extends number, infer N extends number]
-  ? Multiply<M, N>
-  : FnError<`Can only multiply [number, number], but got ${ToStringInner<Args>}`>;
-
-export type SENTINEL_NO_BUILTIN = "__NO_BUILTIN__";
-export type MapBuiltins<Node extends ASTNode> =
-  Node["children"] extends infer Children extends readonly ASTNode[]
-    ? {
-        [Idx in keyof Children]: Children[Idx] extends ASTNode
-          ? Evaluate<Children[Idx]>
-          : never;
-      } extends infer Args extends readonly any[]
-      ? Node["name"] extends "tostring"
-        ? BUILTIN_ToString<Args>
-        : Node["name"] extends "arr"
-        ? BUILTIN_Arr<Args>
-        : Node["name"] extends "add"
-        ? BUILTIN_Add<Args>
-        : Node["name"] extends "mul"
-        ? BUILTIN_Mul<Args>
-        : SENTINEL_NO_BUILTIN
-      : never
-    : never;
-
-export type EvalError<T extends string> = `Eval error: ${T}`;
-
-export type Evaluate<Node extends ASTNode> = Node["type"] extends NodeType.INT
-  ? Node["value"]
-  : Node["type"] extends NodeType.EXT
-  ? MapBuiltins<Node>
-  : EvalError<`Unhandled node type ${Node["type"]}`>;
-
-const input = `` as const;
-const lex_result = null as unknown as Lex<typeof input>;
-const parse_result = null as unknown as Parse<typeof lex_result>;
-const eval_result = null as unknown as Evaluate<typeof parse_result>;
diff --git a/src/lib/core/index.ts b/src/lib/core/index.ts
deleted file mode 100644
index e69de29..0000000
--- a/src/lib/core/index.ts
+++ /dev/null
diff --git a/src/lib/core/lexer.ts b/src/lib/core/lexer.ts
deleted file mode 100644
index 33a408a..0000000
--- a/src/lib/core/lexer.ts
+++ /dev/null
@@ -1,62 +0,0 @@
-import { LexerCtx, Token, TokenSubType, TokenType } from "./common";
-
-export type BreakingToken =
-  | TokenType.OPEN_PAREN
-  | TokenType.CLOSE_PAREN
-  | TokenType.COMMA
-  | TokenType.SEMICOLON
-  | TokenType.SPACE;
-
-export type IsWhitespace<T extends string> = T extends `${TokenType.SPACE}`
-  ? true
-  : T extends `${TokenType.COMMA}`
-  ? true
-  : T extends `${TokenType.SEMICOLON}`
-  ? true
-  : false;
-
-export type ProcessNameCollection<
-  Ctx extends LexerCtx,
-  Tail extends string,
-  _Token extends Token | null
-> = {
-  next: Tail;
-  nameCollection: "";
-  tokens: _Token extends null
-    ? [
-        ...Ctx["tokens"],
-        ...(Ctx["nameCollection"] extends ""
-          ? []
-          : [Token<TokenType.NAME, Ctx["nameCollection"]>])
-      ]
-    : [
-        ...Ctx["tokens"],
-        ...(Ctx["nameCollection"] extends ""
-          ? [_Token]
-          : [Token<TokenType.NAME, Ctx["nameCollection"]>, _Token])
-      ];
-};
-
-export type IsOpen<T> = T extends `${TokenType.OPEN_PAREN}` ? true : false;
-export type IsClose<T> = T extends `${TokenType.CLOSE_PAREN}` ? true : false;
-
-export type _Lex<Ctx extends LexerCtx> =
-  Ctx["next"] extends `${infer Head}${infer Tail}`
-    ? IsWhitespace<Head> extends true
-      ? _Lex<ProcessNameCollection<Ctx, Tail, null>>
-      : IsOpen<Head> extends true
-      ? _Lex<ProcessNameCollection<Ctx, Tail, Token<TokenType.OPEN_PAREN>>>
-      : IsClose<Head> extends true
-      ? _Lex<ProcessNameCollection<Ctx, Tail, Token<TokenType.CLOSE_PAREN>>>
-      : _Lex<{
-          next: Tail;
-          nameCollection: `${Ctx["nameCollection"]}${Head}`;
-          tokens: Ctx["tokens"];
-        }>
-    : Ctx["tokens"];
-
-export type Lex<Raw extends string> = _Lex<{
-  next: `${Raw};`;
-  tokens: [];
-  nameCollection: "";
-}>;
diff --git a/src/lib/core/parser.ts b/src/lib/core/parser.ts
deleted file mode 100644
index 79218e9..0000000
--- a/src/lib/core/parser.ts
+++ /dev/null
@@ -1,220 +0,0 @@
-import {
-  ASTNode,
-  NodeType,
-  ParserCtx,
-  Token,
-  TokenSubType,
-  TokenType,
-} from "./common";
-import { Lex } from "./lexer";
-
-/*
-start
-if no 'lastName'
-then:
-  expect nextToken to be a name
-  lastName = nextToken
-  goto start
-
-else:
-  if nextToken is name
-  then:
-    // we already have a lastName
-    mutate last element of stack to push lastName as child
-    lastName = nextToken
-    goto start
-
-  else:
-    //nextToken is openParen or close paren
-    if nextToken is closeParen
-    then:
-      set last element of stack as child of prev element on stack
-      pop stack
-      // [stack[last - 1].children.push(stack.pop)
-      goto start
-    else if nextToken is openParen:
-      push lastName onto stack
-      goto start
-
-
-finally:
-  // only one element remains on the stack
-  return stack[0]
-
-
-  CALL ( param, CALL2 ( param2 ) )
-
-  param2 ret call2 param ret call
-
-  | call
-  |-- param
-  |-- | call2
-  |-- param2
-
- */
-
-export type Error<T extends string> = ASTNode<
-  NodeType.PARSER_ERROR,
-  "Error",
-  T,
-  []
->;
-
-export type PushChild<Node extends ASTNode, Child extends ASTNode> = {
-  type: Node["type"];
-  value: Node["value"];
-  name: Node["name"];
-  children: [...Node["children"], Child];
-};
-
-export type PushChildToLastElementOfStack<
-  Stack extends ParserCtx["stack"],
-  Child extends ASTNode
-> = Stack extends [...infer Head, infer Tail extends ASTNode]
-  ? [...Head, PushChild<Tail, Child>]
-  : Stack extends [infer Only extends ASTNode]
-  ? [PushChild<Only, Child>]
-  : never;
-
-export type PushChildToSecondLastElementOfStack<
-  Stack extends ParserCtx["stack"],
-  Child extends ASTNode
-> = Stack extends [
-  ...infer Head,
-  infer Tail extends ASTNode,
-  infer Final extends ASTNode
-]
-  ? [...Head, PushChild<Tail, Child>, Final]
-  : Stack extends [infer Only extends ASTNode, infer Final extends ASTNode]
-  ? [PushChild<Only, Child>, Final]
-  : never;
-
-export type GetLastOnStack<Stack extends ParserCtx["stack"]> = Stack extends [
-  ...infer Head,
-  infer Tail extends ASTNode
-]
-  ? Tail
-  : Stack extends [infer Only extends ASTNode]
-  ? Only
-  : never;
-
-export type StackWithoutLast<Stack extends ParserCtx["stack"]> = Stack extends [
-  ...infer Head extends ASTNode[],
-  infer Tail
-]
-  ? [...Head]
-  : Stack extends [infer Only extends ASTNode]
-  ? []
-  : never;
-
-type NULL_SENTINEL = {
-  NULL: true;
-};
-
-export type ParseNumberLiteral<T extends string> =
-  T extends `${infer Inner extends number}` ? Inner : NULL_SENTINEL;
-
-export type ParseStringLiteral<T extends string> =
-  T extends `"${infer Inner extends string}"` ? Inner : NULL_SENTINEL;
-
-export type ResolveNodeFromToken<_Token extends Token> = ParseNumberLiteral<
-  _Token["name"]
-> extends number
-  ? ASTNode<NodeType.INT, "", ParseNumberLiteral<_Token["name"]>, []>
-  : ParseStringLiteral<_Token["name"]> extends string
-  ? ASTNode<NodeType.INT, "", ParseStringLiteral<_Token["name"]>, []>
-  : ASTNode<NodeType.EXT, _Token["name"], null, []>;
-
-export type _Parse<Ctx extends ParserCtx> = Ctx["remainingTokens"] extends [
-  infer Head extends Token,
-  ...infer Tail extends readonly Token[]
-]
-  ? Ctx["lastToken"] extends Token
-    ? Head["type"] extends TokenType.NAME
-      ? // we already have a lastName
-        // mutate last element of stack to push lastName as child
-        // lastName = nextToken
-        // goto start
-        _Parse<{
-          lastToken: Head;
-          remainingTokens: Tail;
-          stack: PushChildToLastElementOfStack<
-            Ctx["stack"],
-            ResolveNodeFromToken<Ctx["lastToken"]>
-          >;
-        }>
-      : //nextToken is openParen or close paren
-      Head["type"] extends TokenType.CLOSE_PAREN
-      ? // handle lastName
-        // set last element of stack as child of prev element on stack
-        // pop stack
-        // [stack[last - 1].children.push(stack.pop)
-        // goto start
-        _Parse<{
-          lastToken: null;
-          remainingTokens: Tail;
-          // first push the last name onto the children of the top
-          // then push the top onto the children of the next
-          // then remove the top
-          stack: StackWithoutLast<
-            PushChildToSecondLastElementOfStack<
-              Ctx["stack"],
-              PushChild<
-                GetLastOnStack<Ctx["stack"]>,
-                ResolveNodeFromToken<Ctx["lastToken"]>
-              >
-            >
-          >;
-        }>
-      : Head["type"] extends TokenType.OPEN_PAREN
-      ? // push lastName onto stack
-        // goto start
-        _Parse<{
-          lastToken: null;
-          remainingTokens: Tail;
-          stack: [...Ctx["stack"], ResolveNodeFromToken<Ctx["lastToken"]>];
-        }>
-      : Ctx & Error<`Was not expecting ${Head["type"]}`>
-    : // expect nextToken to be a name or close paren
-    Head["type"] extends TokenType.NAME
-    ? // lastName = nextToken
-      // goto start
-      _Parse<{
-        lastToken: Head;
-        remainingTokens: Tail;
-        stack: Ctx["stack"];
-      }>
-    : Head["type"] extends TokenType.CLOSE_PAREN
-    ? _Parse<{
-        lastToken: null;
-        remainingTokens: Tail;
-        // push the top onto the children of the next
-        // then remove the top
-        stack: StackWithoutLast<
-          PushChildToSecondLastElementOfStack<
-            Ctx["stack"],
-            GetLastOnStack<Ctx["stack"]>
-          >
-        >;
-      }>
-    : Ctx &
-        Error<`Expected nextToken to be a name or close paren at ${Head["type"]}`>
-  : Ctx["lastToken"] extends Token
-  ? // case where we ended with a name
-    _Parse<{
-      lastToken: null;
-      remainingTokens: [];
-      stack: PushChildToLastElementOfStack<
-        Ctx["stack"],
-        ResolveNodeFromToken<Ctx["lastToken"]>
-      >;
-    }>
-  : Ctx["stack"][0];
-
-export type Parse<Raw extends readonly Token[]> = _Parse<{
-  lastToken: null;
-  remainingTokens: Raw;
-  stack: [ASTNode<NodeType.EXT, "arr", null, []>];
-}>;
-
-const test_result = null as unknown as Parse<Lex<`test(135)`>>;
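For context on what this refactor removes, the sketch below shows how the deleted modules composed into a single type-level pipeline (Lex → Parse → Evaluate), mirroring the scratch constants at the bottom of eval.ts and parser.ts. The input string and the expected result are illustrative assumptions, not something this commit asserts.

```ts
// Hypothetical usage of the removed type-level interpreter, assuming the
// pre-removal exports of ./lexer, ./parser, and ./eval were still in scope.
import { Lex } from "./lexer";
import { Parse } from "./parser";
import { Evaluate } from "./eval";

// Lex turns the source string into a token list, Parse folds the tokens into
// an AST under an implicit "arr" root, and Evaluate resolves the built-ins
// (arr, tostring, add, mul) -- all inside the type system, no runtime code.
type Tokens = Lex<"add(1, mul(2, 3))">;
type Tree = Parse<Tokens>;
type Result = Evaluate<Tree>; // should resolve to a tuple such as [7], given the "arr" root

// Same spot-check pattern the deleted eval.ts used:
const eval_result = null as unknown as Result;
```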
