author     Kai Stevenson <kai@kaistevenson.com>  2025-11-03 23:40:02 -0800
committer  Kai Stevenson <kai@kaistevenson.com>  2025-11-03 23:40:02 -0800
commit     56040f3ff85e77311f0c864a89afd63fcf1bdb50 (patch)
tree       2eb0166756e76b0483692e79830329c92e7fdcf3 /src/lang/core/lexer.ts
parent     a11e6780fbb8bd4143dfec44e2ce147b795772d8 (diff)
add js-lang, refactor some ts-lang stuff
Diffstat (limited to 'src/lang/core/lexer.ts')
-rw-r--r--  src/lang/core/lexer.ts  |  91 -
1 file changed, 0 insertions(+), 91 deletions(-)
diff --git a/src/lang/core/lexer.ts b/src/lang/core/lexer.ts
deleted file mode 100644
index 567964f..0000000
--- a/src/lang/core/lexer.ts
+++ /dev/null
@@ -1,91 +0,0 @@
-import { LexerCtx, Token, TokenSubType, TokenType } from "./common";
-
-export type BreakingToken =
- | TokenType.OPEN_PAREN
- | TokenType.CLOSE_PAREN
- | TokenType.COMMA
- | TokenType.SEMICOLON
- | TokenType.SPACE;
-
-export type IsWhitespace<T extends string> = T extends `${TokenType.SPACE}`
- ? true
- : T extends `${TokenType.COMMA}`
- ? true
- : T extends `${TokenType.SEMICOLON}`
- ? true
- : false;
-
-export type ProcessNameCollection<
- Ctx extends LexerCtx,
- Tail extends string,
- _Token extends Token | null
-> = {
- next: Tail;
- nameCollection: "";
- tokens: _Token extends null
- ? [
- ...Ctx["tokens"],
- ...(Ctx["nameCollection"] extends ""
- ? []
- : [Token<TokenType.NAME, Ctx["nameCollection"]>])
- ]
- : [
- ...Ctx["tokens"],
- ...(Ctx["nameCollection"] extends ""
- ? [_Token]
- : [Token<TokenType.NAME, Ctx["nameCollection"]>, _Token])
- ];
-};
-
-export type IsOpen<T> = T extends `${TokenType.OPEN_PAREN}` ? true : false;
-export type IsClose<T> = T extends `${TokenType.CLOSE_PAREN}` ? true : false;
-
-export type ChunkedLex<
- Ctx extends LexerCtx,
- Depth extends any[] = []
-> = Depth["length"] extends 50
- ? Ctx & {
- endChunk: true;
- }
- : Ctx["next"] extends `${infer Head}${infer Tail}`
- ? IsWhitespace<Head> extends true
- ? ChunkedLex<ProcessNameCollection<Ctx, Tail, null>, [0, ...Depth]>
- : IsOpen<Head> extends true
- ? ChunkedLex<
- ProcessNameCollection<Ctx, Tail, Token<TokenType.OPEN_PAREN>>,
- [0, ...Depth]
- >
- : IsClose<Head> extends true
- ? ChunkedLex<
- ProcessNameCollection<Ctx, Tail, Token<TokenType.CLOSE_PAREN>>,
- [0, ...Depth]
- >
- : ChunkedLex<
- {
- next: Tail;
- nameCollection: `${Ctx["nameCollection"]}${Head}`;
- tokens: Ctx["tokens"];
- },
- [0, ...Depth]
- >
- : Ctx;
-
-export type InnerLex<
- Next extends string,
- NameCollection extends LexerCtx["nameCollection"] = "",
- AccTokens extends Token[] = []
-> = Next extends ""
- ? AccTokens
- : ChunkedLex<{
- next: Next;
- tokens: [];
- nameCollection: NameCollection;
- }> extends infer U
- ? U extends LexerCtx & { endChunk: true }
- ? InnerLex<U["next"], U["nameCollection"], [...AccTokens, ...U["tokens"]]>
- : U extends LexerCtx
- ? [...AccTokens, ...U["tokens"]]
- : never
- : never;
-
-export type Lex<Raw extends string> = InnerLex<Raw>;
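
Note on the removed file: it was a purely type-level lexer. Lex<Raw> walks the input string one character at a time, accumulating name characters in nameCollection and flushing them as a NAME token whenever a breaking character (space, comma, semicolon, paren) is hit; ChunkedLex caps each pass at 50 characters and InnerLex resumes from the leftover input, which keeps the recursion under the compiler's instantiation-depth limit. A minimal usage sketch follows; the enum values, Token shape, and LexerCtx below are assumptions about ./common (which is not part of this diff), not the commit's actual code.

// Hypothetical stand-ins for ./common, for illustration only: the real enum
// values and type shapes are not shown in this commit.
enum TokenType {
  OPEN_PAREN = "(",
  CLOSE_PAREN = ")",
  COMMA = ",",
  SEMICOLON = ";",
  SPACE = " ",
  NAME = "name",
}

type Token<T extends TokenType = TokenType, V extends string = string> = {
  type: T;
  value: V;
};

type LexerCtx = {
  next: string;
  nameCollection: string;
  tokens: Token[];
};

// With definitions like these, the deleted Lex type resolved entirely at
// compile time; against the pre-deletion tree, hovering the alias showed the
// token tuple for the input string:
//
//   type Tokens = Lex<"(foo bar)">;
//   // => [Token<TokenType.OPEN_PAREN>, Token<TokenType.NAME, "foo">,
//   //     Token<TokenType.NAME, "bar">, Token<TokenType.CLOSE_PAREN>]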