author     Kai Stevenson <kai@kaistevenson.com>  2025-11-03 23:41:31 -0800
committer  Kai Stevenson <kai@kaistevenson.com>  2025-11-03 23:41:31 -0800
commit     8b610f2bcfc223333254ce9679730c42dce6d26e (patch)
tree       ac1eab726395523f8725bda3d040e22214cba409 /src/lang/ts-lang/core/lexer.ts
parent     56040f3ff85e77311f0c864a89afd63fcf1bdb50 (diff)
add createFn
Diffstat (limited to 'src/lang/ts-lang/core/lexer.ts')
-rw-r--r--  src/lang/ts-lang/core/lexer.ts  84
1 file changed, 84 insertions, 0 deletions
diff --git a/src/lang/ts-lang/core/lexer.ts b/src/lang/ts-lang/core/lexer.ts
new file mode 100644
index 0000000..bcd5785
--- /dev/null
+++ b/src/lang/ts-lang/core/lexer.ts
@@ -0,0 +1,84 @@
+import { LexerCtx, Token, TokenType } from "./common";
+
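+// True when the single character T is a separator the lexer skips: space,
+// comma, or semicolon (assuming those TokenType members are the corresponding
+// single-character string values defined in ./common).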
+export type IsWhitespace<T extends string> = T extends `${TokenType.SPACE}`
+ ? true
+ : T extends `${TokenType.COMMA}`
+ ? true
+ : T extends `${TokenType.SEMICOLON}`
+ ? true
+ : false;
+
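+// Flushes the pending nameCollection into a NAME token (when non-empty),
+// appends the delimiter token _Token that triggered the flush (when not null),
+// and resets the collection so lexing continues from Tail.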
+export type ProcessNameCollection<
+ Ctx extends LexerCtx,
+ Tail extends string,
+ _Token extends Token | null
+> = {
+ next: Tail;
+ nameCollection: "";
+ tokens: _Token extends null
+ ? [
+ ...Ctx["tokens"],
+ ...(Ctx["nameCollection"] extends ""
+ ? []
+ : [Token<TokenType.NAME, Ctx["nameCollection"]>])
+ ]
+ : [
+ ...Ctx["tokens"],
+ ...(Ctx["nameCollection"] extends ""
+ ? [_Token]
+ : [Token<TokenType.NAME, Ctx["nameCollection"]>, _Token])
+ ];
+};
+
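+// Single-character checks for the parenthesis delimiters.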
+export type IsOpen<T> = T extends `${TokenType.OPEN_PAREN}` ? true : false;
+export type IsClose<T> = T extends `${TokenType.CLOSE_PAREN}` ? true : false;
+
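+// Character-by-character lexer. Recursion depth is capped at 50, presumably
+// to stay clear of TypeScript's instantiation-depth limit; at the cap the
+// partially consumed context is returned tagged with `endChunk: true` so the
+// caller can resume it in a fresh instantiation.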
+export type ChunkedLex<
+ Ctx extends LexerCtx,
+ Depth extends any[] = []
+> = Depth["length"] extends 50
+ ? Ctx & {
+ endChunk: true;
+ }
+ : Ctx["next"] extends `${infer Head}${infer Tail}`
+ ? IsWhitespace<Head> extends true
+ ? ChunkedLex<ProcessNameCollection<Ctx, Tail, null>, [0, ...Depth]>
+ : IsOpen<Head> extends true
+ ? ChunkedLex<
+ ProcessNameCollection<Ctx, Tail, Token<TokenType.OPEN_PAREN>>,
+ [0, ...Depth]
+ >
+ : IsClose<Head> extends true
+ ? ChunkedLex<
+ ProcessNameCollection<Ctx, Tail, Token<TokenType.CLOSE_PAREN>>,
+ [0, ...Depth]
+ >
+ : ChunkedLex<
+ {
+ next: Tail;
+ nameCollection: `${Ctx["nameCollection"]}${Head}`;
+ tokens: Ctx["tokens"];
+ },
+ [0, ...Depth]
+ >
+ : Ctx;
+
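+// Repeatedly invokes ChunkedLex, carrying accumulated tokens and any
+// half-built name across 50-step chunks until the input is fully consumed.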
+export type InnerLex<
+ Next extends string,
+ NameCollection extends LexerCtx["nameCollection"] = "",
+ AccTokens extends Token[] = []
+> = Next extends ""
+ ? AccTokens
+ : ChunkedLex<{
+ next: Next;
+ tokens: [];
+ nameCollection: NameCollection;
+ }> extends infer U
+ ? U extends LexerCtx & { endChunk: true }
+ ? InnerLex<U["next"], U["nameCollection"], [...AccTokens, ...U["tokens"]]>
+ : U extends LexerCtx
+ ? [...AccTokens, ...U["tokens"]]
+ : never
+ : never;
+
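+// Entry point. A trailing semicolon (treated as whitespace) guarantees the
+// final nameCollection is flushed. For example, assuming the Token/TokenType
+// shapes from ./common, Lex<"(add foo)"> yields
+// [Token<TokenType.OPEN_PAREN>, Token<TokenType.NAME, "add">,
+//  Token<TokenType.NAME, "foo">, Token<TokenType.CLOSE_PAREN>].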
+export type Lex<Raw extends string> = InnerLex<`${Raw};`>;