-rw-r--r--   src/lang/core/eval.ts  |  2
-rw-r--r--   src/lang/core/lexer.ts | 61
2 files changed, 46 insertions, 17 deletions
diff --git a/src/lang/core/eval.ts b/src/lang/core/eval.ts
index 981aef8..9181c81 100644
--- a/src/lang/core/eval.ts
+++ b/src/lang/core/eval.ts
@@ -105,7 +105,7 @@ export type GetEvaluatedChildren<
     : never;
 
 const input =
-  `map(arr("hello","world"),fn(s,i,add(tostring(i),":",s)))` as const;
+  `map(arr("hello", "world"), fn(s, i, add(tostring(i), ":", s)))` as const;
 const lex_result = null as unknown as Lex<typeof input>;
 const parse_result = null as unknown as Parse<typeof lex_result>;
 const eval_result = null as unknown as Evaluate<typeof parse_result>;
diff --git a/src/lang/core/lexer.ts b/src/lang/core/lexer.ts
index 33a408a..567964f 100644
--- a/src/lang/core/lexer.ts
+++ b/src/lang/core/lexer.ts
@@ -40,23 +40,52 @@ export type ProcessNameCollection<
 export type IsOpen<T> = T extends `${TokenType.OPEN_PAREN}` ? true : false;
 export type IsClose<T> = T extends `${TokenType.CLOSE_PAREN}` ? true : false;
 
-export type _Lex<Ctx extends LexerCtx> =
-  Ctx["next"] extends `${infer Head}${infer Tail}`
-    ? IsWhitespace<Head> extends true
-      ? _Lex<ProcessNameCollection<Ctx, Tail, null>>
-      : IsOpen<Head> extends true
-      ? _Lex<ProcessNameCollection<Ctx, Tail, Token<TokenType.OPEN_PAREN>>>
-      : IsClose<Head> extends true
-      ? _Lex<ProcessNameCollection<Ctx, Tail, Token<TokenType.CLOSE_PAREN>>>
-      : _Lex<{
+export type ChunkedLex<
+  Ctx extends LexerCtx,
+  Depth extends any[] = []
+> = Depth["length"] extends 50
+  ? Ctx & {
+      endChunk: true;
+    }
+  : Ctx["next"] extends `${infer Head}${infer Tail}`
+  ? IsWhitespace<Head> extends true
+    ? ChunkedLex<ProcessNameCollection<Ctx, Tail, null>, [0, ...Depth]>
+    : IsOpen<Head> extends true
+    ? ChunkedLex<
+        ProcessNameCollection<Ctx, Tail, Token<TokenType.OPEN_PAREN>>,
+        [0, ...Depth]
+      >
+    : IsClose<Head> extends true
+    ? ChunkedLex<
+        ProcessNameCollection<Ctx, Tail, Token<TokenType.CLOSE_PAREN>>,
+        [0, ...Depth]
+      >
+    : ChunkedLex<
+        {
          next: Tail;
          nameCollection: `${Ctx["nameCollection"]}${Head}`;
          tokens: Ctx["tokens"];
-        }>
-    : Ctx["tokens"];
+        },
+        [0, ...Depth]
+      >
+  : Ctx;
 
-export type Lex<Raw extends string> = _Lex<{
-  next: `${Raw};`;
-  tokens: [];
-  nameCollection: "";
-}>;
+export type InnerLex<
+  Next extends string,
+  NameCollection extends LexerCtx["nameCollection"] = "",
+  AccTokens extends Token[] = []
+> = Next extends ""
+  ? AccTokens
+  : ChunkedLex<{
+      next: Next;
+      tokens: [];
+      nameCollection: NameCollection;
+    }> extends infer U
+  ? U extends LexerCtx & { endChunk: true }
+    ? InnerLex<U["next"], U["nameCollection"], [...AccTokens, ...U["tokens"]]>
+    : U extends LexerCtx
+    ? [...AccTokens, ...U["tokens"]]
+    : never
+  : never;
+
+export type Lex<Raw extends string> = InnerLex<Raw>;
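
Note on the lexer.ts change (this note and the sketch below are commentary, not part of the commit): the rewrite trades one unbounded type-level recursion for fixed-size chunks. ChunkedLex counts its own steps with a tuple accumulator and, after 50 iterations, stops and tags the intermediate lexer state with endChunk: true instead of recursing further. InnerLex then restarts ChunkedLex from that saved state and splices each chunk's tokens into an accumulator, so no single instantiation chain grows past the chunk size and TypeScript's recursion limit is avoided. A minimal, self-contained sketch of the same pattern, using illustrative names (ChunkState, SplitChunk, SplitChars) that are not from this repository:

type ChunkState = { rest: string; out: string[]; endChunk?: true };

// Recurse for at most 50 characters, then hand the partial state back to the driver.
type SplitChunk<S extends ChunkState, Depth extends any[] = []> =
  Depth["length"] extends 50
    ? S & { endChunk: true }
    : S["rest"] extends `${infer Head}${infer Tail}`
      ? SplitChunk<{ rest: Tail; out: [...S["out"], Head] }, [0, ...Depth]>
      : S;

// Driver: keep launching fresh chunks until the input is exhausted,
// splicing each chunk's output into the running accumulator.
type SplitChars<Raw extends string, Acc extends string[] = []> =
  SplitChunk<{ rest: Raw; out: [] }> extends infer U
    ? U extends ChunkState & { endChunk: true }
      ? SplitChars<U["rest"], [...Acc, ...U["out"]]>
      : U extends ChunkState
        ? [...Acc, ...U["out"]]
        : never
    : never;

// Resolves to ["a", "b", "c"]; the same pattern handles inputs far longer
// than one recursion budget would allow.
type Demo = SplitChars<"abc">;

The tuple counter works because a tuple's length is a literal number type, so Depth["length"] extends 50 flips to true after exactly 50 steps; restarting from the returned state resets the counter without losing output already produced.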
