Skip to content

Commit 0bd6b33

Browse files
committed
update TypeMapped AST
1 parent b8b76e3 commit 0bd6b33

File tree

4 files changed

+123
-117
lines changed

4 files changed

+123
-117
lines changed

crates/emmylua_parser/src/grammar/doc/test.rs

Lines changed: 37 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -3076,21 +3076,18 @@ Syntax(Chunk)@0..102
30763076
fn test_generic_in() {
30773077
let code: &str = r#"
30783078
---@alias Pick1<T, K extends keyof T> {
3079-
--- [P in K]: T[P];
3079+
--- readonly [P in K]+?: T[P];
30803080
---}
30813081
"#;
30823082
// print_ast(code);
3083-
// print_ast(r#"
3084-
// ---@alias Pick1<T: table>
3085-
// "#);
30863083
let result = r#"
3087-
Syntax(Chunk)@0..101
3088-
Syntax(Block)@0..101
3084+
Syntax(Chunk)@0..110
3085+
Syntax(Block)@0..110
30893086
Token(TkEndOfLine)@0..1 "\n"
30903087
Token(TkWhitespace)@1..9 " "
3091-
Syntax(Comment)@9..92
3088+
Syntax(Comment)@9..101
30923089
Token(TkDocStart)@9..13 "---@"
3093-
Syntax(DocTagAlias)@13..92
3090+
Syntax(DocTagAlias)@13..101
30943091
Token(TkTagAlias)@13..18 "alias"
30953092
Token(TkWhitespace)@18..19 " "
30963093
Token(TkName)@19..24 "Pick1"
@@ -3112,39 +3109,42 @@ Syntax(Chunk)@0..101
31123109
Token(TkName)@44..45 "T"
31133110
Token(TkGt)@45..46 ">"
31143111
Token(TkWhitespace)@46..47 " "
3115-
Syntax(TypeMapped)@47..92
3112+
Syntax(TypeMapped)@47..101
31163113
Token(TkLeftBrace)@47..48 "{"
31173114
Token(TkEndOfLine)@48..49 "\n"
31183115
Token(TkWhitespace)@49..57 " "
3119-
Token(TkDocContinue)@57..64 "--- "
3120-
Syntax(DocMappedKey)@64..72
3121-
Token(TkLeftBracket)@64..65 "["
3122-
Syntax(DocGenericParameter)@65..71
3123-
Token(TkName)@65..66 "P"
3124-
Token(TkWhitespace)@66..67 " "
3125-
Token(TkIn)@67..69 "in"
3126-
Token(TkWhitespace)@69..70 " "
3127-
Syntax(TypeName)@70..71
3128-
Token(TkName)@70..71 "K"
3129-
Token(TkRightBracket)@71..72 "]"
3130-
Token(TkColon)@72..73 ":"
3131-
Token(TkWhitespace)@73..74 " "
3132-
Syntax(TypeIndexAccess)@74..78
3133-
Syntax(TypeName)@74..75
3134-
Token(TkName)@74..75 "T"
3135-
Token(TkLeftBracket)@75..76 "["
3136-
Syntax(TypeName)@76..77
3137-
Token(TkName)@76..77 "P"
3138-
Token(TkRightBracket)@77..78 "]"
3139-
Token(TkSemicolon)@78..79 ";"
3140-
Token(TkEndOfLine)@79..80 "\n"
3141-
Token(TkWhitespace)@80..88 " "
3142-
Token(TkDocContinue)@88..91 "---"
3143-
Token(TkRightBrace)@91..92 "}"
3144-
Token(TkEndOfLine)@92..93 "\n"
3145-
Token(TkWhitespace)@93..101 " "
3116+
Token(TkDocContinue)@57..62 "--- "
3117+
Token(TkDocReadonly)@62..70 "readonly"
3118+
Token(TkWhitespace)@70..71 " "
3119+
Syntax(DocMappedKey)@71..79
3120+
Token(TkLeftBracket)@71..72 "["
3121+
Syntax(DocGenericParameter)@72..78
3122+
Token(TkName)@72..73 "P"
3123+
Token(TkWhitespace)@73..74 " "
3124+
Token(TkIn)@74..76 "in"
3125+
Token(TkWhitespace)@76..77 " "
3126+
Syntax(TypeName)@77..78
3127+
Token(TkName)@77..78 "K"
3128+
Token(TkRightBracket)@78..79 "]"
3129+
Token(TkPlus)@79..80 "+"
3130+
Token(TkDocQuestion)@80..81 "?"
3131+
Token(TkColon)@81..82 ":"
3132+
Token(TkWhitespace)@82..83 " "
3133+
Syntax(TypeIndexAccess)@83..87
3134+
Syntax(TypeName)@83..84
3135+
Token(TkName)@83..84 "T"
3136+
Token(TkLeftBracket)@84..85 "["
3137+
Syntax(TypeName)@85..86
3138+
Token(TkName)@85..86 "P"
3139+
Token(TkRightBracket)@86..87 "]"
3140+
Token(TkSemicolon)@87..88 ";"
3141+
Token(TkEndOfLine)@88..89 "\n"
3142+
Token(TkWhitespace)@89..97 " "
3143+
Token(TkDocContinue)@97..100 "---"
3144+
Token(TkRightBrace)@100..101 "}"
3145+
Token(TkEndOfLine)@101..102 "\n"
3146+
Token(TkWhitespace)@102..110 " "
31463147
"#;
3147-
31483148
assert_ast_eq!(code, result);
31493149
}
31503150
}

crates/emmylua_parser/src/grammar/doc/types.rs

Lines changed: 65 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ use crate::{
33
grammar::DocParseResult,
44
kind::{LuaOpKind, LuaSyntaxKind, LuaTokenKind, LuaTypeBinaryOperator, LuaTypeUnaryOperator},
55
lexer::LuaDocLexerState,
6-
parser::{CompleteMarker, LuaDocParser, LuaDocParserState, MarkerEventContainer},
6+
parser::{CompleteMarker, LuaDocParser, LuaDocParserState, Marker, MarkerEventContainer},
77
parser_error::LuaParseError,
88
};
99

@@ -142,13 +142,7 @@ fn parse_simple_type(p: &mut LuaDocParser) -> DocParseResult {
142142

143143
fn parse_primary_type(p: &mut LuaDocParser) -> DocParseResult {
144144
match p.current_token() {
145-
LuaTokenKind::TkLeftBrace => {
146-
if is_mapped_type_start(p) {
147-
parse_mapped_type(p)
148-
} else {
149-
parse_object_type(p)
150-
}
151-
}
145+
LuaTokenKind::TkLeftBrace => parse_object_or_mapped_type(p),
152146
LuaTokenKind::TkLeftBracket => parse_tuple_type(p),
153147
LuaTokenKind::TkLeftParen => parse_paren_type(p),
154148
LuaTokenKind::TkString
@@ -171,87 +165,45 @@ fn parse_primary_type(p: &mut LuaDocParser) -> DocParseResult {
171165
}
172166
}
173167

174-
/// 判断是否是映射类型.
175-
///
176-
/// 这里与`TS`保持一致, 即第一个 key 必须为`[]` 且内部必须包含 `in`.
177-
fn is_mapped_type_start(p: &LuaDocParser) -> bool {
178-
let text = p.origin_text();
179-
let start = p.current_token_range().end_offset();
180-
let rest = match text.get(start..) {
181-
Some(value) => value,
182-
None => return false,
183-
};
184-
185-
let mut trimmed = rest.trim_start_matches(char::is_whitespace);
186-
187-
if let Some(after_dashes) = trimmed.strip_prefix("---") {
188-
trimmed = after_dashes.trim_start_matches(char::is_whitespace);
189-
}
190-
191-
if !trimmed.starts_with('[') {
192-
return false;
193-
}
194-
195-
let after_left_bracket = &trimmed[1..];
196-
let rb_pos = match after_left_bracket.find(']') {
197-
Some(pos) => pos,
198-
None => return false,
199-
};
200-
201-
let content = &after_left_bracket[..rb_pos];
202-
let mut seen_identifier = false;
203-
let mut token_start: Option<usize> = None;
168+
// [Property in Type]: Type;
169+
// [Property in keyof Type]: Type;
170+
fn parse_mapped_type(p: &mut LuaDocParser, m: Marker) -> DocParseResult {
171+
p.set_parser_state(LuaDocParserState::Mapped);
204172

205-
for (idx, ch) in content.char_indices() {
206-
if ch == '_' || ch.is_alphanumeric() {
207-
if token_start.is_none() {
208-
token_start = Some(idx);
209-
}
210-
continue;
173+
match p.current_token() {
174+
LuaTokenKind::TkPlus | LuaTokenKind::TkMinus => {
175+
p.bump();
176+
expect_token(p, LuaTokenKind::TkDocReadonly)?;
211177
}
212-
213-
if let Some(start_idx) = token_start.take() {
214-
let token = &content[start_idx..idx];
215-
if token == "in" {
216-
return seen_identifier;
217-
}
218-
seen_identifier = true;
178+
LuaTokenKind::TkDocReadonly => {
179+
p.bump();
219180
}
220-
}
221-
222-
if let Some(start_idx) = token_start {
223-
if &content[start_idx..] == "in" {
224-
return seen_identifier;
181+
LuaTokenKind::TkLeftBracket => {}
182+
_ => {
183+
return Err(LuaParseError::doc_error_from(
184+
&t!("expect mapped field"),
185+
p.current_token_range(),
186+
));
225187
}
226188
}
227189

228-
false
229-
}
230-
231-
// [Property in Type]: Type;
232-
// [Property in keyof Type]: Type;
233-
fn parse_mapped_type(p: &mut LuaDocParser) -> DocParseResult {
234-
p.set_parser_state(LuaDocParserState::Mapped);
235-
let m = p.mark(LuaSyntaxKind::TypeMapped);
236-
p.bump();
237-
238-
if p.current_token() != LuaTokenKind::TkLeftBracket {
239-
return Err(LuaParseError::doc_error_from(
240-
&t!("expect mapped field"),
241-
p.current_token_range(),
242-
));
243-
}
244-
// key
245190
parse_mapped_key(p)?;
246-
if p.current_token() == LuaTokenKind::TkDocQuestion {
247-
p.bump();
191+
192+
match p.current_token() {
193+
LuaTokenKind::TkPlus | LuaTokenKind::TkMinus => {
194+
p.bump();
195+
expect_token(p, LuaTokenKind::TkDocQuestion)?;
196+
}
197+
LuaTokenKind::TkDocQuestion => {
198+
p.bump();
199+
}
200+
_ => {}
248201
}
202+
249203
expect_token(p, LuaTokenKind::TkColon)?;
250204

251-
// value
252205
parse_type(p)?;
253206

254-
// end
255207
expect_token(p, LuaTokenKind::TkSemicolon)?;
256208
expect_token(p, LuaTokenKind::TkRightBrace)?;
257209

@@ -281,11 +233,27 @@ fn parse_mapped_key(p: &mut LuaDocParser) -> DocParseResult {
281233

282234
// { <name>: <type>, ... }
283235
// { <name> : <type>, ... }
284-
fn parse_object_type(p: &mut LuaDocParser) -> DocParseResult {
285-
let m = p.mark(LuaSyntaxKind::TypeObject);
236+
fn parse_object_or_mapped_type(p: &mut LuaDocParser) -> DocParseResult {
237+
p.set_lexer_state(LuaDocLexerState::Mapped);
238+
let mut m = p.mark(LuaSyntaxKind::TypeObject);
286239
p.bump();
240+
p.set_lexer_state(LuaDocLexerState::Normal);
287241

288242
if p.current_token() != LuaTokenKind::TkRightBrace {
243+
match p.current_token() {
244+
LuaTokenKind::TkPlus | LuaTokenKind::TkMinus | LuaTokenKind::TkDocReadonly => {
245+
m.set_kind(p, LuaSyntaxKind::TypeMapped);
246+
return parse_mapped_type(p, m);
247+
}
248+
LuaTokenKind::TkLeftBracket => {
249+
if is_mapped_type(p) {
250+
m.set_kind(p, LuaSyntaxKind::TypeMapped);
251+
return parse_mapped_type(p, m);
252+
}
253+
}
254+
_ => {}
255+
}
256+
289257
parse_typed_field(p)?;
290258
while p.current_token() == LuaTokenKind::TkComma {
291259
p.bump();
@@ -301,6 +269,24 @@ fn parse_object_type(p: &mut LuaDocParser) -> DocParseResult {
301269
Ok(m.complete(p))
302270
}
303271

272+
/// 判断是否为 mapped type
273+
fn is_mapped_type(p: &LuaDocParser) -> bool {
274+
let mut lexer = p.lexer.clone();
275+
276+
loop {
277+
let kind = lexer.lex();
278+
match kind {
279+
LuaTokenKind::TkIn => return true,
280+
LuaTokenKind::TkLeftBracket | LuaTokenKind::TkRightBracket => return false,
281+
LuaTokenKind::TkEof => return false,
282+
LuaTokenKind::TkWhitespace
283+
| LuaTokenKind::TkDocContinue
284+
| LuaTokenKind::TkEndOfLine => {}
285+
_ => {}
286+
}
287+
}
288+
}
289+
304290
// <name> : <type>
305291
// [<number>] : <type>
306292
// [<string>] : <type>

crates/emmylua_parser/src/lexer/lua_doc_lexer.rs

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ pub enum LuaDocLexerState {
2727
Source,
2828
NormalDescription,
2929
CastExpr,
30+
Mapped,
3031
}
3132

3233
impl LuaDocLexer<'_> {
@@ -73,6 +74,7 @@ impl LuaDocLexer<'_> {
7374
LuaDocLexerState::Source => self.lex_source(),
7475
LuaDocLexerState::NormalDescription => self.lex_normal_description(),
7576
LuaDocLexerState::CastExpr => self.lex_cast_expr(),
77+
LuaDocLexerState::Mapped => self.lex_mapped(),
7678
}
7779
}
7880

@@ -581,6 +583,24 @@ impl LuaDocLexer<'_> {
581583
_ => self.lex_normal(),
582584
}
583585
}
586+
587+
fn lex_mapped(&mut self) -> LuaTokenKind {
588+
let reader = self.reader.as_mut().unwrap();
589+
match reader.current_char() {
590+
ch if is_doc_whitespace(ch) => {
591+
reader.eat_while(is_doc_whitespace);
592+
LuaTokenKind::TkWhitespace
593+
}
594+
ch if is_name_start(ch) => {
595+
let (text, _) = read_doc_name(reader);
596+
match text {
597+
"readonly" => LuaTokenKind::TkDocReadonly,
598+
_ => LuaTokenKind::TkName,
599+
}
600+
}
601+
_ => self.lex_normal(),
602+
}
603+
}
584604
}
585605

586606
fn to_tag(text: &str) -> LuaTokenKind {

crates/emmylua_parser/src/parser/lua_doc_parser.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ impl<'b> LuaDocParser<'_, 'b> {
9191
}
9292

9393
match self.lexer.state {
94-
LuaDocLexerState::Normal | LuaDocLexerState::Version => {
94+
LuaDocLexerState::Normal | LuaDocLexerState::Version | LuaDocLexerState::Mapped => {
9595
while matches!(
9696
self.current_token,
9797
LuaTokenKind::TkDocContinue

0 commit comments

Comments (0)