Mirror of https://github.com/odin-lang/Odin.git
Correct consume comment groups in both parsers
@@ -348,27 +348,30 @@ consume_comment_group :: proc(p: ^Parser, n: int) -> (comments: ^ast.Comment_Gro
 }
 
 consume_comment_groups :: proc(p: ^Parser, prev: tokenizer.Token) {
-	if p.curr_tok.kind == .Comment {
-		comment: ^ast.Comment_Group
-		end_line := 0
-
-		if p.curr_tok.pos.line == prev.pos.line {
-			comment, end_line = consume_comment_group(p, 0)
-			if p.curr_tok.pos.line != end_line || p.curr_tok.kind == .EOF {
-				p.line_comment = comment
-			}
-		}
-
-		end_line = -1
-		for p.curr_tok.kind == .Comment {
-			comment, end_line = consume_comment_group(p, 1)
-		}
-		if end_line+1 >= p.curr_tok.pos.line || end_line < 0 {
-			p.lead_comment = comment
-		}
-
-		assert(p.curr_tok.kind != .Comment)
+	if p.curr_tok.kind != .Comment {
+		return
 	}
+	comment: ^ast.Comment_Group
+	end_line := 0
+
+	if p.curr_tok.pos.line == prev.pos.line {
+		comment, end_line = consume_comment_group(p, 0)
+		if p.curr_tok.pos.line != end_line ||
+		   p.curr_tok.pos.line == prev.pos.line+1 ||
+		   p.curr_tok.kind == .EOF {
+			p.line_comment = comment
+		}
+	}
+
+	end_line = -1
+	for p.curr_tok.kind == .Comment {
+		comment, end_line = consume_comment_group(p, 1)
+	}
+	if end_line+1 >= p.curr_tok.pos.line || end_line < 0 {
+		p.lead_comment = comment
+	}
+
+	assert(p.curr_tok.kind != .Comment)
 }
 
 advance_token :: proc(p: ^Parser) -> tokenizer.Token {
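
Aside, not part of the patch: the two fields assigned above describe where a comment group sits relative to the surrounding tokens. A rough, hypothetical Odin snippet illustrating both placements (the `add` procedure and its comments are purely for illustration):

package main

import "core:fmt"

// A comment group on its own lines, directly above the next declaration,
// is the kind of group that can end up as its lead comment (lead_comment).
add :: proc(a, b: int) -> int {
	return a + b // begins on the same line as the previous token, so it is a line_comment candidate
}

main :: proc() {
	fmt.println(add(1, 2))
}
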
@@ -1436,27 +1436,30 @@ gb_internal CommentGroup *consume_comment_group(AstFile *f, isize n, isize *end_
 }
 
 gb_internal void consume_comment_groups(AstFile *f, Token prev) {
-	if (f->curr_token.kind == Token_Comment) {
-		CommentGroup *comment = nullptr;
-		isize end_line = 0;
-
-		if (f->curr_token.pos.line == prev.pos.line) {
-			comment = consume_comment_group(f, 0, &end_line);
-			if (f->curr_token.pos.line != end_line || f->curr_token.kind == Token_EOF) {
-				f->line_comment = comment;
-			}
-		}
-
-		end_line = -1;
-		while (f->curr_token.kind == Token_Comment) {
-			comment = consume_comment_group(f, 1, &end_line);
-		}
-		if (end_line+1 == f->curr_token.pos.line || end_line < 0) {
-			f->lead_comment = comment;
-		}
-
-		GB_ASSERT(f->curr_token.kind != Token_Comment);
+	if (f->curr_token.kind != Token_Comment) {
+		return;
 	}
+	CommentGroup *comment = nullptr;
+	isize end_line = 0;
+
+	if (f->curr_token.pos.line == prev.pos.line) {
+		comment = consume_comment_group(f, 0, &end_line);
+		if (f->curr_token.pos.line != end_line ||
+		    f->curr_token.pos.line == prev.pos.line+1 ||
+		    f->curr_token.kind == Token_EOF) {
+			f->line_comment = comment;
+		}
+	}
+
+	end_line = -1;
+	while (f->curr_token.kind == Token_Comment) {
+		comment = consume_comment_group(f, 1, &end_line);
+	}
+	if (end_line+1 == f->curr_token.pos.line || end_line < 0) {
+		f->lead_comment = comment;
+	}
+
+	GB_ASSERT(f->curr_token.kind != Token_Comment);
 }
 
 gb_internal gb_inline bool ignore_newlines(AstFile *f) {
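
Both hunks make the same two adjustments: the body is no longer wrapped in an `if kind == Comment` block (an early return now handles the non-comment case), and the line-comment condition additionally accepts a current token that begins on the line directly after `prev`. Below is a minimal sketch of the consume-loop shape only, using simplified, hypothetical types rather than the real parser API:

package main

import "core:fmt"

Token_Kind :: enum {
	Ident,
	Comment,
	EOF,
}

Token :: struct {
	kind: Token_Kind,
	line: int,
}

// Hypothetical, simplified consume loop: return early when the current token is
// not a comment, otherwise walk the run of comment tokens and report the line on
// which the last one sat (-1 when none were consumed).
skip_comments :: proc(tokens: []Token, i: ^int) -> (last_comment_line: int) {
	last_comment_line = -1
	if i^ >= len(tokens) || tokens[i^].kind != .Comment {
		return
	}
	for i^ < len(tokens) && tokens[i^].kind == .Comment {
		last_comment_line = tokens[i^].line
		i^ += 1
	}
	return
}

main :: proc() {
	tokens := []Token{{.Comment, 1}, {.Comment, 2}, {.Ident, 4}, {.EOF, 4}}
	i := 0
	fmt.println(skip_comments(tokens, &i)) // 2: the last comment token was on line 2
	fmt.println(tokens[i].kind)            // Ident: the loop stopped at the first non-comment token
}

The guard clause keeps the rest of the body at one indentation level, which is the main cosmetic effect visible in both hunks.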