Skip to content
This repository was archived by the owner on May 28, 2025. It is now read-only.

Commit fcbbec6

Browse files
committed
Auto merge of rust-lang#14625 - jhgg:fix/token-conversion-for-doc-comments, r=Veykril
mbe: fix token conversion for doc comments. Fixes rust-lang#14611. When creating token trees for the converted doc comment, we should use the correct span in all places rather than allowing some to remain unspecified; otherwise, things behave incorrectly.
2 parents af3b6a0 + a497e9a commit fcbbec6

File tree

2 files changed

+64
-36
lines changed

2 files changed

+64
-36
lines changed

crates/ide/src/goto_definition.rs

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -850,6 +850,32 @@ fn foo() {}
850850
);
851851
}
852852

853+
#[test]
854+
fn goto_through_included_file_struct_with_doc_comment() {
855+
check(
856+
r#"
857+
//- /main.rs
858+
#[rustc_builtin_macro]
859+
macro_rules! include {}
860+
861+
include!("foo.rs");
862+
863+
fn f() {
864+
let x = Foo$0;
865+
}
866+
867+
mod confuse_index {
868+
pub struct Foo;
869+
}
870+
871+
//- /foo.rs
872+
/// This is a doc comment
873+
pub struct Foo;
874+
//^^^
875+
"#,
876+
);
877+
}
878+
853879
#[test]
854880
fn goto_for_type_param() {
855881
check(

crates/mbe/src/syntax_bridge.rs

Lines changed: 38 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -190,20 +190,13 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
190190

191191
let kind = token.kind(conv);
192192
if kind == COMMENT {
193-
if let Some(tokens) = conv.convert_doc_comment(&token) {
194-
// FIXME: There has to be a better way to do this
195-
// Add the comments token id to the converted doc string
193+
// Since `convert_doc_comment` can fail, we need to peek the next id, so that we can
194+
// figure out which token id to use for the doc comment, if it is converted successfully.
195+
let next_id = conv.id_alloc().peek_next_id();
196+
if let Some(tokens) = conv.convert_doc_comment(&token, next_id) {
196197
let id = conv.id_alloc().alloc(range, synth_id);
197-
result.extend(tokens.into_iter().map(|mut tt| {
198-
if let tt::TokenTree::Subtree(sub) = &mut tt {
199-
if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
200-
sub.token_trees.get_mut(2)
201-
{
202-
lit.span = id
203-
}
204-
}
205-
tt
206-
}));
198+
debug_assert_eq!(id, next_id);
199+
result.extend(tokens);
207200
}
208201
continue;
209202
}
@@ -382,49 +375,46 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
382375
text.into()
383376
}
384377

385-
fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
378+
fn convert_doc_comment(
379+
token: &syntax::SyntaxToken,
380+
span: tt::TokenId,
381+
) -> Option<Vec<tt::TokenTree>> {
386382
cov_mark::hit!(test_meta_doc_comments);
387383
let comment = ast::Comment::cast(token.clone())?;
388384
let doc = comment.kind().doc?;
389385

390386
// Make `doc="\" Comments\""
391-
let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
387+
let meta_tkns =
388+
vec![mk_ident("doc", span), mk_punct('=', span), mk_doc_literal(&comment, span)];
392389

393390
// Make `#![]`
394391
let mut token_trees = Vec::with_capacity(3);
395-
token_trees.push(mk_punct('#'));
392+
token_trees.push(mk_punct('#', span));
396393
if let ast::CommentPlacement::Inner = doc {
397-
token_trees.push(mk_punct('!'));
394+
token_trees.push(mk_punct('!', span));
398395
}
399396
token_trees.push(tt::TokenTree::from(tt::Subtree {
400-
delimiter: tt::Delimiter {
401-
open: tt::TokenId::UNSPECIFIED,
402-
close: tt::TokenId::UNSPECIFIED,
403-
kind: tt::DelimiterKind::Bracket,
404-
},
397+
delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
405398
token_trees: meta_tkns,
406399
}));
407400

408401
return Some(token_trees);
409402

410403
// Helper functions
411-
fn mk_ident(s: &str) -> tt::TokenTree {
412-
tt::TokenTree::from(tt::Leaf::from(tt::Ident {
413-
text: s.into(),
414-
span: tt::TokenId::unspecified(),
415-
}))
404+
fn mk_ident(s: &str, span: tt::TokenId) -> tt::TokenTree {
405+
tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), span }))
416406
}
417407

418-
fn mk_punct(c: char) -> tt::TokenTree {
408+
fn mk_punct(c: char, span: tt::TokenId) -> tt::TokenTree {
419409
tt::TokenTree::from(tt::Leaf::from(tt::Punct {
420410
char: c,
421411
spacing: tt::Spacing::Alone,
422-
span: tt::TokenId::unspecified(),
412+
span,
423413
}))
424414
}
425415

426-
fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
427-
let lit = tt::Literal { text: doc_comment_text(comment), span: tt::TokenId::unspecified() };
416+
fn mk_doc_literal(comment: &ast::Comment, span: tt::TokenId) -> tt::TokenTree {
417+
let lit = tt::Literal { text: doc_comment_text(comment), span };
428418

429419
tt::TokenTree::from(tt::Leaf::from(lit))
430420
}
@@ -480,6 +470,10 @@ impl TokenIdAlloc {
480470
}
481471
}
482472
}
473+
474+
fn peek_next_id(&self) -> tt::TokenId {
475+
tt::TokenId(self.next_id)
476+
}
483477
}
484478

485479
/// A raw token (straight from lexer) converter
@@ -502,7 +496,11 @@ trait SrcToken<Ctx>: std::fmt::Debug {
502496
trait TokenConverter: Sized {
503497
type Token: SrcToken<Self>;
504498

505-
fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
499+
fn convert_doc_comment(
500+
&self,
501+
token: &Self::Token,
502+
span: tt::TokenId,
503+
) -> Option<Vec<tt::TokenTree>>;
506504

507505
fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
508506

@@ -532,9 +530,9 @@ impl<'a> SrcToken<RawConverter<'a>> for usize {
532530
impl<'a> TokenConverter for RawConverter<'a> {
533531
type Token = usize;
534532

535-
fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
533+
fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> {
536534
let text = self.lexed.text(token);
537-
convert_doc_comment(&doc_comment(text))
535+
convert_doc_comment(&doc_comment(text), span)
538536
}
539537

540538
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -681,8 +679,12 @@ impl SrcToken<Converter> for SynToken {
681679

682680
impl TokenConverter for Converter {
683681
type Token = SynToken;
684-
fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
685-
convert_doc_comment(token.token()?)
682+
fn convert_doc_comment(
683+
&self,
684+
token: &Self::Token,
685+
span: tt::TokenId,
686+
) -> Option<Vec<tt::TokenTree>> {
687+
convert_doc_comment(token.token()?, span)
686688
}
687689

688690
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {

0 commit comments

Comments
 (0)