@@ -86,7 +86,7 @@ impl Lit {
         }
     }
 
-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
         mem::discriminant(self) == mem::discriminant(other)
@@ -502,87 +502,7 @@ impl Token {
         }
     }
 
-    pub fn interpolated_to_tokenstream(sess: &ParseSess, nt: Lrc<Nonterminal>, span: Span)
-        -> TokenStream {
-        // An `Interpolated` token means that we have a `Nonterminal`
-        // which is often a parsed AST item. At this point we now need
-        // to convert the parsed AST to an actual token stream, e.g.
-        // un-parse it basically.
-        //
-        // Unfortunately there's not really a great way to do that in a
-        // guaranteed lossless fashion right now. The fallback here is
-        // to just stringify the AST node and reparse it, but this loses
-        // all span information.
-        //
-        // As a result, some AST nodes are annotated with the token
-        // stream they came from. Here we attempt to extract these
-        // lossless token streams before we fall back to the
-        // stringification.
-        let tokens = match *nt {
-            Nonterminal::NtItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtTraitItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtImplItem(ref item) => {
-                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-            }
-            Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Token::Ident(ident, is_raw);
-                Some(TokenTree::Token(ident.span, token).into())
-            }
-            Nonterminal::NtLifetime(ident) => {
-                let token = Token::Lifetime(ident);
-                Some(TokenTree::Token(ident.span, token).into())
-            }
-            Nonterminal::NtTT(ref tt) => {
-                Some(tt.clone().into())
-            }
-            _ => None,
-        };
-
-        // FIXME(#43081): Avoid this pretty-print + reparse hack
-        let source = pprust::nonterminal_to_string(&nt);
-        let filename = FileName::macro_expansion_source_code(&source);
-        let (tokens_for_real, errors) =
-            parse_stream_from_source_str(filename, source, sess, Some(span));
-        emit_unclosed_delims(&errors, &sess.span_diagnostic);
-
-        // During early phases of the compiler the AST could get modified
-        // directly (e.g., attributes added or removed) and the internal cache
-        // of tokens may not be invalidated or updated. Consequently if the
-        // "lossless" token stream disagrees with our actual stringification
-        // (which has historically been much more battle-tested) then we go
-        // with the lossy stream anyway (losing span information).
-        //
-        // Note that the comparison isn't `==` here to avoid comparing spans,
-        // but it *also* is a "probable" equality which is a pretty weird
-        // definition. We mostly want to catch actual changes to the AST
-        // like a `#[cfg]` being processed or some weird `macro_rules!`
-        // expansion.
-        //
-        // What we *don't* want to catch is the fact that a user-defined
-        // literal like `0xf` is stringified as `15`, causing the cached token
-        // stream to not be literal `==` token-wise (ignoring spans) to the
-        // token stream we got from stringification.
-        //
-        // Instead the "probably equal" check here is "does each token
-        // recursively have the same discriminant?" We basically don't look at
-        // the token values here and assume that such fine grained token stream
-        // modifications, including adding/removing typically non-semantic
-        // tokens such as extra braces and commas, don't happen.
-        if let Some(tokens) = tokens {
-            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
-                return tokens
-            }
-            info!("cached tokens found, but they're not \"probably equal\", \
-                   going with stringified version");
-        }
-        return tokens_for_real
-    }
-
-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
         if mem::discriminant(self) != mem::discriminant(other) {
@@ -714,6 +634,85 @@ impl fmt::Debug for Nonterminal {
     }
 }
 
+impl Nonterminal {
+    pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
+        // A `Nonterminal` is often a parsed AST item. At this point we now
+        // need to convert the parsed AST to an actual token stream, e.g.
+        // un-parse it basically.
+        //
+        // Unfortunately there's not really a great way to do that in a
+        // guaranteed lossless fashion right now. The fallback here is to just
+        // stringify the AST node and reparse it, but this loses all span
+        // information.
+        //
+        // As a result, some AST nodes are annotated with the token stream they
+        // came from. Here we attempt to extract these lossless token streams
+        // before we fall back to the stringification.
+        let tokens = match *self {
+            Nonterminal::NtItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtTraitItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtImplItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtIdent(ident, is_raw) => {
+                let token = Token::Ident(ident, is_raw);
+                Some(TokenTree::Token(ident.span, token).into())
+            }
+            Nonterminal::NtLifetime(ident) => {
+                let token = Token::Lifetime(ident);
+                Some(TokenTree::Token(ident.span, token).into())
+            }
+            Nonterminal::NtTT(ref tt) => {
+                Some(tt.clone().into())
+            }
+            _ => None,
+        };
+
+        // FIXME(#43081): Avoid this pretty-print + reparse hack
+        let source = pprust::nonterminal_to_string(self);
+        let filename = FileName::macro_expansion_source_code(&source);
+        let (tokens_for_real, errors) =
+            parse_stream_from_source_str(filename, source, sess, Some(span));
+        emit_unclosed_delims(&errors, &sess.span_diagnostic);
+
+        // During early phases of the compiler the AST could get modified
+        // directly (e.g., attributes added or removed) and the internal cache
+        // of tokens may not be invalidated or updated. Consequently if the
+        // "lossless" token stream disagrees with our actual stringification
+        // (which has historically been much more battle-tested) then we go
+        // with the lossy stream anyway (losing span information).
+        //
+        // Note that the comparison isn't `==` here to avoid comparing spans,
+        // but it *also* is a "probable" equality which is a pretty weird
+        // definition. We mostly want to catch actual changes to the AST
+        // like a `#[cfg]` being processed or some weird `macro_rules!`
+        // expansion.
+        //
+        // What we *don't* want to catch is the fact that a user-defined
+        // literal like `0xf` is stringified as `15`, causing the cached token
+        // stream to not be literal `==` token-wise (ignoring spans) to the
+        // token stream we got from stringification.
+        //
+        // Instead the "probably equal" check here is "does each token
+        // recursively have the same discriminant?" We basically don't look at
+        // the token values here and assume that such fine grained token stream
+        // modifications, including adding/removing typically non-semantic
+        // tokens such as extra braces and commas, don't happen.
+        if let Some(tokens) = tokens {
+            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
+                return tokens
+            }
+            info!("cached tokens found, but they're not \"probably equal\", \
+                   going with stringified version");
+        }
+        return tokens_for_real
+    }
+}
+
 crate fn is_op(tok: &Token) -> bool {
     match *tok {
         OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
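
Aside (not part of the commit): both `probably_equal_for_proc_macro` helpers shown above compare tokens with `std::mem::discriminant`, i.e. by enum variant only, ignoring the payload. That is exactly why a cached literal written as `0xf` still counts as "probably equal" to its stringified form `15`. A minimal standalone sketch of that behaviour, using a hypothetical `Tok` enum rather than the compiler's real token types:

use std::mem;

// Hypothetical stand-in for a token type; not the compiler's `Token`.
enum Tok {
    Literal(String),
    Comma,
}

// Same idea as `probably_equal_for_proc_macro`: two values are "probably
// equal" when they are the same enum variant; the payload is ignored.
fn probably_equal(a: &Tok, b: &Tok) -> bool {
    mem::discriminant(a) == mem::discriminant(b)
}

fn main() {
    // Different payloads (`0xf` vs. its stringified form `15`), same variant.
    assert!(probably_equal(&Tok::Literal("0xf".into()), &Tok::Literal("15".into())));
    // Different variants are never "probably equal".
    assert!(!probably_equal(&Tok::Literal("15".into()), &Tok::Comma));
}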