@@ -892,29 +892,8 @@ impl<'db> SemanticsImpl<'db> {
         f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
     ) -> Option<T> {
         let _p = tracing::info_span!("descend_into_macros_impl").entered();
-        let (sa, span, file_id) = token
-            .parent()
-            .and_then(|parent| {
-                self.analyze_impl(InRealFile::new(file_id, &parent).into(), None, false)
-            })
-            .and_then(|sa| {
-                let file_id = sa.file_id.file_id()?;
-                Some((
-                    sa,
-                    self.db.real_span_map(file_id).span_for_range(token.text_range()),
-                    HirFileId::from(file_id),
-                ))
-            })?;
 
-        let mut m_cache = self.macro_call_cache.borrow_mut();
-        let def_map = sa.resolver.def_map();
-
-        // A stack of tokens to process, along with the file they came from
-        // These are tracked to know which macro calls we still have to look into
-        // the tokens themselves aren't that interesting as the span that is being used to map
-        // things down never changes.
-        let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
-            vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])];
+        let span = self.db.real_span_map(file_id).span_for_range(token.text_range());
 
         // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
         let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
@@ -926,14 +905,40 @@ impl<'db> SemanticsImpl<'db> {
                         .map(SmallVec::<[_; 2]>::from_iter),
                 )
             })?;
-
             // we have found a mapping for the token if the vec is non-empty
             let res = mapped_tokens.is_empty().not().then_some(());
             // requeue the tokens we got from mapping our current token down
             stack.push((HirFileId::from(file_id), mapped_tokens));
             res
         };
 
+        // A stack of tokens to process, along with the file they came from
+        // These are tracked to know which macro calls we still have to look into
+        // the tokens themselves aren't that interesting as the span that is being used to map
+        // things down never changes.
+        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
+        let include = self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, file_id);
+        match include {
+            Some(include) => {
+                // include! inputs are always from real files, so they only need to be handled once upfront
+                process_expansion_for_token(&mut stack, include)?;
+            }
+            None => {
+                stack.push((file_id.into(), smallvec![(token, SyntaxContextId::ROOT)]));
+            }
+        }
+
+        let (file_id, tokens) = stack.first()?;
+        // make sure we pick the token in the expanded include if we encountered an include,
+        // otherwise we'll get the wrong semantics
+        let sa =
+            tokens.first()?.0.parent().and_then(|parent| {
+                self.analyze_impl(InFile::new(*file_id, &parent), None, false)
+            })?;
+
+        let mut m_cache = self.macro_call_cache.borrow_mut();
+        let def_map = sa.resolver.def_map();
+
         // Filters out all tokens that contain the given range (usually the macro call), any such
         // token is redundant as the corresponding macro call has already been processed
         let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
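
For orientation, the descent seeded above is a plain worklist: the stack holds (file, tokens) pairs that still need to be inspected, each token is mapped down into whatever macro expansion covers its span, and the mapped tokens are pushed back for further descent. With this change, a token inside an include!d file seeds the stack from the include expansion up front instead of from the real file. The code below is only a minimal sketch of that worklist shape under simplified assumptions; FileId, Token, map_token_down, and descend are stand-in names invented for illustration, not rust-analyzer's actual types or API.

// Sketch of the stack-based descent, with simplified stand-in types.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Debug)]
struct Token {
    file: FileId,
    text: String,
}

// Map a token one level down into a macro expansion, if any; returns the
// tokens inside the expansion that carry the same span as the input token.
fn map_token_down(_token: &Token) -> Option<Vec<Token>> {
    // Placeholder: a real implementation would consult the expansion info
    // of the macro call covering the token and map its range down.
    None
}

// Visit every token the seed descends into; `visit` returns true to stop early.
fn descend(seed: Token, mut visit: impl FnMut(&Token) -> bool) {
    // The stack tracks which (file, tokens) pairs still have to be inspected.
    let mut stack: Vec<(FileId, Vec<Token>)> = vec![(seed.file, vec![seed])];
    while let Some((_file, tokens)) = stack.pop() {
        for token in tokens {
            match map_token_down(&token) {
                // The token maps into an expansion: requeue the mapped tokens
                // so the descent continues inside that expansion.
                Some(mapped) if !mapped.is_empty() => {
                    let file = mapped[0].file;
                    stack.push((file, mapped));
                }
                // No further expansion: hand the token to the caller.
                _ => {
                    if visit(&token) {
                        return;
                    }
                }
            }
        }
    }
}
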
@@ -1011,6 +1016,7 @@ impl<'db> SemanticsImpl<'db> {
                         ) {
                             call.as_macro_file()
                         } else {
+                            // FIXME: This is wrong, the SourceAnalyzer might be invalid here
                             sa.expand(self.db, mcall.as_ref())?
                         };
                         m_cache.insert(mcall, it);
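
The last hunk sits on the fallback expansion path: when a macro call is not already known, it appears to be expanded through the SourceAnalyzer and the result recorded in m_cache so the same call is not expanded repeatedly. As a rough, hypothetical illustration of that memoization pattern only (MacroCallId, MacroFileId, and expansion_for are invented names, not the real m_cache API):

use std::collections::HashMap;

// Hypothetical stand-ins for a macro call node and its expansion result.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct MacroCallId(u32);
#[derive(Clone, Copy, Debug)]
struct MacroFileId(u32);

// Return the cached expansion for `call`, computing and memoizing it on a miss.
fn expansion_for(
    cache: &mut HashMap<MacroCallId, MacroFileId>,
    call: MacroCallId,
    expand: impl FnOnce(MacroCallId) -> Option<MacroFileId>,
) -> Option<MacroFileId> {
    if let Some(&file) = cache.get(&call) {
        return Some(file);
    }
    // Cache miss: run the (possibly expensive) expansion once and record it.
    let file = expand(call)?;
    cache.insert(call, file);
    Some(file)
}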