@@ -107,25 +107,30 @@ where
     }
 }
 
-pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
-    fn to_attr_token_stream(&self) -> AttrTokenStream;
-}
-
-impl ToAttrTokenStream for AttrTokenStream {
-    fn to_attr_token_stream(&self) -> AttrTokenStream {
-        self.clone()
-    }
-}
-
-/// A lazy version of [`TokenStream`], which defers creation
-/// of an actual `TokenStream` until it is needed.
-/// `Box` is here only to reduce the structure size.
+/// A lazy version of [`AttrTokenStream`], which defers creation of an actual
+/// `AttrTokenStream` until it is needed.
 #[derive(Clone)]
-pub struct LazyAttrTokenStream(Lrc<Box<dyn ToAttrTokenStream>>);
+pub struct LazyAttrTokenStream(Lrc<LazyAttrTokenStreamInner>);
 
 impl LazyAttrTokenStream {
-    pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream {
-        LazyAttrTokenStream(Lrc::new(Box::new(inner)))
+    pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
+        LazyAttrTokenStream(Lrc::new(LazyAttrTokenStreamInner::Direct(stream)))
+    }
+
+    pub fn new_pending(
+        start_token: (Token, Spacing),
+        cursor_snapshot: TokenCursor,
+        num_calls: u32,
+        break_last_token: bool,
+        replace_ranges: Box<[ReplaceRange]>,
+    ) -> LazyAttrTokenStream {
+        LazyAttrTokenStream(Lrc::new(LazyAttrTokenStreamInner::Pending {
+            start_token,
+            cursor_snapshot,
+            num_calls,
+            break_last_token,
+            replace_ranges,
+        }))
     }
 
     pub fn to_attr_token_stream(&self) -> AttrTokenStream {
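The shape change in this hunk is the heart of the commit: `Lrc<Box<dyn ToAttrTokenStream>>` paid for two heap allocations and a vtable call on every access, while `Lrc<LazyAttrTokenStreamInner>` is a single allocation read through a plain `match`. Here is a minimal standalone sketch of the same before/after pattern; `Stream`, `Inner`, `OldLazy`, and `NewLazy` are hypothetical stand-ins for the real compiler types, not rustc's API:

```rust
use std::sync::Arc;

// Hypothetical stand-in for `AttrTokenStream`.
#[derive(Clone, Debug)]
struct Stream(Vec<u32>);

// Before: `Arc<Box<dyn Trait>>` means two allocations and a virtual
// call on every access.
#[allow(dead_code)]
trait ToStream {
    fn to_stream(&self) -> Stream;
}
#[allow(dead_code)]
struct OldLazy(Arc<Box<dyn ToStream>>);

// After: one `Arc` around an enum; both variants are spelled out next
// to each other, and access is a direct `match`.
enum Inner {
    Direct(Stream),
    Pending { seed: u32, len: usize },
}
struct NewLazy(Arc<Inner>);

impl NewLazy {
    fn new_direct(stream: Stream) -> Self {
        NewLazy(Arc::new(Inner::Direct(stream)))
    }
    fn new_pending(seed: u32, len: usize) -> Self {
        NewLazy(Arc::new(Inner::Pending { seed, len }))
    }
    fn to_stream(&self) -> Stream {
        match &*self.0 {
            Inner::Direct(stream) => stream.clone(),
            // Only materialize the stream when somebody asks for it.
            Inner::Pending { seed, len } => Stream((*seed..).take(*len).collect()),
        }
    }
}

fn main() {
    let eager = NewLazy::new_direct(Stream(vec![1, 2]));
    let lazy = NewLazy::new_pending(7, 3);
    println!("{:?} {:?}", eager.to_stream(), lazy.to_stream()); // Stream([1, 2]) Stream([7, 8, 9])
}
```

A pleasant side effect, visible in the diff below, is that the two constructors replace an open-ended `impl ToAttrTokenStream + 'static` bound with a closed set of variants the compiler can see and size.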
@@ -174,90 +179,108 @@ impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
 /// attributes get inserted into the proper place in the token stream.
 pub type ReplaceRange = (Range<u32>, Option<AttrsTarget>);
 
-// Produces a `TokenStream` on-demand. Using `cursor_snapshot` and `num_calls`,
-// we can reconstruct the `TokenStream` seen by the callback. This allows us to
-// avoid producing a `TokenStream` if it is never needed - for example, a
-// captured `macro_rules!` argument that is never passed to a proc macro. In
-// practice token stream creation happens rarely compared to calls to
-// `collect_tokens` (see some statistics in #78736), so we are doing as little
-// up-front work as possible.
-//
-// This also makes `Parser` very cheap to clone, since there is no intermediate
-// collection buffer to clone.
-pub struct LazyAttrTokenStreamImpl {
-    pub start_token: (Token, Spacing),
-    pub cursor_snapshot: TokenCursor,
-    pub num_calls: u32,
-    pub break_last_token: bool,
-    pub replace_ranges: Box<[ReplaceRange]>,
+enum LazyAttrTokenStreamInner {
+    // The token stream has already been produced.
+    Direct(AttrTokenStream),
+
+    // Produces a `TokenStream` on-demand. Using `cursor_snapshot` and `num_calls`,
+    // we can reconstruct the `TokenStream` seen by the callback. This allows us to
+    // avoid producing a `TokenStream` if it is never needed - for example, a
+    // captured `macro_rules!` argument that is never passed to a proc macro. In
+    // practice token stream creation happens rarely compared to calls to
+    // `collect_tokens` (see some statistics in #78736), so we are doing as little
+    // up-front work as possible.
+    //
+    // This also makes `Parser` very cheap to clone, since there is no intermediate
+    // collection buffer to clone.
+    Pending {
+        start_token: (Token, Spacing),
+        cursor_snapshot: TokenCursor,
+        num_calls: u32,
+        break_last_token: bool,
+        replace_ranges: Box<[ReplaceRange]>,
+    },
 }
 
-impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
+impl LazyAttrTokenStreamInner {
     fn to_attr_token_stream(&self) -> AttrTokenStream {
-        // The token produced by the final call to `{,inlined_}next` was not
-        // actually consumed by the callback. The combination of chaining the
-        // initial token and using `take` produces the desired result - we
-        // produce an empty `TokenStream` if no calls were made, and omit the
-        // final token otherwise.
-        let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = iter::once(FlatToken::Token(self.start_token.clone()))
-            .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
-            .take(self.num_calls as usize);
-
-        if self.replace_ranges.is_empty() {
-            make_attr_token_stream(tokens, self.break_last_token)
-        } else {
-            let mut tokens: Vec<_> = tokens.collect();
-            let mut replace_ranges = self.replace_ranges.to_vec();
-            replace_ranges.sort_by_key(|(range, _)| range.start);
-
-            #[cfg(debug_assertions)]
-            {
-                for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
-                    assert!(
-                        range.end <= next_range.start || range.end >= next_range.end,
-                        "Replace ranges should either be disjoint or nested: \
-                        ({:?}, {:?}) ({:?}, {:?})",
-                        range,
-                        tokens,
-                        next_range,
-                        next_tokens,
-                    );
-                }
-            }
+        match self {
+            LazyAttrTokenStreamInner::Direct(stream) => stream.clone(),
+            LazyAttrTokenStreamInner::Pending {
+                start_token,
+                cursor_snapshot,
+                num_calls,
+                break_last_token,
+                replace_ranges,
+            } => {
+                // The token produced by the final call to `{,inlined_}next`
+                // was not actually consumed by the callback. The combination
+                // of chaining the initial token and using `take` produces the
+                // desired result - we produce an empty `TokenStream` if no
+                // calls were made, and omit the final token otherwise.
+                let mut cursor_snapshot = cursor_snapshot.clone();
+                let tokens = iter::once(FlatToken::Token(start_token.clone()))
+                    .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
+                    .take(*num_calls as usize);
+
+                if replace_ranges.is_empty() {
+                    make_attr_token_stream(tokens, *break_last_token)
+                } else {
+                    let mut tokens: Vec<_> = tokens.collect();
+                    let mut replace_ranges = replace_ranges.to_vec();
+                    replace_ranges.sort_by_key(|(range, _)| range.start);
+
+                    #[cfg(debug_assertions)]
+                    {
+                        for [(range, tokens), (next_range, next_tokens)] in
+                            replace_ranges.array_windows()
+                        {
+                            assert!(
+                                range.end <= next_range.start || range.end >= next_range.end,
+                                "Replace ranges should either be disjoint or nested: \
+                                ({:?}, {:?}) ({:?}, {:?})",
+                                range,
+                                tokens,
+                                next_range,
+                                next_tokens,
+                            );
+                        }
+                    }
 
-            // Process the replace ranges, starting from the highest start
-            // position and working our way back. If we have tokens like:
-            //
-            // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
-            //
-            // Then we will generate replace ranges for both the `#[cfg(FALSE)]
-            // field: bool` and the entire `#[cfg(FALSE)] struct Foo {
-            // #[cfg(FALSE)] field: bool }`
-            //
-            // By starting processing from the replace range with the greatest
-            // start position, we ensure that any replace range which encloses
-            // another replace range will capture the *replaced* tokens for the
-            // inner range, not the original tokens.
-            for (range, target) in replace_ranges.into_iter().rev() {
-                assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
-
-                // Replace the tokens in range with zero or one
-                // `FlatToken::AttrsTarget`s, plus enough `FlatToken::Empty`s
-                // to fill up the rest of the range. This keeps the total
-                // length of `tokens` constant throughout the replacement
-                // process, allowing us to use all of the `ReplaceRanges`
-                // entries without adjusting indices.
-                let target_len = target.is_some() as usize;
-                tokens.splice(
-                    (range.start as usize)..(range.end as usize),
-                    target
-                        .into_iter()
-                        .map(|target| FlatToken::AttrsTarget(target))
-                        .chain(iter::repeat(FlatToken::Empty).take(range.len() - target_len)),
-                );
+                    // Process the replace ranges, starting from the highest
+                    // start position and working our way back. If we have
+                    // tokens like:
+                    //
+                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+                    //
+                    // Then we will generate replace ranges for both the
+                    // `#[cfg(FALSE)] field: bool` and the entire
+                    // `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
+                    //
+                    // By starting processing from the replace range with the
+                    // greatest start position, we ensure that any replace
+                    // range which encloses another replace range will capture
+                    // the *replaced* tokens for the inner range, not the
+                    // original tokens.
+                    for (range, target) in replace_ranges.into_iter().rev() {
+                        assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
+
+                        // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s,
+                        // plus enough `FlatToken::Empty`s to fill up the rest of the range. This
+                        // keeps the total length of `tokens` constant throughout the replacement
+                        // process, allowing us to use all of the `ReplaceRanges` entries without
+                        // adjusting indices.
+                        let target_len = target.is_some() as usize;
+                        tokens.splice(
+                            (range.start as usize)..(range.end as usize),
+                            target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
+                                iter::repeat(FlatToken::Empty).take(range.len() - target_len),
+                            ),
+                        );
+                    }
+                    make_attr_token_stream(tokens.into_iter(), *break_last_token)
+                }
             }
-            make_attr_token_stream(tokens.into_iter(), self.break_last_token)
         }
     }
 }
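Two tricks in the `Pending` arm are worth unpacking. The first is the `once(...).chain(repeat_with(...)).take(num_calls)` replay: the parser stores only a cursor snapshot and a call count, and rebuilds on demand the exact token sequence the callback saw, yielding nothing when no calls were made and dropping the final, never-consumed token otherwise. A self-contained toy version, with `u32` tokens and a hypothetical `Cursor` in place of `TokenCursor`:

```rust
use std::iter;

// Toy cursor standing in for `TokenCursor`: cheap to clone, and `next`
// yields one token per call.
#[derive(Clone)]
struct Cursor(std::ops::RangeFrom<u32>);
impl Cursor {
    fn next(&mut self) -> u32 {
        self.0.next().unwrap()
    }
}

// Rebuild the tokens a callback saw: the first token was captured up
// front, the rest come from replaying a snapshot of the cursor. `take`
// trims the token from the final `next` call, which the callback never
// consumed, and yields nothing at all when `num_calls` is zero.
fn replay(start_token: u32, cursor_snapshot: &Cursor, num_calls: u32) -> Vec<u32> {
    let mut cursor = cursor_snapshot.clone();
    iter::once(start_token)
        .chain(iter::repeat_with(|| cursor.next()))
        .take(num_calls as usize)
        .collect()
}

fn main() {
    let snapshot = Cursor(11..);
    assert_eq!(replay(10, &snapshot, 0), vec![]);
    assert_eq!(replay(10, &snapshot, 3), vec![10, 11, 12]);
}
```

The second is the replace-range pass: sort by start position, walk the ranges in reverse so an enclosing range sees the *replaced* contents of any nested range, and pad every replacement to the original length so no other range's indices need adjusting. A toy version with string tokens standing in for `FlatToken` (the names are illustrative, not rustc's):

```rust
use std::iter;
use std::ops::Range;

// Miniature of the replacement scheme: each range swaps a span of the
// flat buffer for an optional single marker plus padding, so the buffer
// length never changes and earlier-starting ranges stay valid.
fn apply_replacements(
    tokens: &mut Vec<&'static str>,
    mut replace_ranges: Vec<(Range<u32>, Option<&'static str>)>,
) {
    replace_ranges.sort_by_key(|(range, _)| range.start);
    // Highest start position first, so nested ranges are applied before
    // the ranges that enclose them.
    for (range, target) in replace_ranges.into_iter().rev() {
        assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
        let target_len = target.is_some() as usize;
        tokens.splice(
            (range.start as usize)..(range.end as usize),
            target
                .into_iter()
                .chain(iter::repeat("<empty>").take(range.len() - target_len)),
        );
    }
}

fn main() {
    let mut tokens = vec!["struct", "Foo", "{", "field", ":", "bool", "}"];
    // Nested ranges: the inner one covers `field : bool`, the outer one
    // the whole item, mirroring the `#[cfg(FALSE)]` example above.
    apply_replacements(
        &mut tokens,
        vec![(0..7, Some("<struct-target>")), (3..6, Some("<field-target>"))],
    );
    assert_eq!(tokens.len(), 7); // length preserved by the padding
    assert_eq!(tokens[0], "<struct-target>");
    println!("{tokens:?}");
}
```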
@@ -1025,7 +1048,7 @@ mod size_asserts {
     static_assert_size!(AttrTokenStream, 8);
     static_assert_size!(AttrTokenTree, 32);
     static_assert_size!(LazyAttrTokenStream, 8);
-    static_assert_size!(LazyAttrTokenStreamImpl, 96);
+    static_assert_size!(LazyAttrTokenStreamInner, 96);
     static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
     static_assert_size!(TokenStream, 8);
     static_assert_size!(TokenTree, 32);
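These size asserts are what make a layout refactor like this safe to land: if `LazyAttrTokenStreamInner` ever grows, the build breaks instead of silently bloating the parser. A minimal sketch in the style of rustc's `static_assert_size!` (the macro body and the types checked here are illustrative, not copied from the compiler):

```rust
// The constant only type-checks when the two array lengths agree, so a
// size regression becomes a compile error with zero runtime cost.
macro_rules! static_assert_size {
    ($ty:ty, $size:expr) => {
        const _: [(); $size] = [(); ::std::mem::size_of::<$ty>()];
    };
}

// Gated to 64-bit targets, since the sizes below assume 8-byte pointers.
#[cfg(target_pointer_width = "64")]
mod size_asserts {
    static_assert_size!(u64, 8);
    // Still 8 bytes: the non-null reference gives `Option` a niche for
    // its discriminant. The same effect is why the diff can insist that
    // `Option<LazyAttrTokenStream>` stays at 8 bytes: `Lrc` is non-null.
    static_assert_size!(Option<&u8>, 8);
}

fn main() {}
```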