#![doc(html_root_url = "https://docs.rs/paste/1.0.15")]
#![allow(
    clippy::derive_partial_eq_without_eq,
    clippy::doc_markdown,
    clippy::match_same_arms,
    clippy::module_name_repetitions,
    clippy::needless_doctest_main,
    clippy::too_many_lines
)]

extern crate proc_macro;

mod attr;
mod error;
mod segment;

use crate::attr::expand_attr;
use crate::error::{Error, Result};
use crate::segment::Segment;
use proc_macro::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use std::char;
use std::iter;
use std::panic;

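// Entry point of the `paste!` macro. The input is expanded recursively; if no
// `[< ... >]` paste operation (or other rewrite) was found anywhere in it, the
// original token stream is returned untouched so that the caller's tokens and
// spans pass through unchanged.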
#[proc_macro]
pub fn paste(input: TokenStream) -> TokenStream {
    let mut contains_paste = false;
    let flatten_single_interpolation = true;
    match expand(
        input.clone(),
        &mut contains_paste,
        flatten_single_interpolation,
    ) {
        Ok(expanded) => {
            if contains_paste {
                expanded
            } else {
                input
            }
        }
        Err(err) => err.to_compile_error(),
    }
}

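// Doc-hidden wrappers that simply delegate to `paste`. They appear to be kept
// so that call sites written against the older `paste::item!` / `paste::expr!`
// entry points keep working.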
#[doc(hidden)]
#[proc_macro]
pub fn item(input: TokenStream) -> TokenStream {
    paste(input)
}

#[doc(hidden)]
#[proc_macro]
pub fn expr(input: TokenStream) -> TokenStream {
    paste(input)
}

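// Recursively expands one token stream. `contains_paste` is set to true whenever
// a `[< ... >]` group is pasted or a `Delimiter::None` group is flattened, so the
// caller knows the stream actually changed. `flatten_single_interpolation`
// controls whether a None-delimited group wrapping a single ident, path, literal,
// or lifetime is spliced directly into the output (this is what lets tokens that
// arrive through macro_rules! metavariables participate in pasting); it is turned
// off inside `#[...]` / `#![...]` attributes, whose contents are handled by
// `expand_attr` instead.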
fn expand(
    input: TokenStream,
    contains_paste: &mut bool,
    flatten_single_interpolation: bool,
) -> Result<TokenStream> {
    let mut expanded = TokenStream::new();
    let mut lookbehind = Lookbehind::Other;
    let mut prev_none_group = None::<Group>;
    let mut tokens = input.into_iter().peekable();
    loop {
        let token = tokens.next();
        // A None-delimited group buffered on the previous iteration is flattened
        // if it turns out to be followed by `::`, so that interpolated paths like
        // `$krate::Type` keep working; otherwise it is emitted as a group.
        if let Some(group) = prev_none_group.take() {
            if match (&token, tokens.peek()) {
                (Some(TokenTree::Punct(fst)), Some(TokenTree::Punct(snd))) => {
                    fst.as_char() == ':' && snd.as_char() == ':' && fst.spacing() == Spacing::Joint
                }
                _ => false,
            } {
                expanded.extend(group.stream());
                *contains_paste = true;
            } else {
                expanded.extend(iter::once(TokenTree::Group(group)));
            }
        }
        match token {
            Some(TokenTree::Group(group)) => {
                let delimiter = group.delimiter();
                let content = group.stream();
                let span = group.span();
                if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
                    let segments = parse_bracket_as_segments(content, span)?;
                    let pasted = segment::paste(&segments)?;
                    let tokens = pasted_to_tokens(pasted, span)?;
                    expanded.extend(tokens);
                    *contains_paste = true;
                } else if flatten_single_interpolation
                    && delimiter == Delimiter::None
                    && is_single_interpolation_group(&content)
                {
                    expanded.extend(content);
                    *contains_paste = true;
                } else {
                    let mut group_contains_paste = false;
                    let is_attribute = delimiter == Delimiter::Bracket
                        && (lookbehind == Lookbehind::Pound || lookbehind == Lookbehind::PoundBang);
                    let mut nested = expand(
                        content,
                        &mut group_contains_paste,
                        flatten_single_interpolation && !is_attribute,
                    )?;
                    if is_attribute {
                        nested = expand_attr(nested, span, &mut group_contains_paste)?;
                    }
                    let group = if group_contains_paste {
                        let mut group = Group::new(delimiter, nested);
                        group.set_span(span);
                        *contains_paste = true;
                        group
                    } else {
                        group.clone()
                    };
                    if delimiter != Delimiter::None {
                        expanded.extend(iter::once(TokenTree::Group(group)));
                    } else if lookbehind == Lookbehind::DoubleColon {
                        expanded.extend(group.stream());
                        *contains_paste = true;
                    } else {
                        prev_none_group = Some(group);
                    }
                }
                lookbehind = Lookbehind::Other;
            }
            Some(TokenTree::Punct(punct)) => {
                lookbehind = match punct.as_char() {
                    ':' if lookbehind == Lookbehind::JointColon => Lookbehind::DoubleColon,
                    ':' if punct.spacing() == Spacing::Joint => Lookbehind::JointColon,
                    '#' => Lookbehind::Pound,
                    '!' if lookbehind == Lookbehind::Pound => Lookbehind::PoundBang,
                    _ => Lookbehind::Other,
                };
                expanded.extend(iter::once(TokenTree::Punct(punct)));
            }
            Some(other) => {
                lookbehind = Lookbehind::Other;
                expanded.extend(iter::once(other));
            }
            None => return Ok(expanded),
        }
    }
}

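// Tracks the most recently seen punctuation so that `expand` can recognize a
// `::` path separator (for flattening None-delimited groups around it) and the
// `#[...]` / `#![...]` attribute prefixes.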
#[derive(PartialEq)]
enum Lookbehind {
    JointColon,
    DoubleColon,
    Pound,
    PoundBang,
    Other,
}

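// Returns true if the stream consists of exactly one interpolated "unit": a
// lone ident or `::`-separated path, a literal, or a lifetime. A None-delimited
// group with such contents can be flattened into the surrounding tokens.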
fn is_single_interpolation_group(input: &TokenStream) -> bool {
    #[derive(PartialEq)]
    enum State {
        Init,
        Ident,
        Literal,
        Apostrophe,
        Lifetime,
        Colon1,
        Colon2,
    }

    let mut state = State::Init;
    for tt in input.clone() {
        state = match (state, &tt) {
            (State::Init, TokenTree::Ident(_)) => State::Ident,
            (State::Init, TokenTree::Literal(_)) => State::Literal,
            (State::Init, TokenTree::Punct(punct)) if punct.as_char() == '\'' => State::Apostrophe,
            (State::Apostrophe, TokenTree::Ident(_)) => State::Lifetime,
            (State::Ident, TokenTree::Punct(punct))
                if punct.as_char() == ':' && punct.spacing() == Spacing::Joint =>
            {
                State::Colon1
            }
            (State::Colon1, TokenTree::Punct(punct))
                if punct.as_char() == ':' && punct.spacing() == Spacing::Alone =>
            {
                State::Colon2
            }
            (State::Colon2, TokenTree::Ident(_)) => State::Ident,
            _ => return false,
        };
    }

    state == State::Ident || state == State::Literal || state == State::Lifetime
}

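// Returns true if the bracketed content has the shape `< ... >`: at least one
// token between the angle brackets and nothing after the closing `>`.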
fn is_paste_operation(input: &TokenStream) -> bool {
    let mut tokens = input.clone().into_iter();

    match &tokens.next() {
        Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
        _ => return false,
    }

    let mut has_token = false;
    loop {
        match &tokens.next() {
            Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {
                return has_token && tokens.next().is_none();
            }
            Some(_) => has_token = true,
            None => return false,
        }
    }
}

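// Parses the contents of a `[< ... >]` group into segments, then normalizes any
// string-literal segments: `'\u{...}'` escapes become the character itself,
// surrounding quotes of string/char/raw-string literals are stripped, hyphens
// are rewritten to underscores, and literals that cannot be pasted (byte
// literals and byte strings, or anything containing `#`, `\`, `.`, `+`) are
// rejected.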
fn parse_bracket_as_segments(input: TokenStream, scope: Span) -> Result<Vec<Segment>> {
    let mut tokens = input.into_iter().peekable();

    match &tokens.next() {
        Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
        Some(wrong) => return Err(Error::new(wrong.span(), "expected `<`")),
        None => return Err(Error::new(scope, "expected `[< ... >]`")),
    }

    let mut segments = segment::parse(&mut tokens)?;

    match &tokens.next() {
        Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {}
        Some(wrong) => return Err(Error::new(wrong.span(), "expected `>`")),
        None => return Err(Error::new(scope, "expected `[< ... >]`")),
    }

    if let Some(unexpected) = tokens.next() {
        return Err(Error::new(
            unexpected.span(),
            "unexpected input, expected `[< ... >]`",
        ));
    }

    for segment in &mut segments {
        if let Segment::String(string) = segment {
            // A char literal written as `'\u{...}'` is replaced by the character
            // it denotes (when the escape is valid), skipping the generic
            // literal handling below.
            if string.value.starts_with("'\\u{") {
                let hex = &string.value[4..string.value.len() - 2];
                if let Ok(unsigned) = u32::from_str_radix(hex, 16) {
                    if let Some(ch) = char::from_u32(unsigned) {
                        string.value.clear();
                        string.value.push(ch);
                        continue;
                    }
                }
            }
            if string.value.contains(&['#', '\\', '.', '+'][..])
                || string.value.starts_with("b'")
                || string.value.starts_with("b\"")
                || string.value.starts_with("br\"")
            {
                return Err(Error::new(string.span, "unsupported literal"));
            }
            let mut range = 0..string.value.len();
            if string.value.starts_with("r\"") {
                range.start += 2;
                range.end -= 1;
            } else if string.value.starts_with(&['"', '\''][..]) {
                range.start += 1;
                range.end -= 1;
            }
            string.value = string.value[range].replace('-', "_");
        }
    }

    Ok(segments)
}

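// Converts the pasted string back into tokens. A result that starts with an
// ASCII digit is re-lexed as a literal (when `Literal::from_str` is available,
// i.e. the `no_literal_fromstr` cfg is not set), a leading `'` becomes the
// apostrophe of a lifetime, and the remainder must form a valid identifier;
// anything else is reported as an error on `span`.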
fn pasted_to_tokens(mut pasted: String, span: Span) -> Result<TokenStream> {
    let mut tokens = TokenStream::new();

    #[cfg(not(no_literal_fromstr))]
    {
        use proc_macro::{LexError, Literal};
        use std::str::FromStr;

        if pasted.starts_with(|ch: char| ch.is_ascii_digit()) {
            let literal = match panic::catch_unwind(|| Literal::from_str(&pasted)) {
                Ok(Ok(literal)) => TokenTree::Literal(literal),
                Ok(Err(LexError { .. })) | Err(_) => {
                    return Err(Error::new(
                        span,
                        &format!("`{:?}` is not a valid literal", pasted),
                    ));
                }
            };
            tokens.extend(iter::once(literal));
            return Ok(tokens);
        }
    }

    // A leading apostrophe means the pasted result is a lifetime: emit the `'`
    // as a joint punct and parse the rest as the lifetime's identifier.
    if pasted.starts_with('\'') {
        let mut apostrophe = TokenTree::Punct(Punct::new('\'', Spacing::Joint));
        apostrophe.set_span(span);
        tokens.extend(iter::once(apostrophe));
        pasted.remove(0);
    }

    // `Ident::new` panics on input that is not a valid identifier, so the call
    // is guarded with catch_unwind and turned into a spanned error instead.
    let ident = match panic::catch_unwind(|| Ident::new(&pasted, span)) {
        Ok(ident) => TokenTree::Ident(ident),
        Err(_) => {
            return Err(Error::new(
                span,
                &format!("`{:?}` is not a valid identifier", pasted),
            ));
        }
    };

    tokens.extend(iter::once(ident));
    Ok(tokens)
}