//! Rustc proc-macro server implementation with tt
//!
//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
//! we could provide any `TokenStream` implementation.
//! The original idea from fedochet is using proc-macro2 as backend,
//! we use tt instead for better integration with RA.
//!
//! FIXME: No span and source file information is implemented yet
use super::proc_macro::bridge::{self, server};

use std::collections::HashMap;
use std::iter::FromIterator;
use std::ops::Bound;
use std::{ascii, vec::IntoIter};
19 type Group = tt::Subtree;
20 type TokenTree = tt::TokenTree;
21 type Punct = tt::Punct;
22 type Spacing = tt::Spacing;
23 type Literal = tt::Literal;
24 type Span = tt::TokenId;
26 #[derive(Debug, Clone)]
27 pub struct TokenStream {
28 pub token_trees: Vec<TokenTree>,
32 pub fn new() -> Self {
33 TokenStream { token_trees: Default::default() }
36 pub fn with_subtree(subtree: tt::Subtree) -> Self {
37 if subtree.delimiter.is_some() {
38 TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
40 TokenStream { token_trees: subtree.token_trees }
44 pub fn into_subtree(self) -> tt::Subtree {
45 tt::Subtree { delimiter: None, token_trees: self.token_trees }
48 pub fn is_empty(&self) -> bool {
49 self.token_trees.is_empty()
53 /// Creates a token stream containing a single token tree.
54 impl From<TokenTree> for TokenStream {
55 fn from(tree: TokenTree) -> TokenStream {
56 TokenStream { token_trees: vec![tree] }
60 /// Collects a number of token trees into a single stream.
61 impl FromIterator<TokenTree> for TokenStream {
62 fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
63 trees.into_iter().map(TokenStream::from).collect()
67 /// A "flattening" operation on token streams, collects token trees
68 /// from multiple token streams into a single stream.
69 impl FromIterator<TokenStream> for TokenStream {
70 fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
71 let mut builder = TokenStreamBuilder::new();
72 streams.into_iter().for_each(|stream| builder.push(stream));
77 impl Extend<TokenTree> for TokenStream {
78 fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
79 self.extend(trees.into_iter().map(TokenStream::from));
83 impl Extend<TokenStream> for TokenStream {
84 fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
88 tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
89 self.token_trees.extend(subtree.token_trees);
92 self.token_trees.push(tkn);
101 pub struct SourceFile {
105 type Level = super::proc_macro::Level;
106 type LineColumn = super::proc_macro::LineColumn;
108 /// A structure representing a diagnostic message and associated children
110 #[derive(Clone, Debug)]
111 pub struct Diagnostic {
115 children: Vec<Diagnostic>,
119 /// Creates a new diagnostic with the given `level` and `message`.
120 pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
121 Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
// Rustc Server Ident has to be `Copyable`, but `tt::Ident` owns a string.
// We bypass this by interning ident data and passing around a `u32` handle.
#[derive(Hash, Eq, PartialEq, Copy, Clone)]
pub struct IdentId(u32);
130 #[derive(Clone, Hash, Eq, PartialEq)]
131 struct IdentData(tt::Ident);
134 struct IdentInterner {
135 idents: HashMap<IdentData, u32>,
136 ident_data: Vec<IdentData>,
140 fn intern(&mut self, data: &IdentData) -> u32 {
141 if let Some(index) = self.idents.get(data) {
145 let index = self.idents.len() as u32;
146 self.ident_data.push(data.clone());
147 self.idents.insert(data.clone(), index);
151 fn get(&self, index: u32) -> &IdentData {
152 &self.ident_data[index as usize]
156 fn get_mut(&mut self, index: u32) -> &mut IdentData {
157 self.ident_data.get_mut(index as usize).expect("Should be consistent")
161 pub struct TokenStreamBuilder {
165 /// Public implementation details for the `TokenStream` type, such as iterators.
166 pub mod token_stream {
167 use std::str::FromStr;
169 use super::{TokenStream, TokenTree};
171 /// An iterator over `TokenStream`'s `TokenTree`s.
172 /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
173 /// and returns whole groups as token trees.
174 impl IntoIterator for TokenStream {
175 type Item = TokenTree;
176 type IntoIter = super::IntoIter<TokenTree>;
178 fn into_iter(self) -> Self::IntoIter {
179 self.token_trees.into_iter()
183 type LexError = String;
185 /// Attempts to break the string into tokens and parse those tokens into a token stream.
186 /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
187 /// or characters not existing in the language.
188 /// All tokens in the parsed stream get `Span::call_site()` spans.
190 /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
191 /// change these errors into `LexError`s later.
192 impl FromStr for TokenStream {
195 fn from_str(src: &str) -> Result<TokenStream, LexError> {
196 let (subtree, _token_map) =
197 mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
199 let subtree = subtree_replace_token_ids_with_unspecified(subtree);
200 Ok(TokenStream::with_subtree(subtree))
204 impl ToString for TokenStream {
205 fn to_string(&self) -> String {
206 tt::pretty(&self.token_trees)
210 fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
214 .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
218 .map(token_tree_replace_token_ids_with_unspecified)
223 fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
225 tt::TokenTree::Leaf(leaf) => {
226 tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
228 tt::TokenTree::Subtree(subtree) => {
229 tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
234 fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
236 tt::Leaf::Literal(lit) => {
237 tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
239 tt::Leaf::Punct(punct) => {
240 tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
242 tt::Leaf::Ident(ident) => {
243 tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
249 impl TokenStreamBuilder {
250 fn new() -> TokenStreamBuilder {
251 TokenStreamBuilder { acc: TokenStream::new() }
254 fn push(&mut self, stream: TokenStream) {
255 self.acc.extend(stream.into_iter())
258 fn build(self) -> TokenStream {
263 pub struct FreeFunctions;
266 pub struct TokenStreamIter {
267 trees: IntoIter<TokenTree>,
271 pub struct RustAnalyzer {
272 ident_interner: IdentInterner,
273 // FIXME: store span information here.
276 impl server::Types for RustAnalyzer {
277 type FreeFunctions = FreeFunctions;
278 type TokenStream = TokenStream;
279 type TokenStreamBuilder = TokenStreamBuilder;
280 type TokenStreamIter = TokenStreamIter;
283 type Ident = IdentId;
284 type Literal = Literal;
285 type SourceFile = SourceFile;
286 type Diagnostic = Diagnostic;
288 type MultiSpan = Vec<Span>;
291 impl server::FreeFunctions for RustAnalyzer {
292 fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
293 // FIXME: track env var accesses
294 // https://github.com/rust-lang/rust/pull/71858
296 fn track_path(&mut self, _path: &str) {}
299 impl server::TokenStream for RustAnalyzer {
300 fn new(&mut self) -> Self::TokenStream {
301 Self::TokenStream::new()
304 fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
307 fn from_str(&mut self, src: &str) -> Self::TokenStream {
308 use std::str::FromStr;
310 Self::TokenStream::from_str(src).expect("cannot parse string")
312 fn to_string(&mut self, stream: &Self::TokenStream) -> String {
317 tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
318 ) -> Self::TokenStream {
320 bridge::TokenTree::Group(group) => {
321 let tree = TokenTree::from(group);
322 Self::TokenStream::from_iter(vec![tree])
325 bridge::TokenTree::Ident(IdentId(index)) => {
326 let IdentData(ident) = self.ident_interner.get(index).clone();
327 let ident: tt::Ident = ident;
328 let leaf = tt::Leaf::from(ident);
329 let tree = TokenTree::from(leaf);
330 Self::TokenStream::from_iter(vec![tree])
333 bridge::TokenTree::Literal(literal) => {
334 let leaf = tt::Leaf::from(literal);
335 let tree = TokenTree::from(leaf);
336 Self::TokenStream::from_iter(vec![tree])
339 bridge::TokenTree::Punct(p) => {
340 let leaf = tt::Leaf::from(p);
341 let tree = TokenTree::from(leaf);
342 Self::TokenStream::from_iter(vec![tree])
347 fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
348 let trees: Vec<TokenTree> = stream.into_iter().collect();
349 TokenStreamIter { trees: trees.into_iter() }
352 fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
357 impl server::TokenStreamBuilder for RustAnalyzer {
358 fn new(&mut self) -> Self::TokenStreamBuilder {
359 Self::TokenStreamBuilder::new()
361 fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
364 fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
369 impl server::TokenStreamIter for RustAnalyzer {
372 iter: &mut Self::TokenStreamIter,
373 ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
374 iter.trees.next().map(|tree| match tree {
375 TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
376 TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
377 bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
379 TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
380 TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
385 fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
387 bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
388 bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
389 bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
390 bridge::Delimiter::None => return None,
392 Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
395 fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
396 match d.map(|it| it.kind) {
397 Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
398 Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
399 Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
400 None => bridge::Delimiter::None,
404 fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
406 bridge::Spacing::Alone => Spacing::Alone,
407 bridge::Spacing::Joint => Spacing::Joint,
411 fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
413 Spacing::Alone => bridge::Spacing::Alone,
414 Spacing::Joint => bridge::Spacing::Joint,
418 impl server::Group for RustAnalyzer {
419 fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
420 Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
422 fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
423 delim_to_external(group.delimiter)
426 // NOTE: Return value of do not include delimiter
427 fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
428 TokenStream { token_trees: group.token_trees.clone() }
431 fn span(&mut self, group: &Self::Group) -> Self::Span {
432 group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
435 fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
436 if let Some(delim) = &mut group.delimiter {
441 fn span_open(&mut self, group: &Self::Group) -> Self::Span {
442 // FIXME we only store one `TokenId` for the delimiters
443 group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
446 fn span_close(&mut self, group: &Self::Group) -> Self::Span {
447 // FIXME we only store one `TokenId` for the delimiters
448 group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
452 impl server::Punct for RustAnalyzer {
453 fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
456 spacing: spacing_to_internal(spacing),
457 id: tt::TokenId::unspecified(),
460 fn as_char(&mut self, punct: Self::Punct) -> char {
463 fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
464 spacing_to_external(punct.spacing)
466 fn span(&mut self, punct: Self::Punct) -> Self::Span {
469 fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
470 tt::Punct { id: span, ..punct }
474 impl server::Ident for RustAnalyzer {
475 fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident {
476 IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span })))
479 fn span(&mut self, ident: Self::Ident) -> Self::Span {
480 self.ident_interner.get(ident.0).0.id
482 fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
483 let data = self.ident_interner.get(ident.0);
484 let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
485 IdentId(self.ident_interner.intern(&new))
489 impl server::Literal for RustAnalyzer {
490 fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
491 // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
492 // They must still be present to be ABI-compatible and work with upstream proc_macro.
495 fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
496 Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
498 fn symbol(&mut self, literal: &Self::Literal) -> String {
499 literal.text.to_string()
501 fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
505 fn to_string(&mut self, literal: &Self::Literal) -> String {
509 fn integer(&mut self, n: &str) -> Self::Literal {
510 let n = match n.parse::<i128>() {
511 Ok(n) => n.to_string(),
512 Err(_) => n.parse::<u128>().unwrap().to_string(),
514 Literal { text: n.into(), id: tt::TokenId::unspecified() }
517 fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
518 macro_rules! def_suffixed_integer {
519 ($kind:ident, $($ty:ty),*) => {
523 let n: $ty = n.parse().unwrap();
524 format!(concat!("{}", stringify!($ty)), n)
527 _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
532 let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
534 Literal { text: text.into(), id: tt::TokenId::unspecified() }
537 fn float(&mut self, n: &str) -> Self::Literal {
538 let n: f64 = n.parse().unwrap();
539 let mut text = f64::to_string(&n);
540 if !text.contains('.') {
543 Literal { text: text.into(), id: tt::TokenId::unspecified() }
546 fn f32(&mut self, n: &str) -> Self::Literal {
547 let n: f32 = n.parse().unwrap();
548 let text = format!("{}f32", n);
549 Literal { text: text.into(), id: tt::TokenId::unspecified() }
552 fn f64(&mut self, n: &str) -> Self::Literal {
553 let n: f64 = n.parse().unwrap();
554 let text = format!("{}f64", n);
555 Literal { text: text.into(), id: tt::TokenId::unspecified() }
558 fn string(&mut self, string: &str) -> Self::Literal {
559 let mut escaped = String::new();
560 for ch in string.chars() {
561 escaped.extend(ch.escape_debug());
563 Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
566 fn character(&mut self, ch: char) -> Self::Literal {
567 Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
570 fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
574 .flat_map(ascii::escape_default)
575 .map(Into::<char>::into)
576 .collect::<String>();
578 Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
581 fn span(&mut self, literal: &Self::Literal) -> Self::Span {
585 fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
591 _literal: &Self::Literal,
592 _start: Bound<usize>,
594 ) -> Option<Self::Span> {
600 impl server::SourceFile for RustAnalyzer {
601 // FIXME these are all stubs
602 fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
605 fn path(&mut self, _file: &Self::SourceFile) -> String {
608 fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
613 impl server::Diagnostic for RustAnalyzer {
614 fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
615 let mut diag = Diagnostic::new(level, msg);
622 _diag: &mut Self::Diagnostic,
625 _spans: Self::MultiSpan,
627 // FIXME handle diagnostic
631 fn emit(&mut self, _diag: Self::Diagnostic) {
632 // FIXME handle diagnostic
637 impl server::Span for RustAnalyzer {
638 fn debug(&mut self, span: Self::Span) -> String {
639 format!("{:?}", span.0)
641 fn def_site(&mut self) -> Self::Span {
642 // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
644 tt::TokenId::unspecified()
646 fn call_site(&mut self) -> Self::Span {
647 // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
649 tt::TokenId::unspecified()
651 fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
654 fn save_span(&mut self, _span: Self::Span) -> usize {
658 fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
660 tt::TokenId::unspecified()
662 /// Recent feature, not yet in the proc_macro
665 /// https://github.com/rust-lang/rust/pull/55780
666 fn source_text(&mut self, _span: Self::Span) -> Option<String> {
670 fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
674 fn source(&mut self, span: Self::Span) -> Self::Span {
678 fn start(&mut self, _span: Self::Span) -> LineColumn {
680 LineColumn { line: 0, column: 0 }
682 fn end(&mut self, _span: Self::Span) -> LineColumn {
684 LineColumn { line: 0, column: 0 }
686 fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
687 // Just return the first span again, because some macros will unwrap the result.
690 fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
692 tt::TokenId::unspecified()
695 fn mixed_site(&mut self) -> Self::Span {
697 tt::TokenId::unspecified()
700 fn after(&mut self, _self_: Self::Span) -> Self::Span {
701 tt::TokenId::unspecified()
704 fn before(&mut self, _self_: Self::Span) -> Self::Span {
705 tt::TokenId::unspecified()
709 impl server::MultiSpan for RustAnalyzer {
710 fn new(&mut self) -> Self::MultiSpan {
715 fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
723 use super::super::proc_macro::bridge::server::Literal;
#[test]
fn test_ra_server_literals() {
    let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
    assert_eq!(srv.integer("1234").text, "1234");

    assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
    assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
    assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
    assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
    assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
    assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
    assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
    assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
    assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
    assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
    assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
    assert_eq!(srv.float("0").text, "0.0");
    assert_eq!(srv.float("15684.5867").text, "15684.5867");
    assert_eq!(srv.f32("15684.58").text, "15684.58f32");
    assert_eq!(srv.f64("15684.58").text, "15684.58f64");

    assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
    assert_eq!(srv.character('c').text, "'c'");
    assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");

    // i128::MAX-range and beyond: `integer` falls back to u128 parsing.
    assert_eq!(
        srv.integer("340282366920938463463374607431768211455").text,
        "340282366920938463463374607431768211455"
    );
    assert_eq!(
        srv.integer("-170141183460469231731687303715884105728").text,
        "-170141183460469231731687303715884105728"
    );
}
#[test]
fn test_ra_server_to_string() {
    // `struct T {}` as a hand-built stream: two idents plus an empty braced subtree.
    let s = TokenStream {
        token_trees: vec![
            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                text: "struct".into(),
                id: tt::TokenId::unspecified(),
            })),
            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                text: "T".into(),
                id: tt::TokenId::unspecified(),
            })),
            tt::TokenTree::Subtree(tt::Subtree {
                delimiter: Some(tt::Delimiter {
                    id: tt::TokenId::unspecified(),
                    kind: tt::DelimiterKind::Brace,
                }),
                token_trees: vec![],
            }),
        ],
    };

    assert_eq!(s.to_string(), "struct T {}");
}
789 fn test_ra_server_from_str() {
790 use std::str::FromStr;
791 let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
792 delimiter: Some(tt::Delimiter {
793 id: tt::TokenId::unspecified(),
794 kind: tt::DelimiterKind::Parenthesis,
796 token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
798 id: tt::TokenId::unspecified(),
802 let t1 = TokenStream::from_str("(a)").unwrap();
803 assert_eq!(t1.token_trees.len(), 1);
804 assert_eq!(t1.token_trees[0], subtree_paren_a);
806 let t2 = TokenStream::from_str("(a);").unwrap();
807 assert_eq!(t2.token_trees.len(), 2);
808 assert_eq!(t2.token_trees[0], subtree_paren_a);
810 let underscore = TokenStream::from_str("_").unwrap();
812 underscore.token_trees[0],
813 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
815 id: tt::TokenId::unspecified(),