// src/libsyntax/ext/tt/quoted.rs
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use ast;
use ext::tt::macro_parser;
use parse::{ParseSess, token};
use print::pprust;
use symbol::{keywords, Symbol};
use syntax_pos::{DUMMY_SP, Span, BytePos};
use tokenstream;

use std::rc::Rc;

/// Contains the sub-token-trees of a "delimited" token tree such as `(a b c)`;
/// the delimiter itself is stored in `delim`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Delimited {
    pub delim: token::DelimToken,
    pub tts: Vec<TokenTree>,
}

impl Delimited {
    /// Returns the opening delimiter as a token.
    pub fn open_token(&self) -> token::Token {
        token::OpenDelim(self.delim)
    }

    /// Returns the closing delimiter as a token.
    pub fn close_token(&self) -> token::Token {
        token::CloseDelim(self.delim)
    }

    /// Returns a `TokenTree` for the opening delimiter, spanning just the
    /// delimiter rather than the whole group.
    pub fn open_tt(&self, span: Span) -> TokenTree {
        let open_span = if span == DUMMY_SP {
            DUMMY_SP
        } else {
            Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }
        };
        TokenTree::Token(open_span, self.open_token())
    }

    /// Returns a `TokenTree` for the closing delimiter, spanning just the
    /// delimiter rather than the whole group.
    pub fn close_tt(&self, span: Span) -> TokenTree {
        let close_span = if span == DUMMY_SP {
            DUMMY_SP
        } else {
            Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }
        };
        TokenTree::Token(close_span, self.close_token())
    }
}
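
// For illustration (a sketch of the span arithmetic above): given a group
// `( a b )` whose overall span covers bytes 10..17, `open_tt` yields a `(`
// token spanning 10..11 and `close_tt` a `)` token spanning 16..17, so
// diagnostics can point at a single delimiter rather than the whole group.
// A `DUMMY_SP` span is passed through unchanged.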

/// A sequence repetition such as `$(...)*` or `$(...),+`, together with its
/// separator and Kleene operator.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct SequenceRepetition {
    /// The sequence of token trees
    pub tts: Vec<TokenTree>,
    /// The optional separator
    pub separator: Option<token::Token>,
    /// Whether the sequence can be repeated zero or more times (`*`) or one or more times (`+`)
    pub op: KleeneOp,
    /// The number of `Match`es that appear in the sequence (and its subsequences)
    pub num_captures: usize,
}

/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum KleeneOp {
    /// `*`: zero or more repetitions
    ZeroOrMore,
    /// `+`: one or more repetitions
    OneOrMore,
}

/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
/// are "first-class" token trees.
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum TokenTree {
    /// A single token.
    Token(Span, token::Token),
    /// A delimited group of sub-token-trees.
    Delimited(Span, Rc<Delimited>),
    /// A Kleene-style repetition sequence with its span.
    Sequence(Span, Rc<SequenceRepetition>),
    /// Matches a nonterminal. This is only used in the left-hand side of MBE macros.
    MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */),
}
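
// For illustration (schematic; spans and symbol interning elided), the matcher
// fragment `$e:expr $(, $es:expr)*` is represented as:
//
//     [
//         MetaVarDecl(<span>, e, expr),
//         Sequence(<span>, SequenceRepetition {
//             tts: vec![Token(<span>, Comma), MetaVarDecl(<span>, es, expr)],
//             separator: None,
//             op: KleeneOp::ZeroOrMore,
//             num_captures: 1,
//         }),
//     ]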

impl TokenTree {
    /// Returns the number of immediate sub-token-trees, counting the two
    /// delimiters of a delimited group as tokens (`Token` and `MetaVarDecl`
    /// have none).
    pub fn len(&self) -> usize {
        match *self {
            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
                token::NoDelim => delimed.tts.len(),
                _ => delimed.tts.len() + 2,
            },
            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
            _ => 0,
        }
    }

    /// Returns `true` if the token tree has no sub-token-trees. A group with
    /// real delimiters is never empty, because the delimiters themselves count.
    pub fn is_empty(&self) -> bool {
        match *self {
            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
                token::NoDelim => delimed.tts.is_empty(),
                _ => false,
            },
            TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
            _ => true,
        }
    }

    /// Returns the `index`-th sub-token-tree, using the same numbering as
    /// `len`. Panics on `Token` and `MetaVarDecl`, which have no sub-trees,
    /// and on an out-of-range index.
    pub fn get_tt(&self, index: usize) -> TokenTree {
        match (self, index) {
            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
                delimed.tts[index].clone()
            }
            (&TokenTree::Delimited(span, ref delimed), _) => {
                if index == 0 {
                    return delimed.open_tt(span);
                }
                if index == delimed.tts.len() + 1 {
                    return delimed.close_tt(span);
                }
                delimed.tts[index - 1].clone()
            }
            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
            _ => panic!("Cannot expand a token tree"),
        }
    }

    /// Retrieves the `TokenTree`'s span.
    pub fn span(&self) -> Span {
        match *self {
            TokenTree::Token(sp, _) |
            TokenTree::MetaVarDecl(sp, _, _) |
            TokenTree::Delimited(sp, _) |
            TokenTree::Sequence(sp, _) => sp,
        }
    }
}
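
// Indexing example: for the delimited group `( a b )`, `len()` is 4 and
// `get_tt(0)` through `get_tt(3)` yield `(`, `a`, `b`, and `)` in turn; the
// delimiters are counted as ordinary sub-token-trees. For `NoDelim` groups
// and for sequences, indices map directly onto `tts`.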

/// Converts a `tokenstream::TokenStream` into the `Vec<self::TokenTree>` form
/// used by the `macro_rules!` machinery.
///
/// When `expect_matchers` is true the stream is a macro matcher (the left-hand
/// side of a rule): `$name:kind` pairs are collapsed into `MetaVarDecl`s, and a
/// metavariable without a fragment specifier is recorded in
/// `sess.missing_fragment_specifiers`. When it is false the stream is a macro
/// body, and metavariables are left as plain `SubstNt` tokens.
pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess)
             -> Vec<TokenTree> {
    let mut result = Vec::new();
    let mut trees = input.trees();
    while let Some(tree) = trees.next() {
        let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
        match tree {
            TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
                let span = match trees.next() {
                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
                            Some(kind) => {
                                let span = Span { lo: start_sp.lo, ..end_sp };
                                result.push(TokenTree::MetaVarDecl(span, ident, kind));
                                continue
                            }
                            _ => end_sp,
                        },
                        tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                    },
                    tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
                };
                sess.missing_fragment_specifiers.borrow_mut().insert(span);
                result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
            }
            _ => result.push(tree),
        }
    }
    result
}
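
// For illustration: with `expect_matchers == true` the matcher `$e:expr`
// becomes a single `MetaVarDecl` whose span covers all of `$e:expr`; with
// `expect_matchers == false` (a macro body) the same input is kept as a
// `SubstNt(e)` token followed by the ordinary `:` and `expr` tokens, since
// fragment specifiers only have meaning on the left-hand side.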

/// Parses a single token tree from the input, handling the `$`-prefixed forms:
/// `$(...)` repetition sequences, `$ident` metavariables, and the special
/// `$crate` identifier. Plain tokens and delimited groups are converted
/// directly, recursing into group contents via `parse`.
fn parse_tree<I>(tree: tokenstream::TokenTree,
                 trees: &mut I,
                 expect_matchers: bool,
                 sess: &ParseSess)
                 -> TokenTree
    where I: Iterator<Item = tokenstream::TokenTree>,
{
    match tree {
        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
            Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
                if delimited.delim != token::Paren {
                    let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                    let msg = format!("expected `(`, found `{}`", tok);
                    sess.span_diagnostic.span_err(span, &msg);
                }
                let sequence = parse(delimited.tts.into(), expect_matchers, sess);
                let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
                let name_captures = macro_parser::count_names(&sequence);
                TokenTree::Sequence(span, Rc::new(SequenceRepetition {
                    tts: sequence,
                    separator: separator,
                    op: op,
                    num_captures: name_captures,
                }))
            }
            Some(tokenstream::TokenTree::Token(ident_span, token::Ident(ident))) => {
                let span = Span { lo: span.lo, ..ident_span };
                if ident.name == keywords::Crate.name() {
                    let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident };
                    TokenTree::Token(span, token::Ident(ident))
                } else {
                    TokenTree::Token(span, token::SubstNt(ident))
                }
            }
            Some(tokenstream::TokenTree::Token(span, tok)) => {
                let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok));
                sess.span_diagnostic.span_err(span, &msg);
                TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident()))
            }
            None => TokenTree::Token(span, token::Dollar),
        },
        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
        tokenstream::TokenTree::Delimited(span, delimited) => {
            TokenTree::Delimited(span, Rc::new(Delimited {
                delim: delimited.delim,
                tts: parse(delimited.tts.into(), expect_matchers, sess),
            }))
        }
    }
}
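
// Dollar handling at a glance (schematic; spans elided):
//
//     `$x`      -> Token(SubstNt(x))
//     `$crate`  -> Token(Ident($crate))
//     `$(a),*`  -> Sequence(SequenceRepetition { tts: vec![Token(a)],
//                      separator: Some(Comma), op: ZeroOrMore, num_captures: 0 })
//     `$ +`     -> error: expected identifier, found `+`
//
// A trailing `$` at the end of the input is kept as a literal `Dollar` token.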

/// Attempts to read an optional separator and a Kleene operator (`*` or `+`)
/// following a `$(...)` sequence. If the next token is a Kleene operator it is
/// returned with no separator; otherwise it is treated as the separator and the
/// token after it must be a Kleene operator. On failure an error is emitted and
/// `(None, ZeroOrMore)` is returned as a recovery value.
fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess)
                              -> (Option<token::Token>, KleeneOp)
    where I: Iterator<Item = tokenstream::TokenTree>,
{
    fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
        match *token {
            token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
            token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
            _ => None,
        }
    }

    let span = match input.next() {
        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
            Some(op) => return (None, op),
            None => match input.next() {
                Some(tokenstream::TokenTree::Token(span, tok2)) => match kleene_op(&tok2) {
                    Some(op) => return (Some(tok), op),
                    None => span,
                },
                tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
            }
        },
        tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
    };

    sess.span_diagnostic.span_err(span, "expected `*` or `+`");
    (None, KleeneOp::ZeroOrMore)
}
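
// A minimal sketch of how the data-structure helpers above behave, written as
// an ordinary unit test; `parse` itself is not exercised here because it needs
// a full `ParseSess`.
#[cfg(test)]
mod tests {
    use std::rc::Rc;

    use parse::token;
    use syntax_pos::DUMMY_SP;

    use super::{Delimited, TokenTree};

    #[test]
    fn delimited_group_reports_its_delimiters() {
        let group = TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
            delim: token::Paren,
            tts: Vec::new(),
        }));
        // An empty parenthesized group still exposes two sub-token-trees:
        // the opening and the closing delimiter.
        assert_eq!(group.len(), 2);
        assert!(!group.is_empty());
        assert_eq!(group.get_tt(0),
                   TokenTree::Token(DUMMY_SP, token::OpenDelim(token::Paren)));
        assert_eq!(group.get_tt(1),
                   TokenTree::Token(DUMMY_SP, token::CloseDelim(token::Paren)));
        assert_eq!(group.span(), DUMMY_SP);
    }
}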