}
}
+/// A sequence of token trees
+#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
+pub struct SequenceRepetition {
+ /// The sequence of token trees
+ pub tts: Vec<TokenTree>,
+ /// The optional separator
+ pub separator: Option<token::Token>,
+ /// Whether the sequence can be repeated zero (*), or one or more times (+)
+ pub op: KleeneOp,
+ /// The number of `MatchNt`s that appear in the sequence (and subsequences)
+ pub num_captures: uint,
+}
+
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
// This only makes sense in MBE macros.
- /// A kleene-style repetition sequence with a span, a TT forest,
- /// an optional separator, and a boolean where true indicates
- /// zero or more (..), and false indicates one or more (+).
- /// The last member denotes the number of `MATCH_NONTERMINAL`s
- /// in the forest.
- // FIXME(eddyb) #12938 Use Rc<[TokenTree]> after DST.
- TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp, uint),
+ /// A Kleene-style repetition sequence with a span
+ // FIXME(eddyb) #12938 Use DST.
+ TtSequence(Span, Rc<SequenceRepetition>),
}
impl TokenTree {
- /// For unrolling some tokens or token trees into equivalent sequences.
- pub fn expand_into_tts(self) -> Rc<Vec<TokenTree>> {
- match self {
- TtToken(sp, token::DocComment(name)) => {
+ pub fn len(&self) -> uint {
+ match *self {
+ TtToken(_, token::DocComment(_)) => 2,
+ TtToken(_, token::SubstNt(..)) => 2,
+ TtToken(_, token::MatchNt(..)) => 3,
+ TtDelimited(_, ref delimed) => {
+ delimed.tts.len() + 2
+ }
+ TtSequence(_, ref seq) => {
+ seq.tts.len()
+ }
+ TtToken(..) => 0
+ }
+ }
+
+ pub fn get_tt(&self, index: uint) -> TokenTree {
+ match (self, index) {
+ (&TtToken(sp, token::DocComment(_)), 0) => {
+ TtToken(sp, token::Pound)
+ }
+ (&TtToken(sp, token::DocComment(name)), 1) => {
let doc = MetaNameValue(token::intern_and_get_ident("doc"),
respan(sp, LitStr(token::get_name(name), CookedStr)));
let doc = token::NtMeta(P(respan(sp, doc)));
- let delimed = Delimited {
+ TtDelimited(sp, Rc::new(Delimited {
delim: token::Bracket,
open_span: sp,
tts: vec![TtToken(sp, token::Interpolated(doc))],
close_span: sp,
- };
- Rc::new(vec![TtToken(sp, token::Pound),
- TtDelimited(sp, Rc::new(delimed))])
+ }))
}
- TtDelimited(_, ref delimed) => {
- let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
- tts.push(delimed.open_tt());
- tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
- tts.push(delimed.close_tt());
- Rc::new(tts)
+ (&TtDelimited(_, ref delimed), _) => {
+ if index == 0 {
+ return delimed.open_tt();
+ }
+ if index == delimed.tts.len() + 1 {
+ return delimed.close_tt();
+ }
+ delimed.tts[index - 1].clone()
+ }
+ (&TtToken(sp, token::SubstNt(name, name_st)), _) => {
+ let v = [TtToken(sp, token::Dollar),
+ TtToken(sp, token::Ident(name, name_st))];
+ v[index]
}
- TtToken(sp, token::SubstNt(name, namep)) => {
- Rc::new(vec![TtToken(sp, token::Dollar),
- TtToken(sp, token::Ident(name, namep))])
+ (&TtToken(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
+ let v = [TtToken(sp, token::SubstNt(name, name_st)),
+ TtToken(sp, token::Colon),
+ TtToken(sp, token::Ident(kind, kind_st))];
+ v[index]
}
- TtToken(sp, token::MatchNt(name, kind, namep, kindp)) => {
- Rc::new(vec![TtToken(sp, token::SubstNt(name, namep)),
- TtToken(sp, token::Colon),
- TtToken(sp, token::Ident(kind, kindp))])
+ (&TtSequence(_, ref seq), _) => {
+ seq.tts[index].clone()
}
- _ => panic!("Cannot expand a token")
+ _ => panic!("Cannot expand a token tree")
}
}
/// Returns the `Span` corresponding to this token tree.
pub fn get_span(&self) -> Span {
match *self {
- TtToken(span, _) => span,
- TtDelimited(span, _) => span,
- TtSequence(span, _, _, _, _) => span,
+ TtToken(span, _) => span,
+ TtDelimited(span, _) => span,
+ TtSequence(span, _) => span,
}
}
}
// To avoid costly uniqueness checks, we require that `MatchSeq` always has
// a nonempty body.
+#[deriving(Clone)]
+enum TokenTreeOrTokenTreeVec {
+ Tt(ast::TokenTree),
+ TtSeq(Rc<Vec<ast::TokenTree>>),
+}
+
+impl TokenTreeOrTokenTreeVec {
+ fn len(&self) -> uint {
+ match self {
+ &TtSeq(ref v) => v.len(),
+ &Tt(ref tt) => tt.len(),
+ }
+ }
+
+ fn get_tt(&self, index: uint) -> TokenTree {
+ match self {
+ &TtSeq(ref v) => v[index].clone(),
+ &Tt(ref tt) => tt.get_tt(index),
+ }
+ }
+}
+
/// an unzipping of `TokenTree`s
#[deriving(Clone)]
struct MatcherTtFrame {
- elts: Rc<Vec<ast::TokenTree>>,
+ elts: TokenTreeOrTokenTreeVec,
idx: uint,
}
#[deriving(Clone)]
pub struct MatcherPos {
stack: Vec<MatcherTtFrame>,
- elts: Rc<Vec<ast::TokenTree>>,
+ top_elts: TokenTreeOrTokenTreeVec,
sep: Option<Token>,
idx: uint,
up: Option<Box<MatcherPos>>,
pub fn count_names(ms: &[TokenTree]) -> uint {
ms.iter().fold(0, |count, elt| {
count + match elt {
- &TtSequence(_, _, _, _, advance_by) => {
- advance_by
+ &TtSequence(_, ref seq) => {
+ seq.num_captures
}
&TtDelimited(_, ref delim) => {
count_names(delim.tts.as_slice())
let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new());
box MatcherPos {
stack: vec![],
- elts: ms,
+ top_elts: TtSeq(ms),
sep: sep,
idx: 0u,
up: None,
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
match m {
- &TtSequence(_, ref more_ms, _, _, _) => {
- for next_m in more_ms.iter() {
+ &TtSequence(_, ref seq) => {
+ for next_m in seq.tts.iter() {
n_rec(p_s, next_m, res, ret_val, idx)
}
}
};
// When unzipped trees end, remove them
- while ei.idx >= ei.elts.len() {
+ while ei.idx >= ei.top_elts.len() {
match ei.stack.pop() {
Some(MatcherTtFrame { elts, idx }) => {
- ei.elts = elts;
+ ei.top_elts = elts;
ei.idx = idx + 1;
}
None => break
}
let idx = ei.idx;
- let len = ei.elts.len();
+ let len = ei.top_elts.len();
/* at end of sequence */
if idx >= len {
eof_eis.push(ei);
}
} else {
- match (*ei.elts)[idx].clone() {
+ match ei.top_elts.get_tt(idx) {
/* need to descend into sequence */
- TtSequence(_, ref matchers, ref sep, kleene_op, match_num) => {
- if kleene_op == ast::ZeroOrMore {
+ TtSequence(sp, seq) => {
+ if seq.op == ast::ZeroOrMore {
let mut new_ei = ei.clone();
- new_ei.match_cur += match_num;
+ new_ei.match_cur += seq.num_captures;
new_ei.idx += 1u;
//we specifically matched zero repeats.
- for idx in range(ei.match_cur, ei.match_cur + match_num) {
- new_ei.matches[idx]
- .push(Rc::new(MatchedSeq(Vec::new(), sp)));
+ for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) {
+ new_ei.matches[idx].push(Rc::new(MatchedSeq(Vec::new(), sp)));
}
cur_eis.push(new_ei);
let ei_t = ei;
cur_eis.push(box MatcherPos {
stack: vec![],
- elts: matchers.clone(),
- sep: (*sep).clone(),
+ sep: seq.separator.clone(),
idx: 0u,
matches: matches,
match_lo: ei_t.match_cur,
match_cur: ei_t.match_cur,
- match_hi: ei_t.match_cur + match_num,
+ match_hi: ei_t.match_cur + seq.num_captures,
up: Some(ei_t),
- sp_lo: sp.lo
+ sp_lo: sp.lo,
+ top_elts: Tt(TtSequence(sp, seq)),
});
}
TtToken(_, MatchNt(..)) => {
return Error(sp, "Cannot transcribe in macro LHS".into_string())
}
seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
- let tts = seq.expand_into_tts();
- let elts = mem::replace(&mut ei.elts, tts);
+ let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
let idx = ei.idx;
ei.stack.push(MatcherTtFrame {
- elts: elts,
+ elts: lower_elts,
idx: idx,
});
ei.idx = 0;
if (bb_eis.len() > 0u && next_eis.len() > 0u)
|| bb_eis.len() > 1u {
let nts = bb_eis.iter().map(|ei| {
- match (*ei.elts)[ei.idx] {
+ match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(bind, name, _, _)) => {
(format!("{} ('{}')",
token::get_ident(name),
let mut rust_parser = Parser::new(sess, cfg.clone(), box rdr.clone());
let mut ei = bb_eis.pop().unwrap();
- match (*ei.elts)[ei.idx] {
+ match ei.top_elts.get_tt(ei.idx) {
TtToken(_, MatchNt(_, name, _, _)) => {
let name_string = token::get_ident(name);
let match_cur = ei.match_cur;
let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
let argument_gram = vec!(
TtSequence(DUMMY_SP,
- Rc::new(vec![
- TtToken(DUMMY_SP, match_lhs),
- TtToken(DUMMY_SP, token::FatArrow),
- TtToken(DUMMY_SP, match_rhs)]),
- Some(token::Semi),
- ast::OneOrMore,
- 2),
+ Rc::new(ast::SequenceRepetition {
+ tts: vec![
+ TtToken(DUMMY_SP, match_lhs_tok),
+ TtToken(DUMMY_SP, token::FatArrow),
+ TtToken(DUMMY_SP, match_rhs_tok)],
+ separator: Some(token::Semi),
+ op: ast::OneOrMore,
+ num_captures: 2
+ })),
//to phase into semicolon-termination instead of
//semicolon-separation
TtSequence(DUMMY_SP,
- Rc::new(vec![TtToken(DUMMY_SP, token::Semi)]),
- None,
- ast::ZeroOrMore,
- 0));
+ Rc::new(ast::SequenceRepetition {
+ tts: vec![TtToken(DUMMY_SP, token::Semi)],
+ separator: None,
+ op: ast::ZeroOrMore,
+ num_captures: 0
+ })));
// Parse the macro_rules! invocation (`none` is for no interpolations):
///an unzipping of `TokenTree`s
#[deriving(Clone)]
struct TtFrame {
- forest: Rc<Vec<ast::TokenTree>>,
+ forest: TokenTree,
idx: uint,
dotdotdoted: bool,
sep: Option<Token>,
let mut r = TtReader {
sp_diag: sp_diag,
stack: vec!(TtFrame {
- forest: Rc::new(src),
+ forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
+ tts: src,
+ // The separator, op, and capture count are irrelevant here;
+ // this frame merely holds the root unzipping of `src`.
+ separator: None, op: ast::ZeroOrMore, num_captures: 0
+ })),
idx: 0,
dotdotdoted: false,
sep: None,
size + lockstep_iter_size(tt, r)
})
},
- TtSequence(_, ref tts, _, _, _) => {
- tts.iter().fold(LisUnconstrained, |size, tt| {
+ TtSequence(_, ref seq) => {
+ seq.tts.iter().fold(LisUnconstrained, |size, tt| {
size + lockstep_iter_size(tt, r)
})
},
let t = {
let frame = r.stack.last().unwrap();
// FIXME(pcwalton): Bad copy.
- (*frame.forest)[frame.idx].clone()
+ frame.forest.get_tt(frame.idx)
};
match t {
- TtSequence(sp, tts, sep, kleene_op, n) => {
+ TtSequence(sp, seq) => {
// FIXME(pcwalton): Bad copy.
- match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op, n),
+ match lockstep_iter_size(&TtSequence(sp, seq.clone()),
r) {
LisUnconstrained => {
r.sp_diag.span_fatal(
}
LisConstraint(len, _) => {
if len == 0 {
- if kleene_op == ast::OneOrMore {
+ if seq.op == ast::OneOrMore {
// FIXME #2887 blame invoker
r.sp_diag.span_fatal(sp.clone(),
"this must repeat at least once");
r.repeat_len.push(len);
r.repeat_idx.push(0);
r.stack.push(TtFrame {
- forest: tts,
idx: 0,
dotdotdoted: true,
- sep: sep.clone()
+ sep: seq.separator.clone(),
+ forest: TtSequence(sp, seq),
});
}
}
match lookup_cur_matched(r, ident) {
None => {
r.stack.push(TtFrame {
- forest: TtToken(sp, SubstNt(ident, namep)).expand_into_tts(),
+ forest: TtToken(sp, SubstNt(ident, namep)),
idx: 0,
dotdotdoted: false,
sep: None
seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
// do not advance the idx yet
r.stack.push(TtFrame {
- forest: seq.expand_into_tts(),
+ forest: seq,
idx: 0,
dotdotdoted: false,
sep: None
}
TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
r.stack.push(TtFrame {
- forest: TtToken(sp, DocComment(name)).expand_into_tts(),
+ forest: TtToken(sp, DocComment(name)),
idx: 0,
dotdotdoted: false,
sep: None
}
))
},
- TtSequence(span, ref pattern, ref sep, is_optional, advance_by) =>
+ TtSequence(span, ref seq) =>
TtSequence(span,
- Rc::new(fld.fold_tts(pattern.as_slice())),
- sep.clone().map(|tok| fld.fold_token(tok)),
- is_optional,
- advance_by),
+ Rc::new(SequenceRepetition {
+ tts: fld.fold_tts(seq.tts.as_slice()),
+ separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
+ ..**seq
+ })),
}
}
use ast::{StructVariantKind, BiSub};
use ast::StrStyle;
use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
-use ast::{Delimited, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken};
+use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef};
+use ast::{TtDelimited, TtSequence, TtToken};
use ast::{TupleVariantKind, Ty, Ty_, TyBot};
use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
use ast::{TyTypeof, TyInfer, TypeMethod};
Spanned { node, .. } => node,
};
let name_num = macro_parser::count_names(seq.as_slice());
- TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat, name_num)
+ TtSequence(mk_sp(sp.lo, p.span.hi),
+ Rc::new(SequenceRepetition {
+ tts: seq,
+ separator: sep,
+ op: repeat,
+ num_captures: name_num
+ }))
} else {
// A nonterminal that matches or not
let namep = match p.token { token::Ident(_, p) => p, _ => token::Plain };
try!(space(&mut self.s));
word(&mut self.s, token_to_string(&delimed.close_token()).as_slice())
},
- ast::TtSequence(_, ref tts, ref separator, kleene_op, _) => {
+ ast::TtSequence(_, ref seq) => {
try!(word(&mut self.s, "$("));
- for tt_elt in (*tts).iter() {
+ for tt_elt in seq.tts.iter() {
try!(self.print_tt(tt_elt));
}
try!(word(&mut self.s, ")"));
- match *separator {
+ match seq.separator {
Some(ref tk) => {
try!(word(&mut self.s, token_to_string(tk).as_slice()));
}
None => {},
}
- match kleene_op {
+ match seq.op {
ast::ZeroOrMore => word(&mut self.s, "*"),
ast::OneOrMore => word(&mut self.s, "+"),
}