pub fn as_str<'a>(&'a self) -> &'a str {
self.name.as_str()
}
-
- pub fn encode_with_hygiene(&self) -> String {
- format!("\x00name_{},ctxt_{}\x00",
- self.name.usize(),
- self.ctxt)
- }
}
impl fmt::Debug for Ident {
use ext::base::ExtCtxt;
use parse::token;
use parse;
- use print::pprust;
use ptr::P;
+ use std::rc::Rc;
- use ast::{TokenTree, Generics, Expr};
+ use ast::{TokenTree, Expr};
pub use parse::new_parser_from_tts;
- pub use codemap::{BytePos, Span, dummy_spanned};
+ pub use codemap::{BytePos, Span, dummy_spanned, DUMMY_SP};
pub trait ToTokens {
- fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> ;
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree>;
}
impl ToTokens for TokenTree {
}
}
- /* Should be (when bugs in default methods are fixed):
-
- trait ToSource : ToTokens {
- // Takes a thing and generates a string containing rust code for it.
- pub fn to_source() -> String;
-
- // If you can make source, you can definitely make tokens.
- pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] {
- cx.parse_tts(self.to_source())
+ impl ToTokens for ast::Ident {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(DUMMY_SP, token::Ident(*self, token::Plain))]
}
}
- */
-
- // FIXME: Move this trait to pprust and get rid of *_to_str?
- pub trait ToSource {
- // Takes a thing and generates a string containing rust code for it.
- fn to_source(&self) -> String;
+ impl ToTokens for ast::Path {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))]
+ }
}
- // FIXME (Issue #16472): This should go away after ToToken impls
- // are revised to go directly to token-trees.
- trait ToSourceWithHygiene : ToSource {
- // Takes a thing and generates a string containing rust code
- // for it, encoding Idents as special byte sequences to
- // maintain hygiene across serialization and deserialization.
- fn to_source_with_hygiene(&self) -> String;
+ impl ToTokens for ast::Ty {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(self.span, token::Interpolated(token::NtTy(P(self.clone()))))]
+ }
}
- macro_rules! impl_to_source {
- (P<$t:ty>, $pp:ident) => (
- impl ToSource for P<$t> {
- fn to_source(&self) -> String {
- pprust::$pp(&**self)
- }
- }
- impl ToSourceWithHygiene for P<$t> {
- fn to_source_with_hygiene(&self) -> String {
- pprust::with_hygiene::$pp(&**self)
- }
- }
- );
- ($t:ty, $pp:ident) => (
- impl ToSource for $t {
- fn to_source(&self) -> String {
- pprust::$pp(self)
- }
- }
- impl ToSourceWithHygiene for $t {
- fn to_source_with_hygiene(&self) -> String {
- pprust::with_hygiene::$pp(self)
- }
- }
- );
+ impl ToTokens for ast::Block {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))]
+ }
}
- fn slice_to_source<'a, T: ToSource>(sep: &'static str, xs: &'a [T]) -> String {
- xs.iter()
- .map(|i| i.to_source())
- .collect::<Vec<String>>()
- .connect(sep)
- .to_string()
+ impl ToTokens for P<ast::Item> {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(self.span, token::Interpolated(token::NtItem(self.clone())))]
+ }
}
- fn slice_to_source_with_hygiene<'a, T: ToSourceWithHygiene>(
- sep: &'static str, xs: &'a [T]) -> String {
- xs.iter()
- .map(|i| i.to_source_with_hygiene())
- .collect::<Vec<String>>()
- .connect(sep)
- .to_string()
+ impl ToTokens for P<ast::ImplItem> {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(self.span, token::Interpolated(token::NtImplItem(self.clone())))]
+ }
}
- macro_rules! impl_to_source_slice {
- ($t:ty, $sep:expr) => (
- impl ToSource for [$t] {
- fn to_source(&self) -> String {
- slice_to_source($sep, self)
- }
- }
-
- impl ToSourceWithHygiene for [$t] {
- fn to_source_with_hygiene(&self) -> String {
- slice_to_source_with_hygiene($sep, self)
- }
- }
- )
+ impl ToTokens for P<ast::TraitItem> {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(self.span, token::Interpolated(token::NtTraitItem(self.clone())))]
+ }
}
- impl ToSource for ast::Ident {
- fn to_source(&self) -> String {
- token::get_ident(*self).to_string()
+ impl ToTokens for P<ast::Stmt> {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(self.span, token::Interpolated(token::NtStmt(self.clone())))]
}
}
- impl ToSourceWithHygiene for ast::Ident {
- fn to_source_with_hygiene(&self) -> String {
- self.encode_with_hygiene()
+ impl ToTokens for P<ast::Expr> {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(self.span, token::Interpolated(token::NtExpr(self.clone())))]
}
}
- impl_to_source! { ast::Path, path_to_string }
- impl_to_source! { ast::Ty, ty_to_string }
- impl_to_source! { ast::Block, block_to_string }
- impl_to_source! { ast::Arg, arg_to_string }
- impl_to_source! { Generics, generics_to_string }
- impl_to_source! { ast::WhereClause, where_clause_to_string }
- impl_to_source! { P<ast::Item>, item_to_string }
- impl_to_source! { P<ast::ImplItem>, impl_item_to_string }
- impl_to_source! { P<ast::TraitItem>, trait_item_to_string }
- impl_to_source! { P<ast::Stmt>, stmt_to_string }
- impl_to_source! { P<ast::Expr>, expr_to_string }
- impl_to_source! { P<ast::Pat>, pat_to_string }
- impl_to_source! { ast::Arm, arm_to_string }
- impl_to_source_slice! { ast::Ty, ", " }
- impl_to_source_slice! { P<ast::Item>, "\n\n" }
-
- impl ToSource for ast::Attribute_ {
- fn to_source(&self) -> String {
- pprust::attribute_to_string(&dummy_spanned(self.clone()))
+ impl ToTokens for P<ast::Pat> {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(self.span, token::Interpolated(token::NtPat(self.clone())))]
}
}
- impl ToSourceWithHygiene for ast::Attribute_ {
- fn to_source_with_hygiene(&self) -> String {
- self.to_source()
+
+ impl ToTokens for ast::Arm {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))]
}
}
- impl ToSource for str {
- fn to_source(&self) -> String {
- let lit = dummy_spanned(ast::LitStr(
- token::intern_and_get_ident(self), ast::CookedStr));
- pprust::lit_to_string(&lit)
- }
+ macro_rules! impl_to_tokens_slice {
+ ($t: ty, $sep: expr) => {
+ impl ToTokens for [$t] {
+ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
+ let mut v = vec![];
+ for (i, x) in self.iter().enumerate() {
+ if i > 0 {
+ v.push_all(&$sep);
+ }
+ v.extend(x.to_tokens(cx));
+ }
+ v
+ }
+ }
+ };
}
- impl ToSourceWithHygiene for str {
- fn to_source_with_hygiene(&self) -> String {
- self.to_source()
+
+ impl_to_tokens_slice! { ast::Ty, [ast::TtToken(DUMMY_SP, token::Comma)] }
+ impl_to_tokens_slice! { P<ast::Item>, [] }
+
+ impl ToTokens for P<ast::MetaItem> {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))]
}
}
- impl ToSource for () {
- fn to_source(&self) -> String {
- "()".to_string()
+ impl ToTokens for ast::Attribute {
+ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
+ let mut r = vec![];
+ // FIXME: The spans could be better
+ r.push(ast::TtToken(self.span, token::Pound));
+ if self.node.style == ast::AttrInner {
+ r.push(ast::TtToken(self.span, token::Not));
+ }
+ r.push(ast::TtDelimited(self.span, Rc::new(ast::Delimited {
+ delim: token::Bracket,
+ open_span: self.span,
+ tts: self.node.value.to_tokens(cx),
+ close_span: self.span,
+ })));
+ r
}
}
- impl ToSourceWithHygiene for () {
- fn to_source_with_hygiene(&self) -> String {
- self.to_source()
+
+ impl ToTokens for str {
+ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
+ let lit = ast::LitStr(
+ token::intern_and_get_ident(self), ast::CookedStr);
+ dummy_spanned(lit).to_tokens(cx)
}
}
- impl ToSource for bool {
- fn to_source(&self) -> String {
- let lit = dummy_spanned(ast::LitBool(*self));
- pprust::lit_to_string(&lit)
+ impl ToTokens for () {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
+ vec![ast::TtDelimited(DUMMY_SP, Rc::new(ast::Delimited {
+ delim: token::Paren,
+ open_span: DUMMY_SP,
+ tts: vec![],
+ close_span: DUMMY_SP,
+ }))]
}
}
- impl ToSourceWithHygiene for bool {
- fn to_source_with_hygiene(&self) -> String {
- self.to_source()
+
+ impl ToTokens for ast::Lit {
+ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
+ // FIXME: This is wrong
+ P(ast::Expr {
+ id: ast::DUMMY_NODE_ID,
+ node: ast::ExprLit(P(self.clone())),
+ span: DUMMY_SP,
+ }).to_tokens(cx)
}
}
- impl ToSource for char {
- fn to_source(&self) -> String {
- let lit = dummy_spanned(ast::LitChar(*self));
- pprust::lit_to_string(&lit)
+ impl ToTokens for bool {
+ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
+ dummy_spanned(ast::LitBool(*self)).to_tokens(cx)
}
}
- impl ToSourceWithHygiene for char {
- fn to_source_with_hygiene(&self) -> String {
- self.to_source()
+
+ impl ToTokens for char {
+ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
+ dummy_spanned(ast::LitChar(*self)).to_tokens(cx)
}
}
- macro_rules! impl_to_source_int {
+ macro_rules! impl_to_tokens_int {
(signed, $t:ty, $tag:expr) => (
- impl ToSource for $t {
- fn to_source(&self) -> String {
+ impl ToTokens for $t {
+ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
let lit = ast::LitInt(*self as u64, ast::SignedIntLit($tag,
ast::Sign::new(*self)));
- pprust::lit_to_string(&dummy_spanned(lit))
- }
- }
- impl ToSourceWithHygiene for $t {
- fn to_source_with_hygiene(&self) -> String {
- self.to_source()
+ dummy_spanned(lit).to_tokens(cx)
}
}
);
(unsigned, $t:ty, $tag:expr) => (
- impl ToSource for $t {
- fn to_source(&self) -> String {
+ impl ToTokens for $t {
+ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
let lit = ast::LitInt(*self as u64, ast::UnsignedIntLit($tag));
- pprust::lit_to_string(&dummy_spanned(lit))
- }
- }
- impl ToSourceWithHygiene for $t {
- fn to_source_with_hygiene(&self) -> String {
- self.to_source()
+ dummy_spanned(lit).to_tokens(cx)
}
}
);
}
- impl_to_source_int! { signed, isize, ast::TyIs }
- impl_to_source_int! { signed, i8, ast::TyI8 }
- impl_to_source_int! { signed, i16, ast::TyI16 }
- impl_to_source_int! { signed, i32, ast::TyI32 }
- impl_to_source_int! { signed, i64, ast::TyI64 }
-
- impl_to_source_int! { unsigned, usize, ast::TyUs }
- impl_to_source_int! { unsigned, u8, ast::TyU8 }
- impl_to_source_int! { unsigned, u16, ast::TyU16 }
- impl_to_source_int! { unsigned, u32, ast::TyU32 }
- impl_to_source_int! { unsigned, u64, ast::TyU64 }
-
- // Alas ... we write these out instead. All redundant.
-
- macro_rules! impl_to_tokens {
- ($t:ty) => (
- impl ToTokens for $t {
- fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
- cx.parse_tts_with_hygiene(self.to_source_with_hygiene())
- }
- }
- )
- }
+ impl_to_tokens_int! { signed, isize, ast::TyIs }
+ impl_to_tokens_int! { signed, i8, ast::TyI8 }
+ impl_to_tokens_int! { signed, i16, ast::TyI16 }
+ impl_to_tokens_int! { signed, i32, ast::TyI32 }
+ impl_to_tokens_int! { signed, i64, ast::TyI64 }
- macro_rules! impl_to_tokens_lifetime {
- ($t:ty) => (
- impl<'a> ToTokens for $t {
- fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
- cx.parse_tts_with_hygiene(self.to_source_with_hygiene())
- }
- }
- )
- }
-
- impl_to_tokens! { ast::Ident }
- impl_to_tokens! { ast::Path }
- impl_to_tokens! { P<ast::Item> }
- impl_to_tokens! { P<ast::ImplItem> }
- impl_to_tokens! { P<ast::TraitItem> }
- impl_to_tokens! { P<ast::Pat> }
- impl_to_tokens! { ast::Arm }
- impl_to_tokens_lifetime! { &'a [P<ast::Item>] }
- impl_to_tokens! { ast::Ty }
- impl_to_tokens_lifetime! { &'a [ast::Ty] }
- impl_to_tokens! { Generics }
- impl_to_tokens! { ast::WhereClause }
- impl_to_tokens! { P<ast::Stmt> }
- impl_to_tokens! { P<ast::Expr> }
- impl_to_tokens! { ast::Block }
- impl_to_tokens! { ast::Arg }
- impl_to_tokens! { ast::Attribute_ }
- impl_to_tokens_lifetime! { &'a str }
- impl_to_tokens! { () }
- impl_to_tokens! { char }
- impl_to_tokens! { bool }
- impl_to_tokens! { isize }
- impl_to_tokens! { i8 }
- impl_to_tokens! { i16 }
- impl_to_tokens! { i32 }
- impl_to_tokens! { i64 }
- impl_to_tokens! { usize }
- impl_to_tokens! { u8 }
- impl_to_tokens! { u16 }
- impl_to_tokens! { u32 }
- impl_to_tokens! { u64 }
+ impl_to_tokens_int! { unsigned, usize, ast::TyUs }
+ impl_to_tokens_int! { unsigned, u8, ast::TyU8 }
+ impl_to_tokens_int! { unsigned, u16, ast::TyU16 }
+ impl_to_tokens_int! { unsigned, u32, ast::TyU32 }
+ impl_to_tokens_int! { unsigned, u64, ast::TyU64 }
pub trait ExtParseUtils {
fn parse_item(&self, s: String) -> P<ast::Item>;
fn parse_tts(&self, s: String) -> Vec<ast::TokenTree>;
}
- trait ExtParseUtilsWithHygiene {
- // FIXME (Issue #16472): This should go away after ToToken impls
- // are revised to go directly to token-trees.
- fn parse_tts_with_hygiene(&self, s: String) -> Vec<ast::TokenTree>;
- }
-
impl<'a> ExtParseUtils for ExtCtxt<'a> {
fn parse_item(&self, s: String) -> P<ast::Item> {
self.parse_sess())
}
}
-
- impl<'a> ExtParseUtilsWithHygiene for ExtCtxt<'a> {
-
- fn parse_tts_with_hygiene(&self, s: String) -> Vec<ast::TokenTree> {
- use parse::with_hygiene::parse_tts_from_source_str;
- parse_tts_from_source_str("<quote expansion>".to_string(),
- s,
- self.cfg(),
- self.parse_sess())
- }
-
- }
-
}
pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt,
token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&*tt))),
+ token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
+ token::NtImplItem(arm) =>
+ token::NtImplItem(fld.fold_impl_item(arm)
+ .expect_one("expected fold to produce exactly one item")),
+ token::NtTraitItem(arm) =>
+ token::NtTraitItem(fld.fold_trait_item(arm)
+ .expect_one("expected fold to produce exactly one item")),
}
}
use std::borrow::Cow;
use std::char;
-use std::fmt;
use std::mem::replace;
use std::rc::Rc;
pub peek_tok: token::Token,
pub peek_span: Span,
- // FIXME (Issue #16472): This field should go away after ToToken impls
- // are revised to go directly to token-trees.
- /// Is \x00<name>,<ctxt>\x00 is interpreted as encoded ast::Ident?
- read_embedded_ident: bool,
-
// cache a direct reference to the source text, so that we don't have to
// retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
source_text: Rc<String>
}
}
-// FIXME (Issue #16472): This function should go away after
-// ToToken impls are revised to go directly to token-trees.
-pub fn make_reader_with_embedded_idents<'b>(span_diagnostic: &'b SpanHandler,
- filemap: Rc<codemap::FileMap>)
- -> StringReader<'b> {
- let mut sr = StringReader::new_raw(span_diagnostic, filemap);
- sr.read_embedded_ident = true;
- sr.advance_token();
- sr
-}
-
impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into pos and curr
pub fn new_raw<'b>(span_diagnostic: &'b SpanHandler,
/* dummy values; not read */
peek_tok: token::Eof,
peek_span: codemap::DUMMY_SP,
- read_embedded_ident: false,
source_text: source_text
};
sr.bump();
})
}
- // FIXME (Issue #16472): The scan_embedded_hygienic_ident function
- // should go away after we revise the syntax::ext::quote::ToToken
- // impls to go directly to token-trees instead of thing -> string
- // -> token-trees. (The function is currently used to resolve
- // Issues #15750 and #15962.)
- //
- // Since this function is only used for certain internal macros,
- // and the functionality it provides is not exposed to end user
- // programs, pnkfelix deliberately chose to write it in a way that
- // favors rustc debugging effectiveness over runtime efficiency.
-
- /// Scan through input of form \x00name_NNNNNN,ctxt_CCCCCCC\x00
- /// whence: `NNNNNN` is a string of characters forming an integer
- /// (the name) and `CCCCCCC` is a string of characters forming an
- /// integer (the ctxt), separate by a comma and delimited by a
- /// `\x00` marker.
- #[inline(never)]
- fn scan_embedded_hygienic_ident(&mut self) -> ast::Ident {
- fn bump_expecting_char<'a,D:fmt::Debug>(r: &mut StringReader<'a>,
- c: char,
- described_c: D,
- whence: &str) {
- match r.curr {
- Some(r_c) if r_c == c => r.bump(),
- Some(r_c) => panic!("expected {:?}, hit {:?}, {}", described_c, r_c, whence),
- None => panic!("expected {:?}, hit EOF, {}", described_c, whence),
- }
- }
-
- let whence = "while scanning embedded hygienic ident";
-
- // skip over the leading `\x00`
- bump_expecting_char(self, '\x00', "nul-byte", whence);
-
- // skip over the "name_"
- for c in "name_".chars() {
- bump_expecting_char(self, c, c, whence);
- }
-
- let start_bpos = self.last_pos;
- let base = 10;
-
- // find the integer representing the name
- self.scan_digits(base, base);
- let encoded_name : u32 = self.with_str_from(start_bpos, |s| {
- u32::from_str_radix(s, 10).unwrap_or_else(|_| {
- panic!("expected digits representing a name, got {:?}, {}, range [{:?},{:?}]",
- s, whence, start_bpos, self.last_pos);
- })
- });
-
- // skip over the `,`
- bump_expecting_char(self, ',', "comma", whence);
-
- // skip over the "ctxt_"
- for c in "ctxt_".chars() {
- bump_expecting_char(self, c, c, whence);
- }
-
- // find the integer representing the ctxt
- let start_bpos = self.last_pos;
- self.scan_digits(base, base);
- let encoded_ctxt : ast::SyntaxContext = self.with_str_from(start_bpos, |s| {
- u32::from_str_radix(s, 10).unwrap_or_else(|_| {
- panic!("expected digits representing a ctxt, got {:?}, {}", s, whence);
- })
- });
-
- // skip over the `\x00`
- bump_expecting_char(self, '\x00', "nul-byte", whence);
-
- ast::Ident { name: ast::Name(encoded_name),
- ctxt: encoded_ctxt, }
- }
-
/// Scan through any digits (base `scan_radix`) or underscores,
/// and return how many digits there were.
///
return token::Literal(num, suffix)
}
- if self.read_embedded_ident {
- match (c.unwrap(), self.nextch(), self.nextnextch()) {
- ('\x00', Some('n'), Some('a')) => {
- let ast_ident = self.scan_embedded_hygienic_ident();
- return if self.curr_is(':') && self.nextch_is(':') {
- token::Ident(ast_ident, token::ModName)
- } else {
- token::Ident(ast_ident, token::Plain)
- };
- }
- _ => {}
- }
- }
-
match c.expect("next_token_inner called at EOF") {
// One-byte tokens.
';' => { self.bump(); return token::Semi; }
maybe_aborted(p.parse_stmt(), p)
}
-// Note: keep in sync with `with_hygiene::parse_tts_from_source_str`
-// until #16472 is resolved.
-//
// Warning: This parses with quote_depth > 0, which is not the default.
pub fn parse_tts_from_source_str(name: String,
source: String,
maybe_aborted(panictry!(p.parse_all_token_trees()),p)
}
-// Note: keep in sync with `with_hygiene::new_parser_from_source_str`
-// until #16472 is resolved.
// Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
p
}
-// Note: keep this in sync with `with_hygiene::filemap_to_parser` until
-// #16472 is resolved.
/// Given a filemap and config, return a parser
pub fn filemap_to_parser<'a>(sess: &'a ParseSess,
filemap: Rc<FileMap>,
sess.span_diagnostic.cm.new_filemap(path, source)
}
-// Note: keep this in sync with `with_hygiene::filemap_to_tts` (apart
-// from the StringReader constructor), until #16472 is resolved.
/// Given a filemap, produce a sequence of token-trees
pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
-> Vec<ast::TokenTree> {
p
}
-// FIXME (Issue #16472): The `with_hygiene` mod should go away after
-// ToToken impls are revised to go directly to token-trees.
-pub mod with_hygiene {
- use ast;
- use codemap::FileMap;
- use parse::parser::Parser;
- use std::rc::Rc;
- use super::ParseSess;
- use super::{maybe_aborted, string_to_filemap, tts_to_parser};
-
- // Note: keep this in sync with `super::parse_tts_from_source_str` until
- // #16472 is resolved.
- //
- // Warning: This parses with quote_depth > 0, which is not the default.
- pub fn parse_tts_from_source_str(name: String,
- source: String,
- cfg: ast::CrateConfig,
- sess: &ParseSess) -> Vec<ast::TokenTree> {
- let mut p = new_parser_from_source_str(
- sess,
- cfg,
- name,
- source
- );
- p.quote_depth += 1;
- // right now this is re-creating the token trees from ... token trees.
- maybe_aborted(panictry!(p.parse_all_token_trees()),p)
- }
-
- // Note: keep this in sync with `super::new_parser_from_source_str` until
- // #16472 is resolved.
- // Create a new parser from a source string
- fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
- cfg: ast::CrateConfig,
- name: String,
- source: String) -> Parser<'a> {
- filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
- }
-
- // Note: keep this in sync with `super::filemap_to_parserr` until
- // #16472 is resolved.
- /// Given a filemap and config, return a parser
- fn filemap_to_parser<'a>(sess: &'a ParseSess,
- filemap: Rc<FileMap>,
- cfg: ast::CrateConfig) -> Parser<'a> {
- tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg)
- }
-
- // Note: keep this in sync with `super::filemap_to_tts` until
- // #16472 is resolved.
- /// Given a filemap, produce a sequence of token-trees
- fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
- -> Vec<ast::TokenTree> {
- // it appears to me that the cfg doesn't matter here... indeed,
- // parsing tt's probably shouldn't require a parser at all.
- use super::lexer::make_reader_with_embedded_idents as make_reader;
- let cfg = Vec::new();
- let srdr = make_reader(&sess.span_diagnostic, filemap);
- let mut p1 = Parser::new(sess, cfg, Box::new(srdr));
- panictry!(p1.parse_all_token_trees())
- }
-}
-
/// Abort if necessary
pub fn maybe_aborted<T>(result: T, p: Parser) -> T {
p.abort_if_errors();
&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
seq_sep_none(),
- |p| {
+ |p| -> PResult<P<TraitItem>> {
+ maybe_whole!(no_clone p, NtTraitItem);
let mut attrs = p.parse_outer_attributes();
let lo = p.span.lo;
}
pub fn parse_arm_nopanic(&mut self) -> PResult<Arm> {
+ maybe_whole!(no_clone self, NtArm);
+
let attrs = self.parse_outer_attributes();
let pats = try!(self.parse_pats());
let mut guard = None;
/// Parse an impl item.
pub fn parse_impl_item(&mut self) -> PResult<P<ImplItem>> {
+ maybe_whole!(no_clone self, NtImplItem);
+
let mut attrs = self.parse_outer_attributes();
let lo = self.span.lo;
let vis = try!(self.parse_visibility());
NtMeta(P<ast::MetaItem>),
NtPath(Box<ast::Path>),
NtTT(P<ast::TokenTree>), // needs P'ed to break a circularity
+ // These are not exposed to macros, but are used by quasiquote.
+ NtArm(ast::Arm),
+ NtImplItem(P<ast::ImplItem>),
+ NtTraitItem(P<ast::TraitItem>),
}
impl fmt::Debug for Nonterminal {
NtMeta(..) => f.pad("NtMeta(..)"),
NtPath(..) => f.pad("NtPath(..)"),
NtTT(..) => f.pad("NtTT(..)"),
+ NtArm(..) => f.pad("NtArm(..)"),
+ NtImplItem(..) => f.pad("NtImplItem(..)"),
+ NtTraitItem(..) => f.pad("NtTraitItem(..)"),
}
}
}
cur_cmnt_and_lit: CurrentCommentAndLiteral,
boxes: Vec<pp::Breaks>,
ann: &'a (PpAnn+'a),
- encode_idents_with_hygiene: bool,
}
pub fn rust_printer<'a>(writer: Box<Write+'a>) -> State<'a> {
},
boxes: Vec::new(),
ann: ann,
- encode_idents_with_hygiene: false,
}
}
},
boxes: Vec::new(),
ann: ann,
- encode_idents_with_hygiene: false,
}
}
}
token::SpecialVarNt(var) => format!("${}", var.as_str()),
token::Interpolated(ref nt) => match *nt {
- token::NtExpr(ref e) => expr_to_string(&**e),
- token::NtMeta(ref e) => meta_item_to_string(&**e),
- token::NtTy(ref e) => ty_to_string(&**e),
- token::NtPath(ref e) => path_to_string(&**e),
- token::NtItem(..) => "an interpolated item".to_string(),
- token::NtBlock(..) => "an interpolated block".to_string(),
- token::NtStmt(..) => "an interpolated statement".to_string(),
- token::NtPat(..) => "an interpolated pattern".to_string(),
- token::NtIdent(..) => "an interpolated identifier".to_string(),
- token::NtTT(..) => "an interpolated tt".to_string(),
+ token::NtExpr(ref e) => expr_to_string(&**e),
+ token::NtMeta(ref e) => meta_item_to_string(&**e),
+ token::NtTy(ref e) => ty_to_string(&**e),
+ token::NtPath(ref e) => path_to_string(&**e),
+ token::NtItem(..) => "an interpolated item".to_string(),
+ token::NtBlock(..) => "an interpolated block".to_string(),
+ token::NtStmt(..) => "an interpolated statement".to_string(),
+ token::NtPat(..) => "an interpolated pattern".to_string(),
+ token::NtIdent(..) => "an interpolated identifier".to_string(),
+ token::NtTT(..) => "an interpolated tt".to_string(),
+ token::NtArm(..) => "an interpolated arm".to_string(),
+ token::NtImplItem(..) => "an interpolated impl item".to_string(),
+ token::NtTraitItem(..) => "an interpolated trait item".to_string(),
}
}
}
-// FIXME (Issue #16472): the thing_to_string_impls macro should go away
-// after we revise the syntax::ext::quote::ToToken impls to go directly
-// to token-trees instead of thing -> string -> token-trees.
-
-macro_rules! thing_to_string_impls {
- ($to_string:ident) => {
-
pub fn ty_to_string(ty: &ast::Ty) -> String {
- $to_string(|s| s.print_type(ty))
+ to_string(|s| s.print_type(ty))
}
pub fn bounds_to_string(bounds: &[ast::TyParamBound]) -> String {
- $to_string(|s| s.print_bounds("", bounds))
+ to_string(|s| s.print_bounds("", bounds))
}
pub fn pat_to_string(pat: &ast::Pat) -> String {
- $to_string(|s| s.print_pat(pat))
+ to_string(|s| s.print_pat(pat))
}
pub fn arm_to_string(arm: &ast::Arm) -> String {
- $to_string(|s| s.print_arm(arm))
+ to_string(|s| s.print_arm(arm))
}
pub fn expr_to_string(e: &ast::Expr) -> String {
- $to_string(|s| s.print_expr(e))
+ to_string(|s| s.print_expr(e))
}
pub fn lifetime_to_string(e: &ast::Lifetime) -> String {
- $to_string(|s| s.print_lifetime(e))
+ to_string(|s| s.print_lifetime(e))
}
pub fn tt_to_string(tt: &ast::TokenTree) -> String {
- $to_string(|s| s.print_tt(tt))
+ to_string(|s| s.print_tt(tt))
}
pub fn tts_to_string(tts: &[ast::TokenTree]) -> String {
- $to_string(|s| s.print_tts(tts))
+ to_string(|s| s.print_tts(tts))
}
pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
- $to_string(|s| s.print_stmt(stmt))
+ to_string(|s| s.print_stmt(stmt))
}
pub fn attr_to_string(attr: &ast::Attribute) -> String {
- $to_string(|s| s.print_attribute(attr))
+ to_string(|s| s.print_attribute(attr))
}
pub fn item_to_string(i: &ast::Item) -> String {
- $to_string(|s| s.print_item(i))
+ to_string(|s| s.print_item(i))
}
pub fn impl_item_to_string(i: &ast::ImplItem) -> String {
- $to_string(|s| s.print_impl_item(i))
+ to_string(|s| s.print_impl_item(i))
}
pub fn trait_item_to_string(i: &ast::TraitItem) -> String {
- $to_string(|s| s.print_trait_item(i))
+ to_string(|s| s.print_trait_item(i))
}
pub fn generics_to_string(generics: &ast::Generics) -> String {
- $to_string(|s| s.print_generics(generics))
+ to_string(|s| s.print_generics(generics))
}
pub fn where_clause_to_string(i: &ast::WhereClause) -> String {
- $to_string(|s| s.print_where_clause(i))
+ to_string(|s| s.print_where_clause(i))
}
pub fn fn_block_to_string(p: &ast::FnDecl) -> String {
- $to_string(|s| s.print_fn_block_args(p))
+ to_string(|s| s.print_fn_block_args(p))
}
pub fn path_to_string(p: &ast::Path) -> String {
- $to_string(|s| s.print_path(p, false, 0))
+ to_string(|s| s.print_path(p, false, 0))
}
pub fn ident_to_string(id: &ast::Ident) -> String {
- $to_string(|s| s.print_ident(*id))
+ to_string(|s| s.print_ident(*id))
}
pub fn fun_to_string(decl: &ast::FnDecl, unsafety: ast::Unsafety, name: ast::Ident,
opt_explicit_self: Option<&ast::ExplicitSelf_>,
generics: &ast::Generics) -> String {
- $to_string(|s| {
+ to_string(|s| {
try!(s.head(""));
try!(s.print_fn(decl, unsafety, abi::Rust, Some(name),
generics, opt_explicit_self, ast::Inherited));
}
pub fn block_to_string(blk: &ast::Block) -> String {
- $to_string(|s| {
+ to_string(|s| {
// containing cbox, will be closed by print-block at }
try!(s.cbox(indent_unit));
// head-ibox, will be closed by print-block after {
}
pub fn meta_item_to_string(mi: &ast::MetaItem) -> String {
- $to_string(|s| s.print_meta_item(mi))
+ to_string(|s| s.print_meta_item(mi))
}
pub fn attribute_to_string(attr: &ast::Attribute) -> String {
- $to_string(|s| s.print_attribute(attr))
+ to_string(|s| s.print_attribute(attr))
}
pub fn lit_to_string(l: &ast::Lit) -> String {
- $to_string(|s| s.print_literal(l))
+ to_string(|s| s.print_literal(l))
}
pub fn explicit_self_to_string(explicit_self: &ast::ExplicitSelf_) -> String {
- $to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {}))
+ to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {}))
}
pub fn variant_to_string(var: &ast::Variant) -> String {
- $to_string(|s| s.print_variant(var))
+ to_string(|s| s.print_variant(var))
}
pub fn arg_to_string(arg: &ast::Arg) -> String {
- $to_string(|s| s.print_arg(arg))
+ to_string(|s| s.print_arg(arg))
}
pub fn mac_to_string(arg: &ast::Mac) -> String {
- $to_string(|s| s.print_mac(arg, ::parse::token::Paren))
-}
-
-} }
-
-thing_to_string_impls! { to_string }
-
-// FIXME (Issue #16472): the whole `with_hygiene` mod should go away
-// after we revise the syntax::ext::quote::ToToken impls to go directly
-// to token-trees instea of thing -> string -> token-trees.
-
-pub mod with_hygiene {
- use abi;
- use ast;
- use std::io;
- use super::indent_unit;
-
- // This function is the trick that all the rest of the routines
- // hang on.
- pub fn to_string_hyg<F>(f: F) -> String where
- F: FnOnce(&mut super::State) -> io::Result<()>,
- {
- super::to_string(move |s| {
- s.encode_idents_with_hygiene = true;
- f(s)
- })
- }
-
- thing_to_string_impls! { to_string_hyg }
+ to_string(|s| s.print_mac(arg, ::parse::token::Paren))
}
pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> String {
}
pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> {
- if self.encode_idents_with_hygiene {
- let encoded = ident.encode_with_hygiene();
- try!(word(&mut self.s, &encoded[..]))
- } else {
- try!(word(&mut self.s, &token::get_ident(ident)))
- }
+ try!(word(&mut self.s, &token::get_ident(ident)));
self.ann.post(self, NodeIdent(&ident))
}
// except according to those terms.
// ignore-cross-compile
-// ignore-pretty
#![feature(quote, rustc_private)]
extern crate syntax;
-use syntax::ast;
-use syntax::codemap;
-use syntax::parse;
-use syntax::print::pprust;
+use syntax::codemap::DUMMY_SP;
+use syntax::print::pprust::*;
-trait FakeExtCtxt {
- fn call_site(&self) -> codemap::Span;
- fn cfg(&self) -> ast::CrateConfig;
- fn ident_of(&self, st: &str) -> ast::Ident;
- fn name_of(&self, st: &str) -> ast::Name;
- fn parse_sess(&self) -> &parse::ParseSess;
-}
-
-impl FakeExtCtxt for parse::ParseSess {
- fn call_site(&self) -> codemap::Span {
- codemap::Span {
- lo: codemap::BytePos(0),
- hi: codemap::BytePos(0),
- expn_id: codemap::NO_EXPANSION,
+fn main() {
+ let ps = syntax::parse::new_parse_sess();
+ let mut cx = syntax::ext::base::ExtCtxt::new(
+ &ps, vec![],
+ syntax::ext::expand::ExpansionConfig::default("qquote".to_string()));
+ cx.bt_push(syntax::codemap::ExpnInfo {
+ call_site: DUMMY_SP,
+ callee: syntax::codemap::NameAndSpan {
+ name: "".to_string(),
+ format: syntax::codemap::MacroBang,
+ allow_internal_unstable: false,
+ span: None,
}
+ });
+ let cx = &mut cx;
+
+ macro_rules! check {
+ ($f: ident, $($e: expr),+; $expect: expr) => ({
+ $(assert_eq!($f(&$e), $expect);)+
+ });
}
- fn cfg(&self) -> ast::CrateConfig { Vec::new() }
- fn ident_of(&self, st: &str) -> ast::Ident {
- parse::token::str_to_ident(st)
- }
- fn name_of(&self, st: &str) -> ast::Name {
- parse::token::intern(st)
- }
- fn parse_sess(&self) -> &parse::ParseSess { self }
-}
-fn main() {
- let cx = parse::new_parse_sess();
+ let abc = quote_expr!(cx, 23);
+ check!(expr_to_string, abc, *quote_expr!(cx, $abc); "23");
+
+ let ty = quote_ty!(cx, isize);
+ check!(ty_to_string, ty, *quote_ty!(cx, $ty); "isize");
- assert_eq!(pprust::expr_to_string(&*quote_expr!(&cx, 23)), "23");
- assert_eq!(pprust::pat_to_string(&*quote_pat!(&cx, Some(_))), "Some(_)");
- assert_eq!(pprust::ty_to_string(&*quote_ty!(&cx, isize)), "isize");
+ let item = quote_item!(cx, static x: $ty = 10;).unwrap();
+ check!(item_to_string, item, quote_item!(cx, $item).unwrap(); "static x: isize = 10;");
- let arm = quote_arm!(&cx, (ref x, ref y) => (x, y),);
- assert_eq!(pprust::arm_to_string(&arm), " (ref x, ref y) => (x, y),");
+ let twenty: u16 = 20;
+ let stmt = quote_stmt!(cx, let x = $twenty;).unwrap();
+ check!(stmt_to_string, stmt, *quote_stmt!(cx, $stmt).unwrap(); "let x = 20u16;");
- let attr = quote_attr!(&cx, #![cfg(foo = "bar")]);
- assert_eq!(pprust::attr_to_string(&attr), "#![cfg(foo = \"bar\")]");
+ let pat = quote_pat!(cx, Some(_));
+ check!(pat_to_string, pat, *quote_pat!(cx, $pat); "Some(_)");
- let item = quote_item!(&cx, static x : isize = 10;).unwrap();
- assert_eq!(pprust::item_to_string(&*item), "static x: isize = 10;");
+ let expr = quote_expr!(cx, (x, y));
+ let arm = quote_arm!(cx, (ref x, ref y) => $expr,);
+ check!(arm_to_string, arm, quote_arm!(cx, $arm); " (ref x, ref y) => (x, y),");
- let stmt = quote_stmt!(&cx, let x = 20;).unwrap();
- assert_eq!(pprust::stmt_to_string(&*stmt), "let x = 20;");
+ let attr = quote_attr!(cx, #![cfg(foo = "bar")]);
+ check!(attribute_to_string, attr, quote_attr!(cx, $attr); r#"#![cfg(foo = "bar")]"#);
}