panic_abort panic_unwind unwind
RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_driver \
rustc_trans rustc_back rustc_llvm rustc_privacy rustc_lint \
- rustc_data_structures rustc_platform_intrinsics \
+ rustc_data_structures rustc_platform_intrinsics rustc_errors \
rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \
rustc_const_eval rustc_const_math rustc_incremental
-HOST_CRATES := syntax syntax_ext $(RUSTC_CRATES) rustdoc fmt_macros \
+HOST_CRATES := syntax syntax_ext syntax_pos $(RUSTC_CRATES) rustdoc fmt_macros \
flate arena graphviz rbml log serialize
TOOLS := compiletest rustdoc rustc rustbook error_index_generator
DEPS_term := std
DEPS_test := std getopts term native:rust_test_helpers
-DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode
-DEPS_syntax_ext := syntax fmt_macros
+DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode rustc_errors syntax_pos
+DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros
+DEPS_syntax_pos := serialize
DEPS_rustc_const_math := std syntax log serialize
DEPS_rustc_const_eval := rustc_const_math rustc syntax log serialize \
- rustc_back graphviz
+ rustc_back graphviz syntax_pos
DEPS_rustc := syntax fmt_macros flate arena serialize getopts rbml \
log graphviz rustc_llvm rustc_back rustc_data_structures\
- rustc_const_math
+ rustc_const_math syntax_pos rustc_errors
DEPS_rustc_back := std syntax flate log libc
-DEPS_rustc_borrowck := rustc log graphviz syntax rustc_mir
+DEPS_rustc_borrowck := rustc log graphviz syntax syntax_pos rustc_errors rustc_mir
DEPS_rustc_data_structures := std log serialize
DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \
rustc_typeck rustc_mir rustc_resolve log syntax serialize rustc_llvm \
rustc_trans rustc_privacy rustc_lint rustc_plugin \
rustc_metadata syntax_ext rustc_passes rustc_save_analysis rustc_const_eval \
- rustc_incremental
-DEPS_rustc_lint := rustc log syntax rustc_const_eval
+ rustc_incremental syntax_pos rustc_errors
+DEPS_rustc_errors := log libc serialize syntax_pos
+DEPS_rustc_lint := rustc log syntax syntax_pos rustc_const_eval
DEPS_rustc_llvm := native:rustllvm libc std rustc_bitflags
-DEPS_rustc_metadata := rustc syntax rbml rustc_const_math
-DEPS_rustc_passes := syntax rustc core rustc_const_eval
-DEPS_rustc_mir := rustc syntax rustc_const_math rustc_const_eval rustc_bitflags
-DEPS_rustc_resolve := arena rustc log syntax
+DEPS_rustc_metadata := rustc syntax syntax_pos rustc_errors rbml rustc_const_math
+DEPS_rustc_passes := syntax syntax_pos rustc core rustc_const_eval rustc_errors
+DEPS_rustc_mir := rustc syntax syntax_pos rustc_const_math rustc_const_eval rustc_bitflags
+DEPS_rustc_resolve := arena rustc log syntax syntax_pos rustc_errors
DEPS_rustc_platform_intrinsics := std
-DEPS_rustc_plugin := rustc rustc_metadata syntax
-DEPS_rustc_privacy := rustc log syntax
+DEPS_rustc_plugin := rustc rustc_metadata syntax syntax_pos rustc_errors
+DEPS_rustc_privacy := rustc log syntax syntax_pos
DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \
log syntax serialize rustc_llvm rustc_platform_intrinsics \
- rustc_const_math rustc_const_eval rustc_incremental
-DEPS_rustc_incremental := rbml rustc serialize rustc_data_structures
-DEPS_rustc_save_analysis := rustc log syntax serialize
-DEPS_rustc_typeck := rustc syntax rustc_platform_intrinsics rustc_const_math \
- rustc_const_eval
+ rustc_const_math rustc_const_eval rustc_incremental rustc_errors syntax_pos
+DEPS_rustc_incremental := rbml rustc syntax_pos serialize rustc_data_structures
+DEPS_rustc_save_analysis := rustc log syntax syntax_pos serialize
+DEPS_rustc_typeck := rustc syntax syntax_pos rustc_platform_intrinsics rustc_const_math \
+ rustc_const_eval rustc_errors
DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts \
- test rustc_lint rustc_const_eval
+ test rustc_lint rustc_const_eval syntax_pos
TOOL_DEPS_compiletest := test getopts log serialize
extern crate rustc;
extern crate rustc_plugin;
-use syntax::codemap::Span;
use syntax::parse::token;
use syntax::ast::TokenTree;
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
use syntax::ext::build::AstBuilder; // trait for expr_usize
+use syntax_pos::Span;
use rustc_plugin::Registry;
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
use syntax::ast;
use syntax::ast::Name;
use syntax::codemap;
-use syntax::codemap::Pos;
use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token};
use syntax::parse::lexer::TokenAndSpan;
+use syntax_pos::Pos;
fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
fn id() -> token::Token {
lo -= surrogate_pairs_pos.binary_search(&(lo as usize)).unwrap_or_else(|x| x) as u32;
hi -= surrogate_pairs_pos.binary_search(&(hi as usize)).unwrap_or_else(|x| x) as u32;
- let sp = codemap::Span {
- lo: codemap::BytePos(lo),
- hi: codemap::BytePos(hi),
- expn_id: codemap::NO_EXPANSION
+ let sp = syntax_pos::Span {
+ lo: syntax_pos::BytePos(lo),
+ hi: syntax_pos::BytePos(hi),
+ expn_id: syntax_pos::NO_EXPANSION
};
TokenAndSpan {
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
+rustc_errors = { path = "../librustc_errors" }
rustc_llvm = { path = "../librustc_llvm" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
}
}
-impl<'a, 'v> Visitor<'v> for CheckAttrVisitor<'a> {
+impl<'a> Visitor for CheckAttrVisitor<'a> {
fn visit_item(&mut self, item: &ast::Item) {
let target = Target::from_item(item);
for attr in &item.attrs {
use hir::*;
use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, Attribute, Attribute_, MetaItem};
use syntax::ast::MetaItemKind;
-use syntax::attr::ThinAttributesExt;
use hir;
-use syntax::codemap::{respan, Span, Spanned};
+use syntax_pos::Span;
+use syntax::codemap::{respan, Spanned};
use syntax::ptr::P;
use syntax::parse::token::keywords;
use syntax::util::move_map::MoveMap;
})
}
-pub fn fold_attrs<T: Folder>(attrs: HirVec<Attribute>, fld: &mut T) -> HirVec<Attribute> {
- attrs.move_flat_map(|x| fld.fold_attribute(x))
+pub fn fold_attrs<T, F>(attrs: T, fld: &mut F) -> T
+ where T: Into<Vec<Attribute>> + From<Vec<Attribute>>,
+ F: Folder,
+{
+ attrs.into().move_flat_map(|x| fld.fold_attribute(x)).into()
}
pub fn noop_fold_arm<T: Folder>(Arm { attrs, pats, guard, body }: Arm, fld: &mut T) -> Arm {
pat: fld.fold_pat(pat),
init: init.map(|e| fld.fold_expr(e)),
span: fld.new_span(span),
- attrs: attrs.map_thin_attrs(|attrs| fold_attrs(attrs.into(), fld).into()),
+ attrs: fold_attrs(attrs, fld),
}
})
}
}
},
span: folder.new_span(span),
- attrs: attrs.map_thin_attrs(|attrs| fold_attrs(attrs.into(), folder).into()),
+ attrs: fold_attrs(attrs, folder),
}
}
use syntax::abi::Abi;
use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute};
-use syntax::attr::ThinAttributesExt;
-use syntax::codemap::{Span, Spanned};
+use syntax::codemap::Spanned;
+use syntax_pos::Span;
use hir::*;
use std::cmp;
walk_list!(visitor, visit_arm, arms);
}
ExprClosure(_, ref function_declaration, ref body, _fn_decl_span) => {
- visitor.visit_fn(FnKind::Closure(expression.attrs.as_attr_slice()),
+ visitor.visit_fn(FnKind::Closure(&expression.attrs),
function_declaration,
body,
expression.span,
use std::collections::BTreeMap;
use std::iter;
use syntax::ast::*;
-use syntax::attr::{ThinAttributes, ThinAttributesExt};
use syntax::ptr::P;
-use syntax::codemap::{respan, Spanned, Span};
+use syntax::codemap::{respan, Spanned};
use syntax::parse::token;
use syntax::std_inject;
use syntax::visit::{self, Visitor};
+use syntax_pos::Span;
pub struct LoweringContext<'a> {
crate_root: Option<&'static str>,
lctx: &'lcx mut LoweringContext<'interner>,
}
- impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
- fn visit_item(&mut self, item: &'lcx Item) {
+ impl<'lcx, 'interner> Visitor for ItemLowerer<'lcx, 'interner> {
+ fn visit_item(&mut self, item: &Item) {
self.items.insert(item.id, self.lctx.lower_item(item));
visit::walk_item(self, item);
}
}
}
- fn lower_decl(&mut self, d: &Decl) -> P<hir::Decl> {
- match d.node {
- DeclKind::Local(ref l) => P(Spanned {
- node: hir::DeclLocal(self.lower_local(l)),
- span: d.span,
- }),
- DeclKind::Item(ref it) => P(Spanned {
- node: hir::DeclItem(self.lower_item_id(it)),
- span: d.span,
- }),
- }
- }
-
fn lower_ty_binding(&mut self, b: &TypeBinding) -> hir::TypeBinding {
hir::TypeBinding {
id: b.id,
}
fn lower_block(&mut self, b: &Block) -> P<hir::Block> {
+ let mut stmts = Vec::new();
+ let mut expr = None;
+
+ if let Some((last, rest)) = b.stmts.split_last() {
+ stmts = rest.iter().map(|s| self.lower_stmt(s)).collect::<Vec<_>>();
+ let last = self.lower_stmt(last);
+ if let hir::StmtExpr(e, _) = last.node {
+ expr = Some(e);
+ } else {
+ stmts.push(last);
+ }
+ }
+
P(hir::Block {
id: b.id,
- stmts: b.stmts.iter().map(|s| self.lower_stmt(s)).collect(),
- expr: b.expr.as_ref().map(|ref x| self.lower_expr(x)),
+ stmts: stmts.into(),
+ expr: expr,
rules: self.lower_block_check_mode(&b.rules),
span: b.span,
})
hir::TypeTraitItem(this.lower_bounds(bounds),
default.as_ref().map(|x| this.lower_ty(x)))
}
+ TraitItemKind::Macro(..) => panic!("Shouldn't exist any more"),
},
span: i.span,
}
pats.iter().map(|x| self.lower_pat(x)).collect(),
ddpos)
}
- PatKind::Path(ref pth) => {
+ PatKind::Path(None, ref pth) => {
hir::PatKind::Path(self.lower_path(pth))
}
- PatKind::QPath(ref qself, ref pth) => {
+ PatKind::Path(Some(ref qself), ref pth) => {
let qself = hir::QSelf {
ty: self.lower_ty(&qself.ty),
position: qself.position,
let make_call = |this: &mut LoweringContext, p, args| {
let path = this.core_path(e.span, p);
- let path = this.expr_path(path, None);
- this.expr_call(e.span, path, args, None)
+ let path = this.expr_path(path, ThinVec::new());
+ this.expr_call(e.span, path, args)
};
let mk_stmt_let = |this: &mut LoweringContext, bind, expr| {
- this.stmt_let(e.span, false, bind, expr, None)
+ this.stmt_let(e.span, false, bind, expr)
};
let mk_stmt_let_mut = |this: &mut LoweringContext, bind, expr| {
- this.stmt_let(e.span, true, bind, expr, None)
+ this.stmt_let(e.span, true, bind, expr)
};
// let placer = <placer_expr> ;
placer_expr,
e.span,
hir::PopUnstableBlock,
- None);
+ ThinVec::new());
mk_stmt_let(self, placer_ident, placer_expr)
};
// let mut place = Placer::make_place(placer);
let (s2, place_binding) = {
- let placer = self.expr_ident(e.span, placer_ident, None, placer_binding);
+ let placer = self.expr_ident(e.span, placer_ident, placer_binding);
let call = make_call(self, &make_place, hir_vec![placer]);
mk_stmt_let_mut(self, place_ident, call)
};
// let p_ptr = Place::pointer(&mut place);
let (s3, p_ptr_binding) = {
- let agent = self.expr_ident(e.span, place_ident, None, place_binding);
- let args = hir_vec![self.expr_mut_addr_of(e.span, agent, None)];
+ let agent = self.expr_ident(e.span, place_ident, place_binding);
+ let args = hir_vec![self.expr_mut_addr_of(e.span, agent)];
let call = make_call(self, &place_pointer, args);
mk_stmt_let(self, p_ptr_ident, call)
};
value_expr,
e.span,
hir::PopUnstableBlock,
- None);
+ ThinVec::new());
self.signal_block_expr(hir_vec![],
value_expr,
e.span,
- hir::PopUnsafeBlock(hir::CompilerGenerated), None)
+ hir::PopUnsafeBlock(hir::CompilerGenerated),
+ ThinVec::new())
};
// push_unsafe!({
// InPlace::finalize(place)
// })
let expr = {
- let ptr = self.expr_ident(e.span, p_ptr_ident, None, p_ptr_binding);
+ let ptr = self.expr_ident(e.span, p_ptr_ident, p_ptr_binding);
let call_move_val_init =
hir::StmtSemi(
make_call(self, &move_val_init, hir_vec![ptr, pop_unsafe_expr]),
self.next_id());
let call_move_val_init = respan(e.span, call_move_val_init);
- let place = self.expr_ident(e.span, place_ident, None, place_binding);
+ let place = self.expr_ident(e.span, place_ident, place_binding);
let call = make_call(self, &inplace_finalize, hir_vec![place]);
self.signal_block_expr(hir_vec![call_move_val_init],
call,
e.span,
- hir::PushUnsafeBlock(hir::CompilerGenerated), None)
+ hir::PushUnsafeBlock(hir::CompilerGenerated),
+ ThinVec::new())
};
return self.signal_block_expr(hir_vec![s1, s2, s3],
rules: hir::DefaultBlock,
span: span,
});
- self.expr_block(blk, None)
+ self.expr_block(blk, ThinVec::new())
}
_ => self.lower_expr(els),
}
expr,
e.span,
hir::PopUnstableBlock,
- None);
+ ThinVec::new());
this.field(token::intern(s), signal_block, ast_expr.span)
}).collect();
let attrs = ast_expr.attrs.clone();
hir_expr,
ast_expr.span,
hir::PushUnstableBlock,
- None)
+ ThinVec::new())
}
use syntax::ast::RangeLimits::*;
hir::ExprPath(hir_qself, self.lower_path(path))
}
ExprKind::Break(opt_ident) => hir::ExprBreak(self.lower_opt_sp_ident(opt_ident)),
- ExprKind::Again(opt_ident) => hir::ExprAgain(self.lower_opt_sp_ident(opt_ident)),
+ ExprKind::Continue(opt_ident) => hir::ExprAgain(self.lower_opt_sp_ident(opt_ident)),
ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| self.lower_expr(x))),
ExprKind::InlineAsm(InlineAsm {
ref inputs,
ex.span = e.span;
}
// merge attributes into the inner expression.
- ex.attrs.update(|attrs| {
- attrs.prepend(e.attrs.clone())
- });
+ let mut attrs = e.attrs.clone();
+ attrs.extend::<Vec<_>>(ex.attrs.into());
+ ex.attrs = attrs;
ex
});
}
// `<pat> => <body>`
let pat_arm = {
let body = self.lower_block(body);
- let body_expr = self.expr_block(body, None);
+ let body_expr = self.expr_block(body, ThinVec::new());
let pat = self.lower_pat(pat);
self.arm(hir_vec![pat], body_expr)
};
attrs: hir_vec![],
pats: hir_vec![pat_under],
guard: Some(cond),
- body: self.expr_block(then, None),
+ body: self.expr_block(then, ThinVec::new()),
});
else_opt.map(|else_opt| (else_opt, true))
}
let else_arm = {
let pat_under = self.pat_wild(e.span);
let else_expr =
- else_opt.unwrap_or_else(|| self.expr_tuple(e.span, hir_vec![], None));
+ else_opt.unwrap_or_else(|| self.expr_tuple(e.span, hir_vec![]));
self.arm(hir_vec![pat_under], else_expr)
};
// `<pat> => <body>`
let pat_arm = {
let body = self.lower_block(body);
- let body_expr = self.expr_block(body, None);
+ let body_expr = self.expr_block(body, ThinVec::new());
let pat = self.lower_pat(pat);
self.arm(hir_vec![pat], body_expr)
};
// `_ => break`
let break_arm = {
let pat_under = self.pat_wild(e.span);
- let break_expr = self.expr_break(e.span, None);
+ let break_expr = self.expr_break(e.span, ThinVec::new());
self.arm(hir_vec![pat_under], break_expr)
};
hir::ExprMatch(sub_expr,
arms,
hir::MatchSource::WhileLetDesugar),
- None);
+ ThinVec::new());
// `[opt_ident]: loop { ... }`
let loop_block = self.block_expr(match_expr);
id: self.next_id(),
node: hir::ExprBlock(body_block),
span: body_span,
- attrs: None,
+ attrs: ThinVec::new(),
});
let pat = self.lower_pat(pat);
let some_pat = self.pat_some(e.span, pat);
// `::std::option::Option::None => break`
let break_arm = {
- let break_expr = self.expr_break(e.span, None);
+ let break_expr = self.expr_break(e.span, ThinVec::new());
let pat = self.pat_none(e.span);
self.arm(hir_vec![pat], break_expr)
};
self.path_global(e.span, strs)
};
- let iter = self.expr_ident(e.span, iter, None, iter_pat.id);
- let ref_mut_iter = self.expr_mut_addr_of(e.span, iter, None);
- let next_path = self.expr_path(next_path, None);
- let next_expr = self.expr_call(e.span,
- next_path,
- hir_vec![ref_mut_iter],
- None);
+ let iter = self.expr_ident(e.span, iter, iter_pat.id);
+ let ref_mut_iter = self.expr_mut_addr_of(e.span, iter);
+ let next_path = self.expr_path(next_path, ThinVec::new());
+ let next_expr = self.expr_call(e.span, next_path, hir_vec![ref_mut_iter]);
let arms = hir_vec![pat_arm, break_arm];
self.expr(e.span,
hir::ExprMatch(next_expr, arms, hir::MatchSource::ForLoopDesugar),
- None)
+ ThinVec::new())
};
// `[opt_ident]: loop { ... }`
let loop_block = self.block_expr(match_expr);
let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident));
- let loop_expr =
- P(hir::Expr { id: e.id, node: loop_expr, span: e.span, attrs: None });
+ let loop_expr = P(hir::Expr {
+ id: e.id,
+ node: loop_expr,
+ span: e.span,
+ attrs: ThinVec::new(),
+ });
// `mut iter => { ... }`
let iter_arm = self.arm(hir_vec![iter_pat], loop_expr);
self.path_global(e.span, strs)
};
- let into_iter = self.expr_path(into_iter_path, None);
- self.expr_call(e.span, into_iter, hir_vec![head], None)
+ let into_iter = self.expr_path(into_iter_path, ThinVec::new());
+ self.expr_call(e.span, into_iter, hir_vec![head])
};
let match_expr = self.expr_match(e.span,
into_iter_expr,
hir_vec![iter_arm],
- hir::MatchSource::ForLoopDesugar,
- None);
+ hir::MatchSource::ForLoopDesugar);
// `{ let _result = ...; _result }`
// underscore prevents an unused_variables lint if the head diverges
let result_ident = self.str_to_ident("_result");
let (let_stmt, let_stmt_binding) =
- self.stmt_let(e.span, false, result_ident, match_expr, None);
+ self.stmt_let(e.span, false, result_ident, match_expr);
- let result = self.expr_ident(e.span, result_ident, None, let_stmt_binding);
+ let result = self.expr_ident(e.span, result_ident, let_stmt_binding);
let block = self.block_all(e.span, hir_vec![let_stmt], Some(result));
// add the attributes to the outer returned expr node
return self.expr_block(block, e.attrs.clone());
let ok_arm = {
let val_ident = self.str_to_ident("val");
let val_pat = self.pat_ident(e.span, val_ident);
- let val_expr = self.expr_ident(e.span, val_ident, None, val_pat.id);
+ let val_expr = self.expr_ident(e.span, val_ident, val_pat.id);
let ok_pat = self.pat_ok(e.span, val_pat);
self.arm(hir_vec![ok_pat], val_expr)
let from_expr = {
let path = self.std_path(&["convert", "From", "from"]);
let path = self.path_global(e.span, path);
- let from = self.expr_path(path, None);
- let err_expr = self.expr_ident(e.span, err_ident, None, err_local.id);
+ let from = self.expr_path(path, ThinVec::new());
+ let err_expr = self.expr_ident(e.span, err_ident, err_local.id);
- self.expr_call(e.span, from, hir_vec![err_expr], None)
+ self.expr_call(e.span, from, hir_vec![err_expr])
};
let err_expr = {
let path = self.std_path(&["result", "Result", "Err"]);
let path = self.path_global(e.span, path);
- let err_ctor = self.expr_path(path, None);
- self.expr_call(e.span, err_ctor, hir_vec![from_expr], None)
+ let err_ctor = self.expr_path(path, ThinVec::new());
+ self.expr_call(e.span, err_ctor, hir_vec![from_expr])
};
let err_pat = self.pat_err(e.span, err_local);
let ret_expr = self.expr(e.span,
- hir::Expr_::ExprRet(Some(err_expr)), None);
-
+ hir::Expr_::ExprRet(Some(err_expr)),
+ ThinVec::new());
self.arm(hir_vec![err_pat], ret_expr)
};
return self.expr_match(e.span, sub_expr, hir_vec![err_arm, ok_arm],
- hir::MatchSource::TryDesugar, None);
+ hir::MatchSource::TryDesugar);
}
ExprKind::Mac(_) => panic!("Shouldn't exist here"),
fn lower_stmt(&mut self, s: &Stmt) -> hir::Stmt {
match s.node {
- StmtKind::Decl(ref d, id) => {
- Spanned {
- node: hir::StmtDecl(self.lower_decl(d), id),
+ StmtKind::Local(ref l) => Spanned {
+ node: hir::StmtDecl(P(Spanned {
+ node: hir::DeclLocal(self.lower_local(l)),
span: s.span,
- }
- }
- StmtKind::Expr(ref e, id) => {
+ }), s.id),
+ span: s.span,
+ },
+ StmtKind::Item(ref it) => Spanned {
+ node: hir::StmtDecl(P(Spanned {
+ node: hir::DeclItem(self.lower_item_id(it)),
+ span: s.span,
+ }), s.id),
+ span: s.span,
+ },
+ StmtKind::Expr(ref e) => {
Spanned {
- node: hir::StmtExpr(self.lower_expr(e), id),
+ node: hir::StmtExpr(self.lower_expr(e), s.id),
span: s.span,
}
}
- StmtKind::Semi(ref e, id) => {
+ StmtKind::Semi(ref e) => {
Spanned {
- node: hir::StmtSemi(self.lower_expr(e), id),
+ node: hir::StmtSemi(self.lower_expr(e), s.id),
span: s.span,
}
}
}
}
- fn expr_break(&mut self, span: Span, attrs: ThinAttributes) -> P<hir::Expr> {
+ fn expr_break(&mut self, span: Span, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
self.expr(span, hir::ExprBreak(None), attrs)
}
- fn expr_call(&mut self,
- span: Span,
- e: P<hir::Expr>,
- args: hir::HirVec<P<hir::Expr>>,
- attrs: ThinAttributes)
+ fn expr_call(&mut self, span: Span, e: P<hir::Expr>, args: hir::HirVec<P<hir::Expr>>)
-> P<hir::Expr> {
- self.expr(span, hir::ExprCall(e, args), attrs)
+ self.expr(span, hir::ExprCall(e, args), ThinVec::new())
}
- fn expr_ident(&mut self, span: Span, id: Name, attrs: ThinAttributes, binding: NodeId)
- -> P<hir::Expr> {
+ fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> P<hir::Expr> {
let expr_path = hir::ExprPath(None, self.path_ident(span, id));
- let expr = self.expr(span, expr_path, attrs);
+ let expr = self.expr(span, expr_path, ThinVec::new());
let def = self.resolver.definitions().map(|defs| {
Def::Local(defs.local_def_id(binding), binding)
expr
}
- fn expr_mut_addr_of(&mut self, span: Span, e: P<hir::Expr>, attrs: ThinAttributes)
- -> P<hir::Expr> {
- self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), attrs)
+ fn expr_mut_addr_of(&mut self, span: Span, e: P<hir::Expr>) -> P<hir::Expr> {
+ self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), ThinVec::new())
}
- fn expr_path(&mut self, path: hir::Path, attrs: ThinAttributes) -> P<hir::Expr> {
+ fn expr_path(&mut self, path: hir::Path, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
let def = self.resolver.resolve_generated_global_path(&path, true);
let expr = self.expr(path.span, hir::ExprPath(None, path), attrs);
self.resolver.record_resolution(expr.id, def);
span: Span,
arg: P<hir::Expr>,
arms: hir::HirVec<hir::Arm>,
- source: hir::MatchSource,
- attrs: ThinAttributes)
+ source: hir::MatchSource)
-> P<hir::Expr> {
- self.expr(span, hir::ExprMatch(arg, arms, source), attrs)
+ self.expr(span, hir::ExprMatch(arg, arms, source), ThinVec::new())
}
- fn expr_block(&mut self, b: P<hir::Block>, attrs: ThinAttributes) -> P<hir::Expr> {
+ fn expr_block(&mut self, b: P<hir::Block>, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
self.expr(b.span, hir::ExprBlock(b), attrs)
}
- fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec<P<hir::Expr>>, attrs: ThinAttributes)
- -> P<hir::Expr> {
- self.expr(sp, hir::ExprTup(exprs), attrs)
+ fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec<P<hir::Expr>>) -> P<hir::Expr> {
+ self.expr(sp, hir::ExprTup(exprs), ThinVec::new())
}
fn expr_struct(&mut self,
path: hir::Path,
fields: hir::HirVec<hir::Field>,
e: Option<P<hir::Expr>>,
- attrs: ThinAttributes) -> P<hir::Expr> {
+ attrs: ThinVec<Attribute>) -> P<hir::Expr> {
let def = self.resolver.resolve_generated_global_path(&path, false);
let expr = self.expr(sp, hir::ExprStruct(path, fields, e), attrs);
self.resolver.record_resolution(expr.id, def);
expr
}
- fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinAttributes) -> P<hir::Expr> {
+ fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec<Attribute>) -> P<hir::Expr> {
P(hir::Expr {
id: self.next_id(),
node: node,
})
}
- fn stmt_let(&mut self,
- sp: Span,
- mutbl: bool,
- ident: Name,
- ex: P<hir::Expr>,
- attrs: ThinAttributes)
+ fn stmt_let(&mut self, sp: Span, mutbl: bool, ident: Name, ex: P<hir::Expr>)
-> (hir::Stmt, NodeId) {
let pat = if mutbl {
self.pat_ident_binding_mode(sp, ident, hir::BindByValue(hir::MutMutable))
init: Some(ex),
id: self.next_id(),
span: sp,
- attrs: attrs,
+ attrs: ThinVec::new(),
});
let decl = respan(sp, hir::DeclLocal(local));
(respan(sp, hir::StmtDecl(P(decl), self.next_id())), pat_id)
expr: P<hir::Expr>,
span: Span,
rule: hir::BlockCheckMode,
- attrs: ThinAttributes)
+ attrs: ThinVec<Attribute>)
-> P<hir::Expr> {
let id = self.next_id();
let block = P(hir::Block {
use syntax::abi;
use hir::{Block, FnDecl};
use syntax::ast::{Attribute, Name, NodeId};
-use syntax::attr::ThinAttributesExt;
use hir as ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use hir::intravisit::FnKind;
/// An FnLikeNode is a Node that is like a fn, in that it has a decl
}
map::NodeExpr(e) => match e.node {
ast::ExprClosure(_, ref decl, ref block, _fn_decl_span) =>
- closure(ClosureParts::new(&decl,
- &block,
- e.id,
- e.span,
- e.attrs.as_attr_slice())),
+ closure(ClosureParts::new(&decl, &block, e.id, e.span, &e.attrs)),
_ => bug!("expr FnLikeNode that is not fn-like"),
},
_ => bug!("other FnLikeNode that is not fn-like"),
use middle::cstore::InlinedItem;
use std::iter::repeat;
use syntax::ast::{NodeId, CRATE_NODE_ID};
-use syntax::codemap::Span;
+use syntax_pos::Span;
/// A Visitor that walks over the HIR and collects Nodes into a HIR map
pub struct NodeCollector<'ast> {
self.parent_def = parent;
}
- fn visit_ast_const_integer(&mut self, expr: &'ast Expr) {
+ fn visit_ast_const_integer(&mut self, expr: &Expr) {
// Find the node which will be used after lowering.
if let ExprKind::Paren(ref inner) = expr.node {
return self.visit_ast_const_integer(inner);
}
}
-impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> {
- fn visit_item(&mut self, i: &'ast Item) {
+impl<'ast> visit::Visitor for DefCollector<'ast> {
+ fn visit_item(&mut self, i: &Item) {
debug!("visit_item: {:?}", i);
// Pick the def data. This need not be unique, but the more
});
}
- fn visit_foreign_item(&mut self, foreign_item: &'ast ForeignItem) {
+ fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.ident.name));
self.with_parent(def, |this| {
});
}
- fn visit_generics(&mut self, generics: &'ast Generics) {
+ fn visit_generics(&mut self, generics: &Generics) {
for ty_param in generics.ty_params.iter() {
self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name));
}
visit::walk_generics(self, generics);
}
- fn visit_trait_item(&mut self, ti: &'ast TraitItem) {
+ fn visit_trait_item(&mut self, ti: &TraitItem) {
let def_data = match ti.node {
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
DefPathData::ValueNs(ti.ident.name),
TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name),
+ TraitItemKind::Macro(..) => DefPathData::MacroDef(ti.ident.name),
};
let def = self.create_def(ti.id, def_data);
});
}
- fn visit_impl_item(&mut self, ii: &'ast ImplItem) {
+ fn visit_impl_item(&mut self, ii: &ImplItem) {
let def_data = match ii.node {
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
DefPathData::ValueNs(ii.ident.name),
});
}
- fn visit_pat(&mut self, pat: &'ast Pat) {
+ fn visit_pat(&mut self, pat: &Pat) {
let parent_def = self.parent_def;
if let PatKind::Ident(_, id, _) = pat.node {
self.parent_def = parent_def;
}
- fn visit_expr(&mut self, expr: &'ast Expr) {
+ fn visit_expr(&mut self, expr: &Expr) {
let parent_def = self.parent_def;
if let ExprKind::Repeat(_, ref count) = expr.node {
self.parent_def = parent_def;
}
- fn visit_ty(&mut self, ty: &'ast Ty) {
+ fn visit_ty(&mut self, ty: &Ty) {
if let TyKind::FixedLengthVec(_, ref length) = ty.node {
self.visit_ast_const_integer(length);
}
visit::walk_ty(self, ty);
}
- fn visit_lifetime_def(&mut self, def: &'ast LifetimeDef) {
+ fn visit_lifetime_def(&mut self, def: &LifetimeDef) {
self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name));
}
- fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) {
+ fn visit_macro_def(&mut self, macro_def: &MacroDef) {
self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name));
}
}
use syntax::abi::Abi;
use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, };
-use syntax::attr::ThinAttributesExt;
-use syntax::codemap::{Span, Spanned};
+use syntax::codemap::Spanned;
use syntax::visit;
+use syntax_pos::Span;
use hir::*;
use hir::fold::Folder;
Some(NodeTraitItem(ref ti)) => Some(&ti.attrs[..]),
Some(NodeImplItem(ref ii)) => Some(&ii.attrs[..]),
Some(NodeVariant(ref v)) => Some(&v.node.attrs[..]),
- Some(NodeExpr(ref e)) => Some(e.attrs.as_attr_slice()),
+ Some(NodeExpr(ref e)) => Some(&*e.attrs),
Some(NodeStmt(ref s)) => Some(s.node.attrs()),
// unit/tuple structs take the attributes straight from
// the struct definition.
use hir::def_id::DefId;
use util::nodemap::{NodeMap, FnvHashSet};
-use syntax::codemap::{self, mk_sp, respan, Span, Spanned, ExpnId};
+use syntax_pos::{mk_sp, Span, ExpnId};
+use syntax::codemap::{self, respan, Spanned};
use syntax::abi::Abi;
-use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, TokenTree, AsmDialect};
+use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect};
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
-use syntax::attr::{ThinAttributes, ThinAttributesExt};
use syntax::parse::token::{keywords, InternedString};
use syntax::ptr::P;
+use syntax::tokenstream::TokenTree;
+use syntax::util::ThinVec;
use std::collections::BTreeMap;
use std::fmt;
match *self {
StmtDecl(ref d, _) => d.node.attrs(),
StmtExpr(ref e, _) |
- StmtSemi(ref e, _) => e.attrs.as_attr_slice(),
+ StmtSemi(ref e, _) => &e.attrs,
}
}
pub init: Option<P<Expr>>,
pub id: NodeId,
pub span: Span,
- pub attrs: ThinAttributes,
+ pub attrs: ThinVec<Attribute>,
}
pub type Decl = Spanned<Decl_>;
impl Decl_ {
pub fn attrs(&self) -> &[Attribute] {
match *self {
- DeclLocal(ref l) => l.attrs.as_attr_slice(),
+ DeclLocal(ref l) => &l.attrs,
DeclItem(_) => &[]
}
}
pub id: NodeId,
pub node: Expr_,
pub span: Span,
- pub attrs: ThinAttributes,
+ pub attrs: ThinVec<Attribute>,
}
impl fmt::Debug for Expr {
use ty::TyCtxt;
use util::nodemap::FnvHashMap;
use syntax::ast;
-use syntax::codemap::{Span, Spanned, DUMMY_SP};
+use syntax::codemap::Spanned;
+use syntax_pos::{Span, DUMMY_SP};
use std::iter::{Enumerate, ExactSizeIterator};
use syntax::abi::Abi;
use syntax::ast;
-use syntax::codemap::{self, CodeMap, BytePos, Spanned};
-use syntax::errors;
+use syntax::codemap::{CodeMap, Spanned};
use syntax::parse::token::{self, keywords, BinOpToken};
use syntax::parse::lexer::comments;
use syntax::print::pp::{self, break_offset, word, space, hardbreak};
use syntax::print::pp::Breaks::{Consistent, Inconsistent};
use syntax::print::pprust::{self as ast_pp, PrintState};
use syntax::ptr::P;
+use syntax_pos::{self, BytePos};
+use errors;
use hir;
use hir::{Crate, PatKind, RegionTyParamBound, SelfKind, TraitTyParamBound, TraitBoundModifier};
self.end() // close the head-box
}
- pub fn bclose_(&mut self, span: codemap::Span, indented: usize) -> io::Result<()> {
+ pub fn bclose_(&mut self, span: syntax_pos::Span, indented: usize) -> io::Result<()> {
self.bclose_maybe_open(span, indented, true)
}
pub fn bclose_maybe_open(&mut self,
- span: codemap::Span,
+ span: syntax_pos::Span,
indented: usize,
close_box: bool)
-> io::Result<()> {
}
Ok(())
}
- pub fn bclose(&mut self, span: codemap::Span) -> io::Result<()> {
+ pub fn bclose(&mut self, span: syntax_pos::Span) -> io::Result<()> {
self.bclose_(span, indent_unit)
}
mut get_span: G)
-> io::Result<()>
where F: FnMut(&mut State, &T) -> io::Result<()>,
- G: FnMut(&T) -> codemap::Span
+ G: FnMut(&T) -> syntax_pos::Span
{
self.rbox(0, b)?;
let len = elts.len();
enum_definition: &hir::EnumDef,
generics: &hir::Generics,
name: ast::Name,
- span: codemap::Span,
+ span: syntax_pos::Span,
visibility: &hir::Visibility)
-> io::Result<()> {
self.head(&visibility_qualified(visibility, "enum"))?;
pub fn print_variants(&mut self,
variants: &[hir::Variant],
- span: codemap::Span)
+ span: syntax_pos::Span)
-> io::Result<()> {
self.bopen()?;
for v in variants {
struct_def: &hir::VariantData,
generics: &hir::Generics,
name: ast::Name,
- span: codemap::Span,
+ span: syntax_pos::Span,
print_finalizer: bool)
-> io::Result<()> {
self.print_name(name)?;
}
pub fn maybe_print_trailing_comment(&mut self,
- span: codemap::Span,
+ span: syntax_pos::Span,
next_pos: Option<BytePos>)
-> io::Result<()> {
let cm = match self.cm {
use traits::PredicateObligations;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
#[derive(Clone)]
pub struct CombineFields<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
use std::char::from_u32;
use std::fmt;
use syntax::ast;
-use syntax::errors::{DiagnosticBuilder, check_old_skool};
-use syntax::codemap::{self, Pos, Span};
use syntax::parse::token;
use syntax::ptr::P;
+use syntax_pos::{self, Pos, Span};
+use errors::{DiagnosticBuilder, check_old_skool};
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn note_and_explain_region(self,
fn name_to_dummy_lifetime(name: ast::Name) -> hir::Lifetime {
hir::Lifetime { id: ast::DUMMY_NODE_ID,
- span: codemap::DUMMY_SP,
+ span: syntax_pos::DUMMY_SP,
name: name }
}
use ty::{self, TyCtxt, Binder, TypeFoldable};
use ty::error::TypeError;
use ty::relate::{Relate, RelateResult, TypeRelation};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use util::nodemap::{FnvHashMap, FnvHashSet};
pub struct HrMatchResult<U> {
use std::cell::{Cell, RefCell, Ref, RefMut};
use std::fmt;
use syntax::ast;
-use syntax::codemap;
-use syntax::codemap::{Span, DUMMY_SP};
-use syntax::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
+use syntax_pos::{self, Span, DUMMY_SP};
use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
use self::combine::CombineFields;
-> UnitResult<'tcx>
{
self.probe(|_| {
- let origin = TypeOrigin::Misc(codemap::DUMMY_SP);
+ let origin = TypeOrigin::Misc(syntax_pos::DUMMY_SP);
let trace = TypeTrace::types(origin, true, a, b);
self.sub(true, trace, &a, &b).map(|_| ())
})
pub fn dummy(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> TypeTrace<'tcx> {
TypeTrace {
- origin: TypeOrigin::Misc(codemap::DUMMY_SP),
+ origin: TypeOrigin::Misc(syntax_pos::DUMMY_SP),
values: Types(ExpectedFound {
expected: tcx.types.err,
found: tcx.types.err,
Coercion(a) => a,
EarlyBoundRegion(a, _) => a,
LateBoundRegion(a, _, _) => a,
- BoundRegionInCoherence(_) => codemap::DUMMY_SP,
+ BoundRegionInCoherence(_) => syntax_pos::DUMMY_SP,
UpvarRegion(_, a) => a
}
}
use self::UndoEntry::*;
use hir::def_id::{DefId};
use ty::{self, Ty};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use std::cmp::min;
use std::marker::PhantomData;
extern crate serialize;
extern crate collections;
extern crate rustc_const_math;
+extern crate rustc_errors as errors;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
+#[macro_use] extern crate syntax_pos;
#[macro_use] #[no_link] extern crate rustc_bitflags;
extern crate serialize as rustc_serialize; // used by deriving
use std::default::Default as StdDefault;
use std::mem;
use syntax::attr::{self, AttrMetaMethods};
-use syntax::codemap::Span;
-use syntax::errors::DiagnosticBuilder;
use syntax::parse::token::InternedString;
use syntax::ast;
-use syntax::attr::ThinAttributesExt;
+use syntax_pos::Span;
+use errors::DiagnosticBuilder;
use hir;
use hir::intravisit as hir_visit;
use hir::intravisit::{IdVisitor, IdVisitingOperation};
}
fn visit_expr(&mut self, e: &hir::Expr) {
- self.with_lint_attrs(e.attrs.as_attr_slice(), |cx| {
+ self.with_lint_attrs(&e.attrs, |cx| {
run_lints!(cx, check_expr, late_passes, e);
hir_visit::walk_expr(cx, e);
})
}
fn visit_local(&mut self, l: &hir::Local) {
- self.with_lint_attrs(l.attrs.as_attr_slice(), |cx| {
+ self.with_lint_attrs(&l.attrs, |cx| {
run_lints!(cx, check_local, late_passes, l);
hir_visit::walk_local(cx, l);
})
}
}
-impl<'a, 'v> ast_visit::Visitor<'v> for EarlyContext<'a> {
+impl<'a> ast_visit::Visitor for EarlyContext<'a> {
fn visit_item(&mut self, it: &ast::Item) {
self.with_lint_attrs(&it.attrs, |cx| {
run_lints!(cx, check_item, early_passes, it);
}
fn visit_expr(&mut self, e: &ast::Expr) {
- self.with_lint_attrs(e.attrs.as_attr_slice(), |cx| {
+ self.with_lint_attrs(&e.attrs, |cx| {
run_lints!(cx, check_expr, early_passes, e);
ast_visit::walk_expr(cx, e);
})
ast_visit::walk_stmt(self, s);
}
- fn visit_fn(&mut self, fk: ast_visit::FnKind<'v>, decl: &'v ast::FnDecl,
- body: &'v ast::Block, span: Span, id: ast::NodeId) {
+ fn visit_fn(&mut self, fk: ast_visit::FnKind, decl: &ast::FnDecl,
+ body: &ast::Block, span: Span, id: ast::NodeId) {
run_lints!(self, check_fn, early_passes, fk, decl, body, span, id);
ast_visit::walk_fn(self, fk, decl, body, span);
run_lints!(self, check_fn_post, early_passes, fk, decl, body, span, id);
}
fn visit_local(&mut self, l: &ast::Local) {
- self.with_lint_attrs(l.attrs.as_attr_slice(), |cx| {
+ self.with_lint_attrs(&l.attrs, |cx| {
run_lints!(cx, check_local, early_passes, l);
ast_visit::walk_local(cx, l);
})
ast_visit::walk_arm(self, a);
}
- fn visit_decl(&mut self, d: &ast::Decl) {
- run_lints!(self, check_decl, early_passes, d);
- ast_visit::walk_decl(self, d);
- }
-
fn visit_expr_post(&mut self, e: &ast::Expr) {
run_lints!(self, check_expr_post, early_passes, e);
}
use std::hash;
use std::ascii::AsciiExt;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use hir::intravisit::FnKind;
use syntax::visit as ast_visit;
use syntax::ast;
fn check_stmt(&mut self, _: &EarlyContext, _: &ast::Stmt) { }
fn check_arm(&mut self, _: &EarlyContext, _: &ast::Arm) { }
fn check_pat(&mut self, _: &EarlyContext, _: &ast::Pat) { }
- fn check_decl(&mut self, _: &EarlyContext, _: &ast::Decl) { }
fn check_expr(&mut self, _: &EarlyContext, _: &ast::Expr) { }
fn check_expr_post(&mut self, _: &EarlyContext, _: &ast::Expr) { }
fn check_ty(&mut self, _: &EarlyContext, _: &ast::Ty) { }
use hir::def::Def;
use ty::{Ty, TyCtxt};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use hir as ast;
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
use std::path::PathBuf;
use syntax::ast;
use syntax::attr;
-use syntax::codemap::Span;
use syntax::ptr::P;
use syntax::parse::token::InternedString;
+use syntax_pos::Span;
use rustc_back::target::Target;
use hir;
use hir::intravisit::{IdVisitor, IdVisitingOperation, Visitor};
use std::collections::HashSet;
use syntax::{ast, codemap};
use syntax::attr;
+use syntax_pos;
// Any local node that may call something in its body block should be
// explored. For example, if it's a live NodeItem that is a
impl<'a, 'tcx, 'v> Visitor<'v> for MarkSymbolVisitor<'a, 'tcx> {
fn visit_variant_data(&mut self, def: &hir::VariantData, _: ast::Name,
- _: &hir::Generics, _: ast::NodeId, _: codemap::Span) {
+ _: &hir::Generics, _: ast::NodeId, _: syntax_pos::Span) {
let has_extern_repr = self.struct_has_extern_repr;
let inherited_pub_visibility = self.inherited_pub_visibility;
let live_fields = def.fields().iter().filter(|f| {
fn warn_dead_code(&mut self,
id: ast::NodeId,
- span: codemap::Span,
+ span: syntax_pos::Span,
name: ast::Name,
node_type: &str) {
let name = name.as_str();
use ty::MethodCall;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use hir;
use hir::intravisit;
use hir::intravisit::{FnKind, Visitor};
use session::{config, Session};
use syntax::ast::NodeId;
use syntax::attr;
-use syntax::codemap::Span;
use syntax::entry::EntryPointType;
+use syntax_pos::Span;
use hir::{Item, ItemFn};
use hir::intravisit::Visitor;
use syntax::ast;
use syntax::ptr::P;
-use syntax::codemap::Span;
+use syntax_pos::Span;
///////////////////////////////////////////////////////////////////////////
// The Delegate trait
use syntax::abi::Abi::RustIntrinsic;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use hir::intravisit::{self, Visitor, FnKind};
use hir;
use std::io;
use std::rc::Rc;
use syntax::ast::{self, NodeId};
-use syntax::codemap::{BytePos, original_sp, Span};
+use syntax::codemap::original_sp;
use syntax::parse::token::keywords;
use syntax::ptr::P;
+use syntax_pos::{BytePos, Span};
use hir::Expr;
use hir;
use hir::pat_util::EnumerateAndAdjustIterator;
use hir;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use std::fmt;
use std::rc::Rc;
use std::collections::hash_map::Entry;
use std::fmt;
use std::mem;
-use syntax::codemap::{self, Span};
+use syntax::codemap;
use syntax::ast::{self, NodeId};
+use syntax_pos::Span;
use hir;
use hir::intravisit::{self, Visitor, FnKind};
use std::fmt;
use std::mem::replace;
use syntax::ast;
-use syntax::codemap::Span;
use syntax::parse::token::keywords;
+use syntax_pos::Span;
use util::nodemap::NodeMap;
use rustc_data_structures::fnv::FnvHashSet;
use ty::{self, TyCtxt};
use middle::privacy::AccessLevels;
use syntax::parse::token::InternedString;
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
use syntax::ast;
use syntax::ast::{NodeId, Attribute};
use syntax::feature_gate::{GateIssue, emit_feature_err, find_lang_feature_accepted_version};
use middle::lang_items;
use syntax::ast;
-use syntax::codemap::Span;
use syntax::parse::token::InternedString;
+use syntax_pos::Span;
use hir::intravisit::Visitor;
use hir::intravisit;
use hir;
use rustc_serialize as serialize;
-#[derive(Clone)]
+#[derive(Clone, Debug)]
pub struct Cache {
predecessors: RefCell<Option<IndexVec<BasicBlock, Vec<BasicBlock>>>>
}
use middle::const_val::ConstVal;
use rustc_const_math::{ConstUsize, ConstInt, ConstMathErr};
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
+use rustc_data_structures::control_flow_graph::dominators::{Dominators, dominators};
+use rustc_data_structures::control_flow_graph::{GraphPredecessors, GraphSuccessors};
+use rustc_data_structures::control_flow_graph::ControlFlowGraph;
use hir::def_id::DefId;
use ty::subst::Substs;
use ty::{self, AdtDef, ClosureSubsts, FnOutput, Region, Ty};
use std::fmt::{self, Debug, Formatter, Write};
use std::{iter, u32};
use std::ops::{Index, IndexMut};
+use std::vec::IntoIter;
use syntax::ast::{self, Name};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use super::cache::Cache;
}
/// Lowered representation of a single function.
-#[derive(Clone, RustcEncodable, RustcDecodable)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Mir<'tcx> {
/// List of basic blocks. References to basic block use a newtyped index type `BasicBlock`
/// that indexes into this vector.
Ref::map(self.predecessors(), |p| &p[bb])
}
+ #[inline]
+ pub fn dominators(&self) -> Dominators<BasicBlock> {
+ dominators(self)
+ }
+
/// Maps locals (Arg's, Var's, Temp's and ReturnPointer, in that order)
/// to their index in the whole list of locals. This is useful if you
/// want to treat all locals the same instead of repeating yourself.
fn item_path_str(def_id: DefId) -> String {
ty::tls::with(|tcx| tcx.item_path_str(def_id))
}
+
+/// Expose a `Mir` body as an abstract control-flow graph so generic
+/// graph algorithms (e.g. dominator computation) can run over it.
+impl<'tcx> ControlFlowGraph for Mir<'tcx> {
+
+ type Node = BasicBlock;
+
+ fn num_nodes(&self) -> usize { self.basic_blocks.len() }
+
+ // MIR execution always begins at START_BLOCK.
+ fn start_node(&self) -> Self::Node { START_BLOCK }
+
+ // Clones the cached predecessor list into an owned iterator; the
+ // clone keeps the trait object-free but costs an allocation per call.
+ fn predecessors<'graph>(&'graph self, node: Self::Node)
+ -> <Self as GraphPredecessors<'graph>>::Iter
+ {
+ self.predecessors_for(node).clone().into_iter()
+ }
+ // Successors come from the block's terminator; `into_owned` copies
+ // them out of the Cow so the iterator owns its data.
+ fn successors<'graph>(&'graph self, node: Self::Node)
+ -> <Self as GraphSuccessors<'graph>>::Iter
+ {
+ self.basic_blocks[node].terminator().successors().into_owned().into_iter()
+ }
+}
+
+// Associated-type carrier for `ControlFlowGraph::predecessors`:
+// an owned `Vec` iterator over predecessor blocks.
+impl<'a, 'b> GraphPredecessors<'b> for Mir<'a> {
+ type Item = BasicBlock;
+ type Iter = IntoIter<BasicBlock>;
+}
+
+// Associated-type carrier for `ControlFlowGraph::successors`:
+// an owned `Vec` iterator over successor blocks.
+impl<'a, 'b> GraphSuccessors<'b> for Mir<'a> {
+ type Item = BasicBlock;
+ type Iter = IntoIter<BasicBlock>;
+}
use rustc_const_math::ConstUsize;
use rustc_data_structures::tuple_slice::TupleSlice;
use rustc_data_structures::indexed_vec::Idx;
-use syntax::codemap::Span;
+use syntax_pos::Span;
// # The MIR Visitor
//
use syntax::ast::{self, IntTy, UintTy};
use syntax::attr;
use syntax::attr::AttrMetaMethods;
-use syntax::errors::{ColorConfig, Handler};
use syntax::parse;
use syntax::parse::token::InternedString;
use syntax::feature_gate::UnstableFeatures;
+use errors::{ColorConfig, Handler};
+
use getopts;
use std::collections::HashMap;
use std::env;
use middle::cstore::DummyCrateStore;
use session::config::{build_configuration, build_session_options};
use session::build_session;
-
+ use errors;
use std::rc::Rc;
use getopts::{getopts, OptGroup};
use syntax::attr;
use syntax::attr::AttrMetaMethods;
- use syntax::diagnostics;
fn optgroups() -> Vec<OptGroup> {
super::rustc_optgroups().into_iter()
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test: {}", f)
};
- let registry = diagnostics::registry::Registry::new(&[]);
+ let registry = errors::registry::Registry::new(&[]);
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore));
let cfg = build_configuration(&sess);
panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f)
}
};
- let registry = diagnostics::registry::Registry::new(&[]);
+ let registry = errors::registry::Registry::new(&[]);
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, &dep_graph, None, registry,
Rc::new(DummyCrateStore));
let matches = getopts(&[
"-Awarnings".to_string()
], &optgroups()).unwrap();
- let registry = diagnostics::registry::Registry::new(&[]);
+ let registry = errors::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, &dep_graph, None, registry,
Rc::new(DummyCrateStore));
"-Awarnings".to_string(),
"-Dwarnings".to_string()
], &optgroups()).unwrap();
- let registry = diagnostics::registry::Registry::new(&[]);
+ let registry = errors::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, &dep_graph, None, registry,
Rc::new(DummyCrateStore));
let matches = getopts(&[
"-Adead_code".to_string()
], &optgroups()).unwrap();
- let registry = diagnostics::registry::Registry::new(&[]);
+ let registry = errors::registry::Registry::new(&[]);
let sessopts = build_session_options(&matches);
let sess = build_session(sessopts, &dep_graph, None, registry,
Rc::new(DummyCrateStore));
use mir::transform as mir_pass;
use syntax::ast::{NodeId, NodeIdAssigner, Name};
-use syntax::codemap::{Span, MultiSpan};
-use syntax::errors::{self, DiagnosticBuilder};
-use syntax::errors::emitter::{Emitter, BasicEmitter, EmitterWriter};
-use syntax::errors::json::JsonEmitter;
-use syntax::diagnostics;
+use errors::{self, DiagnosticBuilder};
+use errors::emitter::{Emitter, BasicEmitter, EmitterWriter};
+use syntax::json::JsonEmitter;
use syntax::feature_gate;
use syntax::parse;
use syntax::parse::ParseSess;
use syntax::parse::token;
use syntax::{ast, codemap};
use syntax::feature_gate::AttributeType;
+use syntax_pos::{Span, MultiSpan};
use rustc_back::target::Target;
use llvm;
pub fn build_session(sopts: config::Options,
dep_graph: &DepGraph,
local_crate_source_file: Option<PathBuf>,
- registry: diagnostics::registry::Registry,
+ registry: errors::registry::Registry,
cstore: Rc<for<'a> CrateStore<'a>>)
-> Session {
build_session_with_codemap(sopts,
pub fn build_session_with_codemap(sopts: config::Options,
dep_graph: &DepGraph,
local_crate_source_file: Option<PathBuf>,
- registry: diagnostics::registry::Registry,
+ registry: errors::registry::Registry,
cstore: Rc<for<'a> CrateStore<'a>>,
codemap: Rc<codemap::CodeMap>)
-> Session {
let emitter: Box<Emitter> = match sopts.error_format {
config::ErrorOutputType::HumanReadable(color_config) => {
- Box::new(EmitterWriter::stderr(color_config, Some(registry), codemap.clone()))
+ Box::new(EmitterWriter::stderr(color_config,
+ Some(registry),
+ codemap.clone(),
+ errors::snippet::FormatMode::EnvironmentSelected))
}
config::ErrorOutputType::Json => {
Box::new(JsonEmitter::stderr(Some(registry), codemap.clone()))
use ty::subst::TypeSpace;
use ty::{self, Ty, TyCtxt};
use infer::{InferCtxt, TypeOrigin};
-use syntax::codemap::DUMMY_SP;
+use syntax_pos::DUMMY_SP;
#[derive(Copy, Clone)]
struct InferIsLocal(bool);
use std::fmt;
use syntax::ast;
use syntax::attr::{AttributeMethods, AttrMetaMethods};
-use syntax::codemap::Span;
-use syntax::errors::DiagnosticBuilder;
+use syntax_pos::Span;
+use errors::DiagnosticBuilder;
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct TraitErrorKey<'tcx> {
use std::rc::Rc;
use syntax::ast;
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
pub use self::error_reporting::TraitErrorKey;
pub use self::coherence::orphan_check;
use ty::subst::{Subst, Substs};
use traits::{self, ProjectionMode, ObligationCause, Normalized};
use ty::{self, TyCtxt};
-use syntax::codemap::DUMMY_SP;
+use syntax_pos::DUMMY_SP;
pub mod specialization_graph;
use infer::InferCtxt;
use ty::subst::{Subst, Substs};
use ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use util::common::ErrorReported;
use util::nodemap::FnvHashSet;
use ty::LvaluePreference::{NoPreference};
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use hir;
use std::cell::Cell;
use std::fmt;
- use syntax::codemap;
+ use syntax_pos;
/// Marker types used for the scoped TLS slot.
/// The type context cannot be used directly because the scoped TLS
*const ThreadLocalInterners)>> = Cell::new(None)
}
- fn span_debug(span: codemap::Span, f: &mut fmt::Formatter) -> fmt::Result {
+ fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
with(|tcx| {
write!(f, "{}", tcx.sess.codemap().span_to_string(span))
})
pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R
where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
{
- codemap::SPAN_DEBUG.with(|span_dbg| {
+ syntax_pos::SPAN_DEBUG.with(|span_dbg| {
let original_span_debug = span_dbg.get();
span_dbg.set(span_debug);
let result = enter(&gcx, &gcx.global_interners, f);
use std::fmt;
use syntax::abi;
use syntax::ast::{self, Name};
-use syntax::codemap::Span;
-use syntax::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
+use syntax_pos::Span;
use hir;
use syntax::ast::{FloatTy, IntTy, UintTy};
use syntax::attr;
-use syntax::codemap::DUMMY_SP;
+use syntax_pos::DUMMY_SP;
use std::cmp;
use std::fmt;
use std::vec::IntoIter;
use syntax::ast::{self, CrateNum, Name, NodeId};
use syntax::attr::{self, AttrMetaMethods};
-use syntax::codemap::{DUMMY_SP, Span};
use syntax::parse::token::InternedString;
+use syntax_pos::{DUMMY_SP, Span};
use rustc_const_math::ConstInt;
use std::iter::IntoIterator;
use std::slice::Iter;
use std::vec::{Vec, IntoIter};
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
///////////////////////////////////////////////////////////////////////////
use std::hash::{Hash, SipHasher, Hasher};
use syntax::ast::{self, Name};
use syntax::attr::{self, SignedInt, UnsignedInt};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use hir;
use ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable};
use std::iter::once;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use util::common::ErrorReported;
/// Returns the set of obligations needed to make `ty` well-formed.
[dependencies]
log = { path = "../liblog" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
graphviz = { path = "../libgraphviz" }
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_mir = { path = "../librustc_mir" }
+rustc_errors = { path = "../librustc_errors" }
use rustc::middle::region;
use rustc::ty::{self, TyCtxt};
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir;
use std::rc::Rc;
use std::mem;
use std::rc::Rc;
use syntax::ast;
-use syntax::codemap::{Span, DUMMY_SP};
use syntax::attr::AttrMetaMethods;
+use syntax_pos::{Span, DUMMY_SP};
#[derive(PartialEq, Eq, PartialOrd, Ord)]
enum Fragment {
use std::rc::Rc;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir::{self, PatKind};
struct GatherMoveInfo<'tcx> {
use rustc::ty;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
type R = Result<(),()>;
use rustc::ty::{self, TyCtxt};
use syntax::ast;
-use syntax::codemap::Span;
use syntax::ast::NodeId;
+use syntax_pos::Span;
use rustc::hir;
use rustc::hir::Expr;
use rustc::hir::intravisit;
use rustc::middle::mem_categorization::InteriorOffsetKind as Kind;
use rustc::ty;
use syntax::ast;
-use syntax::codemap;
-use syntax::errors::DiagnosticBuilder;
+use syntax_pos;
+use errors::DiagnosticBuilder;
use rustc::hir;
pub struct MoveErrorCollector<'tcx> {
#[derive(Clone)]
pub struct MoveSpanAndPath {
- pub span: codemap::Span,
+ pub span: syntax_pos::Span,
pub name: ast::Name,
}
}
fn note_move_destination(mut err: DiagnosticBuilder,
- move_to_span: codemap::Span,
+ move_to_span: syntax_pos::Span,
pat_name: ast::Name,
is_first_note: bool) -> DiagnosticBuilder {
if is_first_note {
use rustc::middle::mem_categorization as mc;
use rustc::middle::mem_categorization::Categorization;
use rustc::ty;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use borrowck::ToInteriorKind;
use syntax::abi::{Abi};
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::ty::{self, TyCtxt};
use rustc::mir::repr::{self, Mir};
use rustc::middle::lang_items;
use rustc::util::nodemap::FnvHashMap;
use rustc_data_structures::indexed_vec::Idx;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use std::fmt;
use std::u32;
use syntax::ast::{self, MetaItem};
use syntax::attr::AttrMetaMethods;
-use syntax::codemap::{Span, DUMMY_SP};
use syntax::ptr::P;
+use syntax_pos::{Span, DUMMY_SP};
use rustc::hir;
use rustc::hir::intravisit::{FnKind};
use std::rc::Rc;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
-use syntax::codemap::{MultiSpan, Span};
-use syntax::errors::DiagnosticBuilder;
+use syntax_pos::{MultiSpan, Span};
+use errors::DiagnosticBuilder;
use rustc::hir;
use rustc::hir::{FnDecl, Block};
use std::rc::Rc;
use std::usize;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir;
use rustc::hir::intravisit::IdRange;
#![feature(question_mark)]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
+extern crate syntax_pos;
+extern crate rustc_errors as errors;
// for "clarity", rename the graphviz crate to dot; graphviz within `borrowck`
// refers to the borrowck-specific graphviz adapter traits.
rustc_const_math = { path = "../librustc_const_math" }
syntax = { path = "../libsyntax" }
graphviz = { path = "../libgraphviz" }
+syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
use rustc_back::slice;
use syntax::ast::{self, DUMMY_NODE_ID, NodeId};
-use syntax::codemap::{Span, Spanned, DUMMY_SP};
+use syntax::codemap::Spanned;
+use syntax_pos::{Span, DUMMY_SP};
use rustc::hir::fold::{Folder, noop_fold_pat};
use rustc::hir::print::pat_to_string;
use syntax::ptr::P;
id: 0,
node: hir::ExprLit(P(Spanned { node: node, span: DUMMY_SP })),
span: DUMMY_SP,
- attrs: None,
+ attrs: ast::ThinVec::new(),
})
}
use rustc::hir::{Expr, PatKind};
use rustc::hir;
use rustc::hir::intravisit::FnKind;
-use syntax::codemap::Span;
use syntax::ptr::P;
use syntax::codemap;
use syntax::attr::IntType;
+use syntax_pos::{self, Span};
use std::borrow::Cow;
use std::cmp::Ordering;
let field_pats =
try!(fields.iter()
.map(|field| Ok(codemap::Spanned {
- span: codemap::DUMMY_SP,
+ span: syntax_pos::DUMMY_SP,
node: hir::FieldPat {
name: field.name.node,
pat: try!(const_expr_to_pat(tcx, &field.expr,
extern crate rustc_back;
extern crate rustc_const_math;
extern crate graphviz;
-
+extern crate syntax_pos;
extern crate serialize as rustc_serialize; // used by deriving
// NB: This module needs to be declared first so diagnostics are
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Algorithm citation:
+//! A Simple, Fast Dominance Algorithm.
+//! Keith D. Cooper, Timothy J. Harvey, and Ken Kennedy
+//! Rice Computer Science TS-06-33870
+//! https://www.cs.rice.edu/~keith/EMBED/dom.pdf
+
+use super::ControlFlowGraph;
+use super::iterate::reverse_post_order;
+use super::super::indexed_vec::{IndexVec, Idx};
+
+use std::fmt;
+
+#[cfg(test)]
+mod test;
+
+/// Computes immediate dominators for every node reachable from the
+/// graph's start node, using the Cooper/Harvey/Kennedy algorithm
+/// (see the citation in the module header).
+pub fn dominators<G: ControlFlowGraph>(graph: &G) -> Dominators<G::Node> {
+ let start_node = graph.start_node();
+ let rpo = reverse_post_order(graph, start_node);
+ dominators_given_rpo(graph, &rpo)
+}
+
+/// Like `dominators`, but reuses a precomputed reverse post-order
+/// (`rpo`), whose first element must be the start node.
+/// Nodes not present in `rpo` (unreachable nodes) keep a `None`
+/// immediate-dominator entry.
+pub fn dominators_given_rpo<G: ControlFlowGraph>(graph: &G,
+ rpo: &[G::Node])
+ -> Dominators<G::Node> {
+ let start_node = graph.start_node();
+ assert_eq!(rpo[0], start_node);
+
+ // compute the post order index (rank) for each node
+ let mut post_order_rank: IndexVec<G::Node, usize> = IndexVec::from_elem_n(usize::default(),
+ graph.num_nodes());
+ for (index, node) in rpo.iter().rev().cloned().enumerate() {
+ post_order_rank[node] = index;
+ }
+
+ // `None` means "not yet computed" (and, at fixed point, "unreachable").
+ // The start node is defined to be its own immediate dominator.
+ let mut immediate_dominators: IndexVec<G::Node, Option<G::Node>> =
+ IndexVec::from_elem_n(Option::default(), graph.num_nodes());
+ immediate_dominators[start_node] = Some(start_node);
+
+ // Iterate to a fixed point: repeatedly refine each node's idom as the
+ // intersection (nearest common dominator) of its processed predecessors.
+ let mut changed = true;
+ while changed {
+ changed = false;
+
+ for &node in &rpo[1..] {
+ let mut new_idom = None;
+ for pred in graph.predecessors(node) {
+ if immediate_dominators[pred].is_some() {
+ // only predecessors whose own idom has already been
+ // (provisionally) computed can join the intersection
+ new_idom = intersect_opt(&post_order_rank,
+ &immediate_dominators,
+ new_idom,
+ Some(pred));
+ }
+ }
+
+ if new_idom != immediate_dominators[node] {
+ immediate_dominators[node] = new_idom;
+ changed = true;
+ }
+ }
+ }
+
+ Dominators {
+ post_order_rank: post_order_rank,
+ immediate_dominators: immediate_dominators,
+ }
+}
+
+/// Intersection helper where either side may be absent: `None` means
+/// "no candidate dominator yet", so the other side wins outright;
+/// with two candidates, defer to `intersect`.
+fn intersect_opt<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
+ immediate_dominators: &IndexVec<Node, Option<Node>>,
+ node1: Option<Node>,
+ node2: Option<Node>)
+ -> Option<Node> {
+ match (node1, node2) {
+ (None, None) => None,
+ (Some(n), None) | (None, Some(n)) => Some(n),
+ (Some(n1), Some(n2)) => Some(intersect(post_order_rank, immediate_dominators, n1, n2)),
+ }
+ }
+
+/// Finds the nearest common dominator of `node1` and `node2` by the
+/// classic two-finger walk: the node with the lower post-order rank
+/// is deeper, so climb it toward the root via its idom until the two
+/// fingers meet. The `unwrap`s rely on both nodes being reachable
+/// (their idom chain is fully populated).
+fn intersect<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
+ immediate_dominators: &IndexVec<Node, Option<Node>>,
+ mut node1: Node,
+ mut node2: Node)
+ -> Node {
+ while node1 != node2 {
+ while post_order_rank[node1] < post_order_rank[node2] {
+ node1 = immediate_dominators[node1].unwrap();
+ }
+
+ while post_order_rank[node2] < post_order_rank[node1] {
+ node2 = immediate_dominators[node2].unwrap();
+ }
+ }
+ return node1;
+}
+
+/// Result of a dominator computation: for each node, its post-order
+/// rank and its immediate dominator (`None` = unreachable from start).
+#[derive(Clone, Debug)]
+pub struct Dominators<N: Idx> {
+ post_order_rank: IndexVec<N, usize>,
+ immediate_dominators: IndexVec<N, Option<N>>,
+}
+
+impl<Node: Idx> Dominators<Node> {
+ /// A node is reachable iff an immediate dominator was computed for it.
+ pub fn is_reachable(&self, node: Node) -> bool {
+ self.immediate_dominators[node].is_some()
+ }
+
+ /// Immediate dominator of `node`; panics if `node` is unreachable.
+ /// Note: the start node is its own immediate dominator.
+ pub fn immediate_dominator(&self, node: Node) -> Node {
+ assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+ self.immediate_dominators[node].unwrap()
+ }
+
+ /// Iterates over all dominators of `node`, starting with `node`
+ /// itself and walking up to the root. Panics if unreachable.
+ pub fn dominators(&self, node: Node) -> Iter<Node> {
+ assert!(self.is_reachable(node), "node {:?} is not reachable", node);
+ Iter {
+ dominators: self,
+ node: Some(node),
+ }
+ }
+
+ /// True if `dom` dominates `node` (reflexive: a node dominates itself).
+ pub fn is_dominated_by(&self, node: Node, dom: Node) -> bool {
+ // FIXME -- could be optimized by using post-order-rank
+ self.dominators(node).any(|n| n == dom)
+ }
+
+ /// Nearest common dominator of two reachable nodes.
+ pub fn mutual_dominator_node(&self, node1: Node, node2: Node) -> Node {
+ assert!(self.is_reachable(node1),
+ "node {:?} is not reachable",
+ node1);
+ assert!(self.is_reachable(node2),
+ "node {:?} is not reachable",
+ node2);
+ intersect::<Node>(&self.post_order_rank,
+ &self.immediate_dominators,
+ node1,
+ node2)
+ }
+
+ /// Nearest common dominator of every node in `iter`; `None` for an
+ /// empty iterator.
+ pub fn mutual_dominator<I>(&self, iter: I) -> Option<Node>
+ where I: IntoIterator<Item = Node>
+ {
+ let mut iter = iter.into_iter();
+ iter.next()
+ .map(|dom| iter.fold(dom, |dom, node| self.mutual_dominator_node(dom, node)))
+ }
+
+ /// Raw access to the idom table (indexed by node; `None` = unreachable).
+ pub fn all_immediate_dominators(&self) -> &IndexVec<Node, Option<Node>> {
+ &self.immediate_dominators
+ }
+
+ /// Materializes the dominator tree: each reachable node becomes a
+ /// child of its immediate dominator; the self-dominating node is the
+ /// root. Panics (on the `unwrap`) if no root was found.
+ pub fn dominator_tree(&self) -> DominatorTree<Node> {
+ let elem: Vec<Node> = Vec::new();
+ let mut children: IndexVec<Node, Vec<Node>> =
+ IndexVec::from_elem_n(elem, self.immediate_dominators.len());
+ let mut root = None;
+ for (index, immed_dom) in self.immediate_dominators.iter().enumerate() {
+ let node = Node::new(index);
+ match *immed_dom {
+ None => {
+ // node not reachable; it has no place in the tree
+ }
+ Some(immed_dom) => {
+ if node == immed_dom {
+ // only the root dominates itself
+ root = Some(node);
+ } else {
+ children[immed_dom].push(node);
+ }
+ }
+ }
+ }
+ DominatorTree {
+ root: root.unwrap(),
+ children: children,
+ }
+ }
+}
+
+/// Iterator over a node's dominator chain; `node` is the next node to
+/// yield (`None` once the root has been produced).
+pub struct Iter<'dom, Node: Idx + 'dom> {
+ dominators: &'dom Dominators<Node>,
+ node: Option<Node>,
+}
+
+impl<'dom, Node: Idx> Iterator for Iter<'dom, Node> {
+ type Item = Node;
+
+ // Yields the current node, then steps to its immediate dominator;
+ // terminates when a node is its own idom (the root).
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(node) = self.node {
+ let dom = self.dominators.immediate_dominator(node);
+ if dom == node {
+ self.node = None; // reached the root
+ } else {
+ self.node = Some(dom);
+ }
+ return Some(node);
+ } else {
+ return None;
+ }
+ }
+}
+
+/// Explicit dominator tree: `children[n]` lists the nodes whose
+/// immediate dominator is `n`; `root` is the start node.
+pub struct DominatorTree<N: Idx> {
+ root: N,
+ children: IndexVec<N, Vec<N>>,
+}
+
+impl<Node: Idx> DominatorTree<Node> {
+ /// The tree's root (the graph's start node).
+ pub fn root(&self) -> Node {
+ self.root
+ }
+
+ /// Nodes immediately dominated by `node`.
+ pub fn children(&self, node: Node) -> &[Node] {
+ &self.children[node]
+ }
+
+ /// Depth-first iterator over the subtree rooted at `node`
+ /// (including `node` itself).
+ pub fn iter_children_of(&self, node: Node) -> IterChildrenOf<Node> {
+ IterChildrenOf {
+ tree: self,
+ stack: vec![node],
+ }
+ }
+}
+
+/// Explicit-stack DFS iterator over a dominator subtree.
+pub struct IterChildrenOf<'iter, Node: Idx + 'iter> {
+ tree: &'iter DominatorTree<Node>,
+ stack: Vec<Node>,
+}
+
+impl<'iter, Node: Idx> Iterator for IterChildrenOf<'iter, Node> {
+ type Item = Node;
+
+ // Pop a node, push its children, yield the node: a pre-order DFS
+ // without recursion.
+ fn next(&mut self) -> Option<Node> {
+ if let Some(node) = self.stack.pop() {
+ self.stack.extend(self.tree.children(node));
+ Some(node)
+ } else {
+ None
+ }
+ }
+}
+
+// Debug-print the whole tree by delegating to the root's
+// `DominatorTreeNode` wrapper, which recurses over children.
+impl<Node: Idx> fmt::Debug for DominatorTree<Node> {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+ fmt::Debug::fmt(&DominatorTreeNode {
+ tree: self,
+ node: self.root,
+ },
+ fmt)
+ }
+}
+
+/// Helper pairing a tree with one of its nodes, solely so `Debug`
+/// can be implemented recursively per subtree.
+struct DominatorTreeNode<'tree, Node: Idx> {
+ tree: &'tree DominatorTree<Node>,
+ node: Node,
+}
+
+impl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> {
+ // Renders the subtree as a nested tuple: (node, [child subtrees...]).
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+ let subtrees: Vec<_> = self.tree
+ .children(self.node)
+ .iter()
+ .map(|&child| {
+ DominatorTreeNode {
+ tree: self.tree,
+ node: child,
+ }
+ })
+ .collect();
+ fmt.debug_tuple("")
+ .field(&self.node)
+ .field(&subtrees)
+ .finish()
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::test::TestGraph;
+
+use super::*;
+
+// Diamond CFG 0 -> {1,2} -> 3: neither 1 nor 2 dominates 3, so every
+// node's immediate dominator is the entry node 0.
+#[test]
+fn diamond() {
+ let graph = TestGraph::new(0, &[
+ (0, 1),
+ (0, 2),
+ (1, 3),
+ (2, 3),
+ ]);
+
+ let dominators = dominators(&graph);
+ let immediate_dominators = dominators.all_immediate_dominators();
+ assert_eq!(immediate_dominators[0], Some(0));
+ assert_eq!(immediate_dominators[1], Some(0));
+ assert_eq!(immediate_dominators[2], Some(0));
+ assert_eq!(immediate_dominators[3], Some(0));
+}
+
+// Worked example (Figure 2) from the Cooper/Harvey/Kennedy paper:
+// a cyclic graph rooted at 6 where every reachable node's immediate
+// dominator collapses to the entry node; node 0 exists in the index
+// space but is not part of the graph, hence `None`.
+#[test]
+fn paper() {
+ // example from the paper:
+ let graph = TestGraph::new(6, &[
+ (6, 5),
+ (6, 4),
+ (5, 1),
+ (4, 2),
+ (4, 3),
+ (1, 2),
+ (2, 3),
+ (3, 2),
+ (2, 1),
+ ]);
+
+ let dominators = dominators(&graph);
+ let immediate_dominators = dominators.all_immediate_dominators();
+ assert_eq!(immediate_dominators[0], None); // <-- note that 0 is not in graph
+ assert_eq!(immediate_dominators[1], Some(6));
+ assert_eq!(immediate_dominators[2], Some(6));
+ assert_eq!(immediate_dominators[3], Some(6));
+ assert_eq!(immediate_dominators[4], Some(6));
+ assert_eq!(immediate_dominators[5], Some(6));
+ assert_eq!(immediate_dominators[6], Some(6));
+}
+
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::ControlFlowGraph;
+use super::super::indexed_vec::IndexVec;
+
+#[cfg(test)]
+mod test;
+
+pub fn post_order_from<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
+ post_order_from_to(graph, start_node, None)
+}
+
+pub fn post_order_from_to<G: ControlFlowGraph>(graph: &G,
+ start_node: G::Node,
+ end_node: Option<G::Node>)
+ -> Vec<G::Node> {
+ let mut visited: IndexVec<G::Node, bool> = IndexVec::from_elem_n(false, graph.num_nodes());
+ let mut result: Vec<G::Node> = Vec::with_capacity(graph.num_nodes());
+ if let Some(end_node) = end_node {
+ visited[end_node] = true;
+ }
+ post_order_walk(graph, start_node, &mut result, &mut visited);
+ result
+}
+
+fn post_order_walk<G: ControlFlowGraph>(graph: &G,
+ node: G::Node,
+ result: &mut Vec<G::Node>,
+ visited: &mut IndexVec<G::Node, bool>) {
+ if visited[node] {
+ return;
+ }
+ visited[node] = true;
+
+ for successor in graph.successors(node) {
+ post_order_walk(graph, successor, result, visited);
+ }
+
+ result.push(node);
+}
+
+pub fn pre_order_walk<G: ControlFlowGraph>(graph: &G,
+ node: G::Node,
+ result: &mut Vec<G::Node>,
+ visited: &mut IndexVec<G::Node, bool>) {
+ if visited[node] {
+ return;
+ }
+ visited[node] = true;
+
+ result.push(node);
+
+ for successor in graph.successors(node) {
+ pre_order_walk(graph, successor, result, visited);
+ }
+}
+
+pub fn reverse_post_order<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
+ let mut vec = post_order_from(graph, start_node);
+ vec.reverse();
+ vec
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::test::TestGraph;
+use super::super::transpose::TransposedGraph;
+
+use super::*;
+
+#[test]
+fn diamond_post_order() {
+ let graph = TestGraph::new(0, &[
+ (0, 1),
+ (0, 2),
+ (1, 3),
+ (2, 3),
+ ]);
+
+ let result = post_order_from(&graph, 0);
+ assert_eq!(result, vec![3, 1, 2, 0]);
+}
+
+
+#[test]
+fn rev_post_order_inner_loop() {
+ // 0 -> 1 -> 2 -> 3 -> 5
+ // ^ ^ v |
+ // | 6 <- 4 |
+ // +-----------------+
+ let graph = TestGraph::new(0, &[
+ (0, 1),
+ (1, 2),
+ (2, 3),
+ (3, 5),
+ (3, 1),
+ (2, 4),
+ (4, 6),
+ (6, 2),
+ ]);
+
+ let rev_graph = TransposedGraph::new(&graph);
+
+ let result = post_order_from_to(&rev_graph, 6, Some(2));
+ assert_eq!(result, vec![4, 6]);
+
+ let result = post_order_from_to(&rev_graph, 3, Some(1));
+ assert_eq!(result, vec![4, 6, 2, 3]);
+}
+
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::indexed_vec::Idx;
+pub use std::slice::Iter;
+
+pub mod dominators;
+pub mod iterate;
+pub mod reachable;
+mod reference;
+pub mod transpose;
+
+#[cfg(test)]
+mod test;
+
+pub trait ControlFlowGraph
+ where Self: for<'graph> GraphPredecessors<'graph, Item=<Self as ControlFlowGraph>::Node>,
+ Self: for<'graph> GraphSuccessors<'graph, Item=<Self as ControlFlowGraph>::Node>
+{
+ type Node: Idx;
+
+ fn num_nodes(&self) -> usize;
+ fn start_node(&self) -> Self::Node;
+ fn predecessors<'graph>(&'graph self, node: Self::Node)
+ -> <Self as GraphPredecessors<'graph>>::Iter;
+ fn successors<'graph>(&'graph self, node: Self::Node)
+ -> <Self as GraphSuccessors<'graph>>::Iter;
+}
+
+pub trait GraphPredecessors<'graph> {
+ type Item;
+ type Iter: Iterator<Item=Self::Item>;
+}
+
+pub trait GraphSuccessors<'graph> {
+ type Item;
+ type Iter: Iterator<Item=Self::Item>;
+}
\ No newline at end of file
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Compute reachability using a simple dataflow propagation.
+//! Store end-result in a big NxN bit matrix.
+
+use super::ControlFlowGraph;
+use super::super::bitvec::BitVector;
+use super::iterate::reverse_post_order;
+use super::super::indexed_vec::{IndexVec, Idx};
+
+#[cfg(test)]
+mod test;
+
+pub fn reachable<G: ControlFlowGraph>(graph: &G)
+ -> Reachability<G::Node> {
+ let reverse_post_order = reverse_post_order(graph, graph.start_node());
+ reachable_given_rpo(graph, &reverse_post_order)
+}
+
+pub fn reachable_given_rpo<G: ControlFlowGraph>(graph: &G,
+ reverse_post_order: &[G::Node])
+ -> Reachability<G::Node> {
+ let mut reachability = Reachability::new(graph);
+ let mut changed = true;
+ while changed {
+ changed = false;
+ for &node in reverse_post_order.iter().rev() {
+ // every node can reach itself
+ changed |= reachability.bits[node].insert(node.index());
+
+ // and every pred can reach everything node can reach
+ for pred in graph.predecessors(node) {
+ let nodes_bits = reachability.bits[node].clone();
+ changed |= reachability.bits[pred].insert_all(&nodes_bits);
+ }
+ }
+ }
+ reachability
+}
+
+pub struct Reachability<Node: Idx> {
+ bits: IndexVec<Node, BitVector>,
+}
+
+impl<Node: Idx> Reachability<Node> {
+ fn new<G: ControlFlowGraph>(graph: &G) -> Self {
+ let num_nodes = graph.num_nodes();
+ Reachability {
+ bits: IndexVec::from_elem_n(BitVector::new(num_nodes), num_nodes),
+ }
+ }
+
+ pub fn can_reach(&self, source: Node, target: Node)-> bool {
+ let bit: usize = target.index();
+ self.bits[source].contains(bit)
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::super::test::TestGraph;
+
+use super::*;
+
+#[test]
+fn test1() {
+ // 0 -> 1 -> 2 -> 3
+ // ^ v
+ // 6 <- 4 -> 5
+ let graph = TestGraph::new(0, &[
+ (0, 1),
+ (1, 2),
+ (2, 3),
+ (2, 4),
+ (4, 5),
+ (4, 6),
+ (6, 1),
+ ]);
+ let reachable = reachable(&graph);
+ assert!((0..6).all(|i| reachable.can_reach(0, i)));
+ assert!((1..6).all(|i| reachable.can_reach(1, i)));
+ assert!((1..6).all(|i| reachable.can_reach(2, i)));
+ assert!((1..6).all(|i| reachable.can_reach(4, i)));
+ assert!((1..6).all(|i| reachable.can_reach(6, i)));
+ assert!(reachable.can_reach(3, 3));
+ assert!(!reachable.can_reach(3, 5));
+ assert!(!reachable.can_reach(5, 3));
+}
+
+/// use bigger indices to cross between words in the bit set
+#[test]
+fn test2() {
+ // 30 -> 31 -> 32 -> 33
+ // ^ v
+ // 36 <- 34 -> 35
+ let graph = TestGraph::new(30, &[
+ (30, 31),
+ (31, 32),
+ (32, 33),
+ (32, 34),
+ (34, 35),
+ (34, 36),
+ (36, 31),
+ ]);
+ let reachable = reachable(&graph);
+ assert!((30..36).all(|i| reachable.can_reach(30, i)));
+ assert!((31..36).all(|i| reachable.can_reach(31, i)));
+ assert!((31..36).all(|i| reachable.can_reach(32, i)));
+ assert!((31..36).all(|i| reachable.can_reach(34, i)));
+ assert!((31..36).all(|i| reachable.can_reach(36, i)));
+ assert!(reachable.can_reach(33, 33));
+ assert!(!reachable.can_reach(33, 35));
+ assert!(!reachable.can_reach(35, 33));
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::*;
+
+impl<'graph, G: ControlFlowGraph> ControlFlowGraph for &'graph G {
+ type Node = G::Node;
+
+ fn num_nodes(&self) -> usize {
+ (**self).num_nodes()
+ }
+
+ fn start_node(&self) -> Self::Node {
+ (**self).start_node()
+ }
+
+ fn predecessors<'iter>(&'iter self, node: Self::Node)
+ -> <Self as GraphPredecessors<'iter>>::Iter {
+ (**self).predecessors(node)
+ }
+
+ fn successors<'iter>(&'iter self, node: Self::Node)
+ -> <Self as GraphSuccessors<'iter>>::Iter {
+ (**self).successors(node)
+ }
+}
+
+impl<'iter, 'graph, G: ControlFlowGraph> GraphPredecessors<'iter> for &'graph G {
+ type Item = G::Node;
+ type Iter = <G as GraphPredecessors<'iter>>::Iter;
+}
+
+impl<'iter, 'graph, G: ControlFlowGraph> GraphSuccessors<'iter> for &'graph G {
+ type Item = G::Node;
+ type Iter = <G as GraphSuccessors<'iter>>::Iter;
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::collections::HashMap;
+use std::cmp::max;
+use std::slice;
+use std::iter;
+
+use super::{ControlFlowGraph, GraphPredecessors, GraphSuccessors};
+
+pub struct TestGraph {
+ num_nodes: usize,
+ start_node: usize,
+ successors: HashMap<usize, Vec<usize>>,
+ predecessors: HashMap<usize, Vec<usize>>,
+}
+
+impl TestGraph {
+ pub fn new(start_node: usize, edges: &[(usize, usize)]) -> Self {
+ let mut graph = TestGraph {
+ num_nodes: start_node + 1,
+ start_node: start_node,
+ successors: HashMap::new(),
+ predecessors: HashMap::new()
+ };
+ for &(source, target) in edges {
+ graph.num_nodes = max(graph.num_nodes, source + 1);
+ graph.num_nodes = max(graph.num_nodes, target + 1);
+ graph.successors.entry(source).or_insert(vec![]).push(target);
+ graph.predecessors.entry(target).or_insert(vec![]).push(source);
+ }
+ for node in 0..graph.num_nodes {
+ graph.successors.entry(node).or_insert(vec![]);
+ graph.predecessors.entry(node).or_insert(vec![]);
+ }
+ graph
+ }
+}
+
+impl ControlFlowGraph for TestGraph {
+ type Node = usize;
+
+ fn start_node(&self) -> usize {
+ self.start_node
+ }
+
+ fn num_nodes(&self) -> usize {
+ self.num_nodes
+ }
+
+ fn predecessors<'graph>(&'graph self, node: usize)
+ -> <Self as GraphPredecessors<'graph>>::Iter {
+ self.predecessors[&node].iter().cloned()
+ }
+
+ fn successors<'graph>(&'graph self, node: usize)
+ -> <Self as GraphSuccessors<'graph>>::Iter {
+ self.successors[&node].iter().cloned()
+ }
+}
+
+impl<'graph> GraphPredecessors<'graph> for TestGraph {
+ type Item = usize;
+ type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
+}
+
+impl<'graph> GraphSuccessors<'graph> for TestGraph {
+ type Item = usize;
+ type Iter = iter::Cloned<slice::Iter<'graph, usize>>;
+}
+
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::*;
+
+pub struct TransposedGraph<G: ControlFlowGraph> {
+ base_graph: G,
+ start_node: G::Node,
+}
+
+impl<G: ControlFlowGraph> TransposedGraph<G> {
+ pub fn new(base_graph: G) -> Self {
+ let start_node = base_graph.start_node();
+ Self::with_start(base_graph, start_node)
+ }
+
+ pub fn with_start(base_graph: G, start_node: G::Node) -> Self {
+ TransposedGraph { base_graph: base_graph, start_node: start_node }
+ }
+}
+
+impl<G: ControlFlowGraph> ControlFlowGraph for TransposedGraph<G> {
+ type Node = G::Node;
+
+ fn num_nodes(&self) -> usize {
+ self.base_graph.num_nodes()
+ }
+
+ fn start_node(&self) -> Self::Node {
+ self.start_node
+ }
+
+ fn predecessors<'graph>(&'graph self, node: Self::Node)
+ -> <Self as GraphPredecessors<'graph>>::Iter {
+ self.base_graph.successors(node)
+ }
+
+ fn successors<'graph>(&'graph self, node: Self::Node)
+ -> <Self as GraphSuccessors<'graph>>::Iter {
+ self.base_graph.predecessors(node)
+ }
+}
+
+impl<'graph, G: ControlFlowGraph> GraphPredecessors<'graph> for TransposedGraph<G> {
+ type Item = G::Node;
+ type Iter = <G as GraphSuccessors<'graph>>::Iter;
+}
+
+impl<'graph, G: ControlFlowGraph> GraphSuccessors<'graph> for TransposedGraph<G> {
+ type Item = G::Node;
+ type Iter = <G as GraphPredecessors<'graph>>::Iter;
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::fmt::Debug;
use std::iter::{self, FromIterator};
use std::slice;
use std::marker::PhantomData;
/// Represents some newtyped `usize` wrapper.
///
/// (purpose: avoid mixing indexes for different bitvector domains.)
-pub trait Idx: Copy + 'static {
+pub trait Idx: Copy + 'static + Eq + Debug {
fn new(usize) -> Self;
fn index(self) -> usize;
}
IndexVec { raw: vec![elem; universe.len()], _marker: PhantomData }
}
+ #[inline]
+ pub fn from_elem_n(elem: T, n: usize) -> Self
+ where T: Clone
+ {
+ IndexVec { raw: vec![elem; n], _marker: PhantomData }
+ }
+
#[inline]
pub fn push(&mut self, d: T) -> I {
let idx = I::new(self.len());
pub mod fnv;
pub mod tuple_slice;
pub mod veccell;
+pub mod control_flow_graph;
// See comments in src/librustc/lib.rs
#[doc(hidden)]
rustc_back = { path = "../librustc_back" }
rustc_borrowck = { path = "../librustc_borrowck" }
rustc_const_eval = { path = "../librustc_const_eval" }
+rustc_errors = { path = "../librustc_errors" }
rustc_lint = { path = "../librustc_lint" }
rustc_llvm = { path = "../librustc_llvm" }
rustc_mir = { path = "../librustc_mir" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_ext = { path = "../libsyntax_ext" }
+syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
}
pub fn assign_node_ids(sess: &Session, krate: ast::Crate) -> ast::Crate {
+ use syntax::ptr::P;
+ use syntax::util::move_map::MoveMap;
+
struct NodeIdAssigner<'a> {
sess: &'a Session,
}
assert_eq!(old_id, ast::DUMMY_NODE_ID);
self.sess.next_node_id()
}
+
+ fn fold_block(&mut self, block: P<ast::Block>) -> P<ast::Block> {
+ block.map(|mut block| {
+ block.id = self.new_id(block.id);
+
+ let stmt = block.stmts.pop();
+ block.stmts = block.stmts.move_flat_map(|s| self.fold_stmt(s).into_iter());
+ if let Some(ast::Stmt { node: ast::StmtKind::Expr(expr), span, .. }) = stmt {
+ let expr = self.fold_expr(expr);
+ block.stmts.push(ast::Stmt {
+ id: expr.id,
+ node: ast::StmtKind::Expr(expr),
+ span: span,
+ });
+ } else if let Some(stmt) = stmt {
+ block.stmts.extend(self.fold_stmt(stmt));
+ }
+
+ block
+ })
+ }
}
let krate = time(sess.time_passes(),
extern crate rustc_back;
extern crate rustc_borrowck;
extern crate rustc_const_eval;
+extern crate rustc_errors as errors;
extern crate rustc_passes;
extern crate rustc_lint;
extern crate rustc_plugin;
#[macro_use]
extern crate syntax;
extern crate syntax_ext;
+extern crate syntax_pos;
use driver::CompileController;
use pretty::{PpMode, UserIdentifiedItem};
use rustc::session::early_error;
-use syntax::{ast, errors, diagnostics};
-use syntax::codemap::{CodeMap, FileLoader, RealFileLoader, MultiSpan};
-use syntax::errors::emitter::Emitter;
+use syntax::{ast, json};
+use syntax::codemap::{CodeMap, FileLoader, RealFileLoader};
use syntax::feature_gate::{GatedCfg, UnstableFeatures};
use syntax::parse::{self, PResult, token};
+use syntax_pos::MultiSpan;
+use errors::emitter::Emitter;
#[cfg(test)]
pub mod test;
fn early_callback(&mut self,
_: &getopts::Matches,
_: &config::Options,
- _: &diagnostics::registry::Registry,
+ _: &errors::registry::Registry,
_: ErrorOutputType)
-> Compilation {
Compilation::Continue
_: &config::Options,
_: &Option<PathBuf>,
_: &Option<PathBuf>,
- _: &diagnostics::registry::Registry)
+ _: &errors::registry::Registry)
-> Option<(Input, Option<PathBuf>)> {
None
}
pub struct RustcDefaultCalls;
fn handle_explain(code: &str,
- descriptions: &diagnostics::registry::Registry,
+ descriptions: &errors::registry::Registry,
output: ErrorOutputType) {
let normalised = if code.starts_with("E") {
code.to_string()
config::ErrorOutputType::HumanReadable(color_config) => {
Box::new(errors::emitter::BasicEmitter::stderr(color_config))
}
- config::ErrorOutputType::Json => Box::new(errors::json::JsonEmitter::basic()),
+ config::ErrorOutputType::Json => Box::new(json::JsonEmitter::basic()),
};
let mut saw_invalid_predicate = false;
fn early_callback(&mut self,
matches: &getopts::Matches,
sopts: &config::Options,
- descriptions: &diagnostics::registry::Registry,
+ descriptions: &errors::registry::Registry,
output: ErrorOutputType)
-> Compilation {
if let Some(ref code) = matches.opt_str("explain") {
sopts: &config::Options,
odir: &Option<PathBuf>,
ofile: &Option<PathBuf>,
- descriptions: &diagnostics::registry::Registry)
+ descriptions: &errors::registry::Registry)
-> Option<(Input, Option<PathBuf>)> {
match matches.free.len() {
0 => {
panic!();
}
-pub fn diagnostics_registry() -> diagnostics::registry::Registry {
- use syntax::diagnostics::registry::Registry;
+pub fn diagnostics_registry() -> errors::registry::Registry {
+ use errors::registry::Registry;
let mut all_errors = Vec::new();
all_errors.extend_from_slice(&rustc::DIAGNOSTICS);
use rustc_mir::graphviz::write_mir_graphviz;
use syntax::ast::{self, BlockCheckMode};
-use syntax::codemap;
use syntax::fold::{self, Folder};
use syntax::print::{pp, pprust};
use syntax::print::pprust::PrintState;
use syntax::ptr::P;
use syntax::util::small_vector::SmallVector;
+use syntax_pos;
use graphviz as dot;
fn fold_block(&mut self, b: P<ast::Block>) -> P<ast::Block> {
fn expr_to_block(rules: ast::BlockCheckMode, e: Option<P<ast::Expr>>) -> P<ast::Block> {
P(ast::Block {
- expr: e,
- stmts: vec![],
+ stmts: e.map(|e| ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: e.span,
+ node: ast::StmtKind::Expr(e),
+ }).into_iter().collect(),
rules: rules,
id: ast::DUMMY_NODE_ID,
- span: codemap::DUMMY_SP,
+ span: syntax_pos::DUMMY_SP,
})
}
let loop_expr = P(ast::Expr {
node: ast::ExprKind::Loop(empty_block, None),
id: ast::DUMMY_NODE_ID,
- span: codemap::DUMMY_SP,
- attrs: None,
+ span: syntax_pos::DUMMY_SP,
+ attrs: ast::ThinVec::new(),
});
expr_to_block(b.rules, Some(loop_expr))
use std::rc::Rc;
use syntax::ast;
use syntax::abi::Abi;
-use syntax::codemap::{CodeMap, DUMMY_SP};
-use syntax::errors;
-use syntax::errors::emitter::{CoreEmitter, Emitter};
-use syntax::errors::{Level, RenderSpan};
+use syntax::codemap::CodeMap;
+use errors;
+use errors::emitter::{CoreEmitter, Emitter};
+use errors::{Level, RenderSpan};
use syntax::parse::token;
use syntax::feature_gate::UnstableFeatures;
+use syntax_pos::DUMMY_SP;
use rustc::hir;
--- /dev/null
+[package]
+authors = ["The Rust Project Developers"]
+name = "rustc_errors"
+version = "0.0.0"
+
+[lib]
+name = "rustc_errors"
+path = "lib.rs"
+crate-type = ["dylib"]
+
+[dependencies]
+log = { path = "../liblog" }
+serialize = { path = "../libserialize" }
+syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use self::Destination::*;
+
+use syntax_pos::{COMMAND_LINE_SP, DUMMY_SP, Span, MultiSpan, LineInfo};
+use registry;
+
+use check_old_skool;
+use {Level, RenderSpan, CodeSuggestion, DiagnosticBuilder, CodeMapper};
+use RenderSpan::*;
+use Level::*;
+use snippet::{RenderedLineKind, SnippetData, Style, FormatMode};
+
+use std::{cmp, fmt};
+use std::io::prelude::*;
+use std::io;
+use std::rc::Rc;
+use term;
+
+/// Emitter trait for emitting errors. Do not implement this directly:
+/// implement `CoreEmitter` instead.
+pub trait Emitter {
+ /// Emit a standalone diagnostic message.
+ fn emit(&mut self, span: &MultiSpan, msg: &str, code: Option<&str>, lvl: Level);
+
+ /// Emit a structured diagnostic.
+ fn emit_struct(&mut self, db: &DiagnosticBuilder);
+}
+
+pub trait CoreEmitter {
+ fn emit_message(&mut self,
+ rsp: &RenderSpan,
+ msg: &str,
+ code: Option<&str>,
+ lvl: Level,
+ is_header: bool,
+ show_snippet: bool);
+}
+
+impl<T: CoreEmitter> Emitter for T {
+ fn emit(&mut self,
+ msp: &MultiSpan,
+ msg: &str,
+ code: Option<&str>,
+ lvl: Level) {
+ self.emit_message(&FullSpan(msp.clone()),
+ msg,
+ code,
+ lvl,
+ true,
+ true);
+ }
+
+ fn emit_struct(&mut self, db: &DiagnosticBuilder) {
+ let old_school = check_old_skool();
+ let db_span = FullSpan(db.span.clone());
+ self.emit_message(&FullSpan(db.span.clone()),
+ &db.message,
+ db.code.as_ref().map(|s| &**s),
+ db.level,
+ true,
+ true);
+ for child in &db.children {
+ let render_span = child.render_span
+ .clone()
+ .unwrap_or_else(
+ || FullSpan(child.span.clone()));
+
+ if !old_school {
+ self.emit_message(&render_span,
+ &child.message,
+ None,
+ child.level,
+ false,
+ true);
+ } else {
+ let (render_span, show_snippet) = match render_span.span().primary_span() {
+ None => (db_span.clone(), false),
+ _ => (render_span, true)
+ };
+ self.emit_message(&render_span,
+ &child.message,
+ None,
+ child.level,
+ false,
+ show_snippet);
+ }
+ }
+ }
+}
+
+/// maximum number of lines we will print for each error; arbitrary.
+pub const MAX_HIGHLIGHT_LINES: usize = 6;
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum ColorConfig {
+ Auto,
+ Always,
+ Never,
+}
+
+impl ColorConfig {
+ fn use_color(&self) -> bool {
+ match *self {
+ ColorConfig::Always => true,
+ ColorConfig::Never => false,
+ ColorConfig::Auto => stderr_isatty(),
+ }
+ }
+}
+
+/// A basic emitter for when we don't have access to a codemap or registry. Used
+/// for reporting very early errors, etc.
+pub struct BasicEmitter {
+ dst: Destination,
+}
+
+impl CoreEmitter for BasicEmitter {
+ fn emit_message(&mut self,
+ _rsp: &RenderSpan,
+ msg: &str,
+ code: Option<&str>,
+ lvl: Level,
+ _is_header: bool,
+ _show_snippet: bool) {
+ // we ignore the span as we have no access to a codemap at this point
+ if let Err(e) = print_diagnostic(&mut self.dst, "", lvl, msg, code) {
+ panic!("failed to print diagnostics: {:?}", e);
+ }
+ }
+}
+
+impl BasicEmitter {
+ pub fn stderr(color_config: ColorConfig) -> BasicEmitter {
+ if color_config.use_color() {
+ let dst = Destination::from_stderr();
+ BasicEmitter { dst: dst }
+ } else {
+ BasicEmitter { dst: Raw(Box::new(io::stderr())) }
+ }
+ }
+}
+
+pub struct EmitterWriter {
+ dst: Destination,
+ registry: Option<registry::Registry>,
+ cm: Rc<CodeMapper>,
+
+ /// Is this the first error emitted thus far? If not, we emit a
+ /// `\n` before the top-level errors.
+ first: bool,
+
+ // For now, allow an old-school mode while we transition
+ format_mode: FormatMode
+}
+
+impl CoreEmitter for EmitterWriter {
+ fn emit_message(&mut self,
+ rsp: &RenderSpan,
+ msg: &str,
+ code: Option<&str>,
+ lvl: Level,
+ is_header: bool,
+ show_snippet: bool) {
+ match self.emit_message_(rsp, msg, code, lvl, is_header, show_snippet) {
+ Ok(()) => { }
+ Err(e) => panic!("failed to emit error: {}", e)
+ }
+ }
+}
+
+/// Do not use this for messages that end in `\n` – use `println_maybe_styled` instead. See
+/// `EmitterWriter::print_maybe_styled` for details.
+macro_rules! print_maybe_styled {
+ ($dst: expr, $style: expr, $($arg: tt)*) => {
+ $dst.print_maybe_styled(format_args!($($arg)*), $style, false)
+ }
+}
+
+macro_rules! println_maybe_styled {
+ ($dst: expr, $style: expr, $($arg: tt)*) => {
+ $dst.print_maybe_styled(format_args!($($arg)*), $style, true)
+ }
+}
+
+impl EmitterWriter {
+ pub fn stderr(color_config: ColorConfig,
+ registry: Option<registry::Registry>,
+ code_map: Rc<CodeMapper>,
+ format_mode: FormatMode)
+ -> EmitterWriter {
+ if color_config.use_color() {
+ let dst = Destination::from_stderr();
+ EmitterWriter { dst: dst,
+ registry: registry,
+ cm: code_map,
+ first: true,
+ format_mode: format_mode.clone() }
+ } else {
+ EmitterWriter { dst: Raw(Box::new(io::stderr())),
+ registry: registry,
+ cm: code_map,
+ first: true,
+ format_mode: format_mode.clone() }
+ }
+ }
+
+ pub fn new(dst: Box<Write + Send>,
+ registry: Option<registry::Registry>,
+ code_map: Rc<CodeMapper>,
+ format_mode: FormatMode)
+ -> EmitterWriter {
+ EmitterWriter { dst: Raw(dst),
+ registry: registry,
+ cm: code_map,
+ first: true,
+ format_mode: format_mode.clone() }
+ }
+
+ fn emit_message_(&mut self,
+ rsp: &RenderSpan,
+ msg: &str,
+ code: Option<&str>,
+ lvl: Level,
+ is_header: bool,
+ show_snippet: bool)
+ -> io::Result<()> {
+ let old_school = match self.format_mode {
+ FormatMode::NewErrorFormat => false,
+ FormatMode::OriginalErrorFormat => true,
+ FormatMode::EnvironmentSelected => check_old_skool()
+ };
+
+ if is_header {
+ if self.first {
+ self.first = false;
+ } else {
+ if !old_school {
+ write!(self.dst, "\n")?;
+ }
+ }
+ }
+
+ match code {
+ Some(code) if self.registry.as_ref()
+ .and_then(|registry| registry.find_description(code))
+ .is_some() => {
+ let code_with_explain = String::from("--explain ") + code;
+ if old_school {
+ let loc = match rsp.span().primary_span() {
+ Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(),
+ Some(ps) => self.cm.span_to_string(ps),
+ None => "".to_string()
+ };
+ print_diagnostic(&mut self.dst, &loc, lvl, msg, Some(code))?
+ }
+ else {
+ print_diagnostic(&mut self.dst, "", lvl, msg, Some(&code_with_explain))?
+ }
+ }
+ _ => {
+ if old_school {
+ let loc = match rsp.span().primary_span() {
+ Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(),
+ Some(ps) => self.cm.span_to_string(ps),
+ None => "".to_string()
+ };
+ print_diagnostic(&mut self.dst, &loc, lvl, msg, code)?
+ }
+ else {
+ print_diagnostic(&mut self.dst, "", lvl, msg, code)?
+ }
+ }
+ }
+
+ if !show_snippet {
+ return Ok(());
+ }
+
+ // Watch out for various nasty special spans; don't try to
+ // print any filename or anything for those.
+ match rsp.span().primary_span() {
+ Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => {
+ return Ok(());
+ }
+ _ => { }
+ }
+
+ // Otherwise, print out the snippet etc as needed.
+ match *rsp {
+ FullSpan(ref msp) => {
+ self.highlight_lines(msp, lvl)?;
+ if let Some(primary_span) = msp.primary_span() {
+ self.print_macro_backtrace(primary_span)?;
+ }
+ }
+ Suggestion(ref suggestion) => {
+ self.highlight_suggestion(suggestion)?;
+ if let Some(primary_span) = rsp.span().primary_span() {
+ self.print_macro_backtrace(primary_span)?;
+ }
+ }
+ }
+ if old_school {
+ match code {
+ Some(code) if self.registry.as_ref()
+ .and_then(|registry| registry.find_description(code))
+ .is_some() => {
+ let loc = match rsp.span().primary_span() {
+ Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(),
+ Some(ps) => self.cm.span_to_string(ps),
+ None => "".to_string()
+ };
+ let msg = "run `rustc --explain ".to_string() + &code.to_string() +
+ "` to see a detailed explanation";
+ print_diagnostic(&mut self.dst, &loc, Level::Help, &msg,
+ None)?
+ }
+ _ => ()
+ }
+ }
+ Ok(())
+ }
+
+ fn highlight_suggestion(&mut self, suggestion: &CodeSuggestion) -> io::Result<()>
+ {
+ use std::borrow::Borrow;
+
+ let primary_span = suggestion.msp.primary_span().unwrap();
+ let lines = self.cm.span_to_lines(primary_span).unwrap();
+ assert!(!lines.lines.is_empty());
+
+ let complete = suggestion.splice_lines(self.cm.borrow());
+ let line_count = cmp::min(lines.lines.len(), MAX_HIGHLIGHT_LINES);
+ let display_lines = &lines.lines[..line_count];
+
+ let fm = &*lines.file;
+ // Calculate the widest number to format evenly
+ let max_digits = line_num_max_digits(display_lines.last().unwrap());
+
+ // print the suggestion without any line numbers, but leave
+ // space for them. This helps with lining up with previous
+ // snippets from the actual error being reported.
+ let mut lines = complete.lines();
+ for line in lines.by_ref().take(MAX_HIGHLIGHT_LINES) {
+ write!(&mut self.dst, "{0}:{1:2$} {3}\n",
+ fm.name, "", max_digits, line)?;
+ }
+
+ // if we elided some lines, add an ellipsis
+ if let Some(_) = lines.next() {
+ write!(&mut self.dst, "{0:1$} {0:2$} ...\n",
+ "", fm.name.len(), max_digits)?;
+ }
+
+ Ok(())
+ }
+
+ pub fn highlight_lines(&mut self,
+ msp: &MultiSpan,
+ lvl: Level)
+ -> io::Result<()>
+ {
+ let old_school = match self.format_mode {
+ FormatMode::NewErrorFormat => false,
+ FormatMode::OriginalErrorFormat => true,
+ FormatMode::EnvironmentSelected => check_old_skool()
+ };
+
+ let mut snippet_data = SnippetData::new(self.cm.clone(),
+ msp.primary_span(),
+ self.format_mode.clone());
+ if old_school {
+ let mut output_vec = vec![];
+
+ for span_label in msp.span_labels() {
+ let mut snippet_data = SnippetData::new(self.cm.clone(),
+ Some(span_label.span),
+ self.format_mode.clone());
+
+ snippet_data.push(span_label.span,
+ span_label.is_primary,
+ span_label.label);
+ if span_label.is_primary {
+ output_vec.insert(0, snippet_data);
+ }
+ else {
+ output_vec.push(snippet_data);
+ }
+ }
+
+ for snippet_data in output_vec.iter() {
+ let rendered_lines = snippet_data.render_lines();
+ for rendered_line in &rendered_lines {
+ for styled_string in &rendered_line.text {
+ self.dst.apply_style(lvl, &rendered_line.kind, styled_string.style)?;
+ write!(&mut self.dst, "{}", styled_string.text)?;
+ self.dst.reset_attrs()?;
+ }
+ write!(&mut self.dst, "\n")?;
+ }
+ }
+ }
+ else {
+ for span_label in msp.span_labels() {
+ snippet_data.push(span_label.span,
+ span_label.is_primary,
+ span_label.label);
+ }
+ let rendered_lines = snippet_data.render_lines();
+ for rendered_line in &rendered_lines {
+ for styled_string in &rendered_line.text {
+ self.dst.apply_style(lvl, &rendered_line.kind, styled_string.style)?;
+ write!(&mut self.dst, "{}", styled_string.text)?;
+ self.dst.reset_attrs()?;
+ }
+ write!(&mut self.dst, "\n")?;
+ }
+ }
+ Ok(())
+ }
+
+ fn print_macro_backtrace(&mut self,
+ sp: Span)
+ -> io::Result<()> {
+ for trace in self.cm.macro_backtrace(sp) {
+ let mut diag_string =
+ format!("in this expansion of {}", trace.macro_decl_name);
+ if let Some(def_site_span) = trace.def_site_span {
+ diag_string.push_str(
+ &format!(" (defined in {})",
+ self.cm.span_to_filename(def_site_span)));
+ }
+ let snippet = self.cm.span_to_string(trace.call_site);
+ print_diagnostic(&mut self.dst, &snippet, Note, &diag_string, None)?;
+ }
+ Ok(())
+ }
+}
+
+fn line_num_max_digits(line: &LineInfo) -> usize {
+ let mut max_line_num = line.line_index + 1;
+ let mut digits = 0;
+ while max_line_num > 0 {
+ max_line_num /= 10;
+ digits += 1;
+ }
+ digits
+}
+
+/// Writes the `topic: level: msg [code]` header line to `dst`, applying
+/// bold/colored attributes on terminals (codes are bright magenta, the
+/// level uses its own color).
+fn print_diagnostic(dst: &mut Destination,
+ topic: &str,
+ lvl: Level,
+ msg: &str,
+ code: Option<&str>)
+ -> io::Result<()> {
+ if !topic.is_empty() {
+ // Old-school format separates the topic with a space; the new
+ // format uses a colon.
+ let old_school = check_old_skool();
+ if !old_school {
+ write!(dst, "{}: ", topic)?;
+ }
+ else {
+ write!(dst, "{} ", topic)?;
+ }
+ dst.reset_attrs()?;
+ }
+ dst.start_attr(term::Attr::Bold)?;
+ dst.start_attr(term::Attr::ForegroundColor(lvl.color()))?;
+ // `Level` implements `Display`, so there is no need to allocate a
+ // `String` via `to_string()` first.
+ write!(dst, "{}", lvl)?;
+ dst.reset_attrs()?;
+ write!(dst, ": ")?;
+ dst.start_attr(term::Attr::Bold)?;
+ write!(dst, "{}", msg)?;
+
+ if let Some(code) = code {
+ let style = term::Attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
+ // `code` is already a `&str`; `.clone()` only copied the reference.
+ print_maybe_styled!(dst, style, " [{}]", code)?;
+ }
+
+ dst.reset_attrs()?;
+ write!(dst, "\n")?;
+ Ok(())
+}
+
+/// True when stderr is attached to a tty (used to decide whether colored
+/// output is appropriate).
+#[cfg(unix)]
+fn stderr_isatty() -> bool {
+ use libc;
+ unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
+}
+/// True when stderr is a Windows console; `GetConsoleMode` fails for
+/// redirected handles (pipes, files).
+#[cfg(windows)]
+fn stderr_isatty() -> bool {
+ type DWORD = u32;
+ type BOOL = i32;
+ type HANDLE = *mut u8;
+ const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD;
+ extern "system" {
+ fn GetStdHandle(which: DWORD) -> HANDLE;
+ fn GetConsoleMode(hConsoleHandle: HANDLE,
+ lpMode: *mut DWORD) -> BOOL;
+ }
+ unsafe {
+ let handle = GetStdHandle(STD_ERROR_HANDLE);
+ let mut out = 0;
+ // Returns 0 (failure) when the handle is not a console.
+ GetConsoleMode(handle, &mut out) != 0
+ }
+}
+
+/// Sink for emitted diagnostics: either a color-capable terminal or a raw
+/// writer (to which no styling is applied).
+enum Destination {
+ Terminal(Box<term::StderrTerminal>),
+ Raw(Box<Write + Send>),
+}
+
+impl Destination {
+ /// Prefers a terminal backend when stderr supports one, else raw stderr.
+ fn from_stderr() -> Destination {
+ match term::stderr() {
+ Some(t) => Terminal(t),
+ None => Raw(Box::new(io::stderr())),
+ }
+ }
+
+ /// Translates a snippet `Style` into terminal attributes. Styles with
+ /// empty arms intentionally render unstyled.
+ fn apply_style(&mut self,
+ lvl: Level,
+ _kind: &RenderedLineKind,
+ style: Style)
+ -> io::Result<()> {
+ match style {
+ Style::FileNameStyle |
+ Style::LineAndColumn => {
+ }
+ Style::LineNumber => {
+ self.start_attr(term::Attr::Bold)?;
+ self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_BLUE))?;
+ }
+ Style::Quotation => {
+ }
+ Style::OldSkoolNote => {
+ self.start_attr(term::Attr::Bold)?;
+ self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_GREEN))?;
+ }
+ Style::OldSkoolNoteText => {
+ self.start_attr(term::Attr::Bold)?;
+ }
+ // Primary spans/labels take the diagnostic level's own color.
+ Style::UnderlinePrimary | Style::LabelPrimary => {
+ self.start_attr(term::Attr::Bold)?;
+ self.start_attr(term::Attr::ForegroundColor(lvl.color()))?;
+ }
+ Style::UnderlineSecondary | Style::LabelSecondary => {
+ self.start_attr(term::Attr::Bold)?;
+ self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_BLUE))?;
+ }
+ Style::NoStyle => {
+ }
+ }
+ Ok(())
+ }
+
+ /// Applies `attr` on a real terminal; raw sinks ignore styling.
+ fn start_attr(&mut self, attr: term::Attr) -> io::Result<()> {
+ match *self {
+ Terminal(ref mut t) => { t.attr(attr)?; }
+ Raw(_) => { }
+ }
+ Ok(())
+ }
+
+ /// Clears any styling previously applied via `start_attr`.
+ fn reset_attrs(&mut self) -> io::Result<()> {
+ match *self {
+ Terminal(ref mut t) => { t.reset()?; }
+ Raw(_) => { }
+ }
+ Ok(())
+ }
+
+ /// Writes `args` in `color` (terminal only), resetting the color
+ /// *before* any trailing newline -- see the comment below for why.
+ fn print_maybe_styled(&mut self,
+ args: fmt::Arguments,
+ color: term::Attr,
+ print_newline_at_end: bool)
+ -> io::Result<()> {
+ match *self {
+ Terminal(ref mut t) => {
+ t.attr(color)?;
+ // If `msg` ends in a newline, we need to reset the color before
+ // the newline. We're making the assumption that we end up writing
+ // to a `LineBufferedWriter`, which means that emitting the reset
+ // after the newline ends up buffering the reset until we print
+ // another line or exit. Buffering the reset is a problem if we're
+ // sharing the terminal with any other programs (e.g. other rustc
+ // instances via `make -jN`).
+ //
+ // Note that if `msg` contains any internal newlines, this will
+ // result in the `LineBufferedWriter` flushing twice instead of
+ // once, which still leaves the opportunity for interleaved output
+ // to be miscolored. We assume this is rare enough that we don't
+ // have to worry about it.
+ t.write_fmt(args)?;
+ t.reset()?;
+ if print_newline_at_end {
+ t.write_all(b"\n")
+ } else {
+ Ok(())
+ }
+ }
+ Raw(ref mut w) => {
+ w.write_fmt(args)?;
+ if print_newline_at_end {
+ w.write_all(b"\n")
+ } else {
+ Ok(())
+ }
+ }
+ }
+ }
+}
+
+// Forward `Write` directly to the underlying terminal or raw stream.
+impl Write for Destination {
+ fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
+ match *self {
+ Terminal(ref mut t) => t.write(bytes),
+ Raw(ref mut w) => w.write(bytes),
+ }
+ }
+ fn flush(&mut self) -> io::Result<()> {
+ match *self {
+ Terminal(ref mut t) => t.flush(),
+ Raw(ref mut w) => w.flush(),
+ }
+ }
+}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "rustc_errors"]
+#![unstable(feature = "rustc_private", issue = "27812")]
+#![crate_type = "dylib"]
+#![crate_type = "rlib"]
+#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+ html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+ html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![cfg_attr(not(stage0), deny(warnings))]
+
+#![feature(custom_attribute)]
+#![allow(unused_attributes)]
+#![feature(rustc_private)]
+#![feature(staged_api)]
+#![feature(question_mark)]
+#![feature(range_contains)]
+#![feature(libc)]
+#![feature(unicode)]
+
+extern crate serialize;
+extern crate term;
+#[macro_use] extern crate log;
+#[macro_use] extern crate libc;
+extern crate rustc_unicode;
+extern crate serialize as rustc_serialize; // used by deriving
+extern crate syntax_pos;
+
+pub use emitter::ColorConfig;
+
+use self::Level::*;
+use self::RenderSpan::*;
+
+use emitter::{Emitter, EmitterWriter};
+
+use std::cell::{RefCell, Cell};
+use std::{error, fmt};
+use std::rc::Rc;
+use std::thread::panicking;
+
+pub mod emitter;
+pub mod snippet;
+pub mod registry;
+
+use syntax_pos::{BytePos, Loc, FileLinesResult, FileName, MultiSpan, Span, NO_EXPANSION };
+use syntax_pos::{MacroBacktrace};
+
+#[derive(Clone)]
+pub enum RenderSpan {
+ /// A FullSpan renders with an initial line for the
+ /// message, prefixed by file:linenum, followed by a summary of
+ /// the source code covered by the span.
+ FullSpan(MultiSpan),
+
+ /// A suggestion renders with an initial line for the
+ /// message, prefixed by file:linenum, followed by a summary
+ /// of hypothetical source code, where each `String` is spliced
+ /// into the lines in place of the code covered by each span.
+ Suggestion(CodeSuggestion),
+}
+
+/// A suggested rewrite: `substitutes[i]` replaces the code covered by the
+/// i-th primary span of `msp` (lengths must match; see `splice_lines`).
+#[derive(Clone)]
+pub struct CodeSuggestion {
+ pub msp: MultiSpan,
+ pub substitutes: Vec<String>,
+}
+
+/// Minimal span-resolution interface, so this crate can map spans to
+/// files/lines without depending on libsyntax's concrete `CodeMap`.
+pub trait CodeMapper {
+ fn lookup_char_pos(&self, pos: BytePos) -> Loc;
+ fn span_to_lines(&self, sp: Span) -> FileLinesResult;
+ fn span_to_string(&self, sp: Span) -> String;
+ fn span_to_filename(&self, sp: Span) -> FileName;
+ fn macro_backtrace(&self, span: Span) -> Vec<MacroBacktrace>;
+}
+
+impl RenderSpan {
+ /// The `MultiSpan` carried by either variant.
+ fn span(&self) -> &MultiSpan {
+ match *self {
+ FullSpan(ref msp) |
+ Suggestion(CodeSuggestion { ref msp, .. }) =>
+ msp
+ }
+ }
+}
+
+impl CodeSuggestion {
+ /// Returns the assembled code suggestion: the source text covered by
+ /// the primary spans with each span's text replaced by the matching
+ /// entry of `self.substitutes`. Panics if the span/substitute counts
+ /// differ or the spans cannot be resolved to lines.
+ pub fn splice_lines(&self, cm: &CodeMapper) -> String {
+ use syntax_pos::{CharPos, Loc, Pos};
+
+ // Appends to `buf` the part of `line_opt` from column `lo` up to
+ // `hi_opt` (or to end-of-line, plus a newline, when `hi_opt` is None).
+ fn push_trailing(buf: &mut String, line_opt: Option<&str>,
+ lo: &Loc, hi_opt: Option<&Loc>) {
+ let (lo, hi_opt) = (lo.col.to_usize(), hi_opt.map(|hi|hi.col.to_usize()));
+ if let Some(line) = line_opt {
+ if line.len() > lo {
+ buf.push_str(match hi_opt {
+ Some(hi) => &line[lo..hi],
+ None => &line[lo..],
+ });
+ }
+ if hi_opt.is_none() {
+ buf.push('\n');
+ }
+ }
+ }
+
+ let mut primary_spans = self.msp.primary_spans().to_owned();
+
+ assert_eq!(primary_spans.len(), self.substitutes.len());
+ if primary_spans.is_empty() {
+ return String::new();
+ }
+
+ // Assumption: all spans are in the same file, and all spans
+ // are disjoint. Sort in ascending order.
+ primary_spans.sort_by_key(|sp| sp.lo);
+
+ // Find the bounding span. The upper bound must be the *maximum*
+ // `hi` so that every primary span is covered; using `min` here
+ // would truncate the bounding span whenever there is more than
+ // one primary span.
+ let lo = primary_spans.iter().map(|sp| sp.lo).min().unwrap();
+ let hi = primary_spans.iter().map(|sp| sp.hi).max().unwrap();
+ let bounding_span = Span { lo: lo, hi: hi, expn_id: NO_EXPANSION };
+ let lines = cm.span_to_lines(bounding_span).unwrap();
+ assert!(!lines.lines.is_empty());
+
+ // To build up the result, we do this for each span:
+ // - push the line segment trailing the previous span
+ // (at the beginning a "phantom" span pointing at the start of the line)
+ // - push lines between the previous and current span (if any)
+ // - if the previous and current span are not on the same line
+ // push the line segment leading up to the current span
+ // - splice in the span substitution
+ //
+ // Finally push the trailing line segment of the last span
+ let fm = &lines.file;
+ let mut prev_hi = cm.lookup_char_pos(bounding_span.lo);
+ prev_hi.col = CharPos::from_usize(0);
+
+ let mut prev_line = fm.get_line(lines.lines[0].line_index);
+ let mut buf = String::new();
+
+ for (sp, substitute) in primary_spans.iter().zip(self.substitutes.iter()) {
+ let cur_lo = cm.lookup_char_pos(sp.lo);
+ if prev_hi.line == cur_lo.line {
+ push_trailing(&mut buf, prev_line, &prev_hi, Some(&cur_lo));
+ } else {
+ push_trailing(&mut buf, prev_line, &prev_hi, None);
+ // push lines between the previous and current span (if any)
+ for idx in prev_hi.line..(cur_lo.line - 1) {
+ if let Some(line) = fm.get_line(idx) {
+ buf.push_str(line);
+ buf.push('\n');
+ }
+ }
+ if let Some(cur_line) = fm.get_line(cur_lo.line - 1) {
+ buf.push_str(&cur_line[.. cur_lo.col.to_usize()]);
+ }
+ }
+ buf.push_str(substitute);
+ prev_hi = cm.lookup_char_pos(sp.hi);
+ prev_line = fm.get_line(prev_hi.line - 1);
+ }
+ push_trailing(&mut buf, prev_line, &prev_hi, None);
+ // remove trailing newline
+ buf.pop();
+ buf
+ }
+}
+
+/// Used as a return value to signify a fatal error occurred. (It is also
+/// used as the argument to panic at the moment, but that will eventually
+/// not be true.)
+// `#[must_use]` so callers cannot silently drop the fatal-error token.
+#[derive(Copy, Clone, Debug)]
+#[must_use]
+pub struct FatalError;
+
+// Human-readable form shown when the panic payload is printed.
+impl fmt::Display for FatalError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+ write!(f, "parser fatal error")
+ }
+}
+
+// Allows `FatalError` to flow through `Box<Error>`-based call chains.
+impl error::Error for FatalError {
+ fn description(&self) -> &str {
+ "The parser has encountered a fatal error"
+ }
+}
+
+/// Signifies that the compiler died with an explicit call to `.bug`
+/// or `.span_bug` rather than a failed assertion, etc.
+// Used as a panic payload by `Handler::bug`/`span_bug`.
+#[derive(Copy, Clone, Debug)]
+pub struct ExplicitBug;
+
+// Human-readable form shown when the panic payload is printed.
+impl fmt::Display for ExplicitBug {
+ fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+ write!(f, "parser internal bug")
+ }
+}
+
+// Allows `ExplicitBug` to flow through `Box<Error>`-based call chains.
+impl error::Error for ExplicitBug {
+ fn description(&self) -> &str {
+ "The parser has encountered an internal bug"
+ }
+}
+
+/// Used for emitting structured error messages and other diagnostic information.
+/// Must be either `emit()`ed or `cancel()`ed before being dropped (see the
+/// `Drop` impl below, which panics otherwise).
+#[must_use]
+#[derive(Clone)]
+pub struct DiagnosticBuilder<'a> {
+ // The handler that created this builder; receives the diagnostic on emit.
+ handler: &'a Handler,
+ pub level: Level,
+ pub message: String,
+ // Optional error code, e.g. "E0308".
+ pub code: Option<String>,
+ // Primary span(s) plus any labels attached via `span_label`.
+ pub span: MultiSpan,
+ // Attached notes/helps/suggestions, emitted after the main message.
+ pub children: Vec<SubDiagnostic>,
+}
+
+/// For example a note attached to an error.
+#[derive(Clone)]
+pub struct SubDiagnostic {
+ pub level: Level,
+ pub message: String,
+ pub span: MultiSpan,
+ // Optional rendering override (full span or code suggestion).
+ pub render_span: Option<RenderSpan>,
+}
+
+impl<'a> DiagnosticBuilder<'a> {
+ /// Emit the diagnostic. Also cancels the builder, which defuses the
+ /// panic-on-drop bomb in the `Drop` impl below.
+ pub fn emit(&mut self) {
+ if self.cancelled() {
+ return;
+ }
+
+ self.handler.emit.borrow_mut().emit_struct(&self);
+ self.cancel();
+ self.handler.panic_if_treat_err_as_bug();
+
+ // if self.is_fatal() {
+ // panic!(FatalError);
+ // }
+ }
+
+ /// Cancel the diagnostic (a structured diagnostic must either be emitted or
+ /// cancelled or it will panic when dropped).
+ /// BEWARE: if this DiagnosticBuilder is an error, then creating it will
+ /// bump the error count on the Handler and cancelling it won't undo that.
+ /// If you want to decrement the error count you should use `Handler::cancel`.
+ pub fn cancel(&mut self) {
+ self.level = Level::Cancelled;
+ }
+
+ pub fn cancelled(&self) -> bool {
+ self.level == Level::Cancelled
+ }
+
+ pub fn is_fatal(&self) -> bool {
+ self.level == Level::Fatal
+ }
+
+ /// Add a span/label to be included in the resulting snippet.
+ /// This is pushed onto the `MultiSpan` that was created when the
+ /// diagnostic was first built. If you don't call this function at
+ /// all, and you just supplied a `Span` to create the diagnostic,
+ /// then the snippet will just include that `Span`, which is
+ /// called the primary span.
+ pub fn span_label(&mut self, span: Span, label: &fmt::Display)
+ -> &mut DiagnosticBuilder<'a> {
+ self.span.push_span_label(span, format!("{}", label));
+ self
+ }
+
+ pub fn note_expected_found(&mut self,
+ label: &fmt::Display,
+ expected: &fmt::Display,
+ found: &fmt::Display)
+ -> &mut DiagnosticBuilder<'a>
+ {
+ // For now, just attach these as notes
+ self.note(&format!("expected {} `{}`", label, expected));
+ self.note(&format!(" found {} `{}`", label, found));
+ self
+ }
+
+ // The methods below attach child diagnostics; each returns `&mut self`
+ // so calls can be chained before the final `emit()`.
+ pub fn note(&mut self, msg: &str) -> &mut DiagnosticBuilder<'a> {
+ self.sub(Level::Note, msg, MultiSpan::new(), None);
+ self
+ }
+ pub fn span_note<S: Into<MultiSpan>>(&mut self,
+ sp: S,
+ msg: &str)
+ -> &mut DiagnosticBuilder<'a> {
+ self.sub(Level::Note, msg, sp.into(), None);
+ self
+ }
+ pub fn warn(&mut self, msg: &str) -> &mut DiagnosticBuilder<'a> {
+ self.sub(Level::Warning, msg, MultiSpan::new(), None);
+ self
+ }
+ pub fn span_warn<S: Into<MultiSpan>>(&mut self,
+ sp: S,
+ msg: &str)
+ -> &mut DiagnosticBuilder<'a> {
+ self.sub(Level::Warning, msg, sp.into(), None);
+ self
+ }
+ pub fn help(&mut self , msg: &str) -> &mut DiagnosticBuilder<'a> {
+ self.sub(Level::Help, msg, MultiSpan::new(), None);
+ self
+ }
+ pub fn span_help<S: Into<MultiSpan>>(&mut self,
+ sp: S,
+ msg: &str)
+ -> &mut DiagnosticBuilder<'a> {
+ self.sub(Level::Help, msg, sp.into(), None);
+ self
+ }
+ /// Prints out a message with a suggested edit of the code.
+ ///
+ /// See `diagnostic::RenderSpan::Suggestion` for more information.
+ pub fn span_suggestion<S: Into<MultiSpan>>(&mut self,
+ sp: S,
+ msg: &str,
+ suggestion: String)
+ -> &mut DiagnosticBuilder<'a> {
+ self.sub(Level::Help, msg, MultiSpan::new(), Some(Suggestion(CodeSuggestion {
+ msp: sp.into(),
+ substitutes: vec![suggestion],
+ })));
+ self
+ }
+
+ pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self {
+ self.span = sp.into();
+ self
+ }
+
+ pub fn code(&mut self, s: String) -> &mut Self {
+ self.code = Some(s);
+ self
+ }
+
+ pub fn message(&self) -> &str {
+ &self.message
+ }
+
+ pub fn level(&self) -> Level {
+ self.level
+ }
+
+ /// Convenience function for internal use, clients should use one of the
+ /// struct_* methods on Handler.
+ fn new(handler: &'a Handler,
+ level: Level,
+ message: &str) -> DiagnosticBuilder<'a> {
+ DiagnosticBuilder {
+ handler: handler,
+ level: level,
+ message: message.to_owned(),
+ code: None,
+ span: MultiSpan::new(),
+ children: vec![],
+ }
+ }
+
+ /// Convenience function for internal use, clients should use one of the
+ /// public methods above.
+ fn sub(&mut self,
+ level: Level,
+ message: &str,
+ span: MultiSpan,
+ render_span: Option<RenderSpan>) {
+ let sub = SubDiagnostic {
+ level: level,
+ message: message.to_owned(),
+ span: span,
+ render_span: render_span,
+ };
+ self.children.push(sub);
+ }
+}
+
+// Debug output shows only the primary message.
+impl<'a> fmt::Debug for DiagnosticBuilder<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.message.fmt(f)
+ }
+}
+
+/// Destructor bomb - a DiagnosticBuilder must be either emitted or cancelled or
+/// we emit a bug.
+impl<'a> Drop for DiagnosticBuilder<'a> {
+ fn drop(&mut self) {
+ // Skip the check while unwinding, so a builder dropped during a
+ // panic does not obscure the original failure.
+ if !panicking() && !self.cancelled() {
+ self.handler.emit.borrow_mut().emit(&MultiSpan::new(),
+ "Error constructed but not emitted",
+ None,
+ Bug);
+ panic!();
+ }
+ }
+}
+
+/// A handler deals with errors; certain errors
+/// (fatal, bug, unimpl) may cause immediate exit,
+/// others log errors for later reporting.
+pub struct Handler {
+ // Running count of emitted (non-cancelled) errors.
+ err_count: Cell<usize>,
+ // The emitter actually rendering diagnostics (tty, JSON, test, ...).
+ emit: RefCell<Box<Emitter>>,
+ pub can_emit_warnings: bool,
+ // When set, any error immediately panics (-Z treat-err-as-bug).
+ treat_err_as_bug: bool,
+ continue_after_error: Cell<bool>,
+ // A bug report deferred until `abort_if_errors` runs with no errors.
+ delayed_span_bug: RefCell<Option<(MultiSpan, String)>>,
+}
+
+impl Handler {
+ pub fn with_tty_emitter(color_config: ColorConfig,
+ registry: Option<registry::Registry>,
+ can_emit_warnings: bool,
+ treat_err_as_bug: bool,
+ cm: Rc<CodeMapper>)
+ -> Handler {
+ let emitter = Box::new(EmitterWriter::stderr(color_config, registry, cm,
+ snippet::FormatMode::EnvironmentSelected));
+ Handler::with_emitter(can_emit_warnings, treat_err_as_bug, emitter)
+ }
+
+ pub fn with_emitter(can_emit_warnings: bool,
+ treat_err_as_bug: bool,
+ e: Box<Emitter>) -> Handler {
+ Handler {
+ err_count: Cell::new(0),
+ emit: RefCell::new(e),
+ can_emit_warnings: can_emit_warnings,
+ treat_err_as_bug: treat_err_as_bug,
+ continue_after_error: Cell::new(true),
+ delayed_span_bug: RefCell::new(None),
+ }
+ }
+
+ pub fn set_continue_after_error(&self, continue_after_error: bool) {
+ self.continue_after_error.set(continue_after_error);
+ }
+
+ /// A pre-cancelled builder, useful when a diagnostic is required
+ /// syntactically but nothing should actually be reported.
+ pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> {
+ DiagnosticBuilder::new(self, Level::Cancelled, "")
+ }
+
+ pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
+ sp: S,
+ msg: &str)
+ -> DiagnosticBuilder<'a> {
+ let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
+ result.set_span(sp);
+ if !self.can_emit_warnings {
+ result.cancel();
+ }
+ result
+ }
+ pub fn struct_span_warn_with_code<'a, S: Into<MultiSpan>>(&'a self,
+ sp: S,
+ msg: &str,
+ code: &str)
+ -> DiagnosticBuilder<'a> {
+ let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
+ result.set_span(sp);
+ result.code(code.to_owned());
+ if !self.can_emit_warnings {
+ result.cancel();
+ }
+ result
+ }
+ pub fn struct_warn<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
+ let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
+ if !self.can_emit_warnings {
+ result.cancel();
+ }
+ result
+ }
+ // NOTE: the struct_*err/fatal constructors bump the error count at
+ // *creation* time; `Handler::cancel` is the only way to undo that.
+ pub fn struct_span_err<'a, S: Into<MultiSpan>>(&'a self,
+ sp: S,
+ msg: &str)
+ -> DiagnosticBuilder<'a> {
+ self.bump_err_count();
+ let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
+ result.set_span(sp);
+ result
+ }
+ pub fn struct_span_err_with_code<'a, S: Into<MultiSpan>>(&'a self,
+ sp: S,
+ msg: &str,
+ code: &str)
+ -> DiagnosticBuilder<'a> {
+ self.bump_err_count();
+ let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
+ result.set_span(sp);
+ result.code(code.to_owned());
+ result
+ }
+ pub fn struct_err<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
+ self.bump_err_count();
+ DiagnosticBuilder::new(self, Level::Error, msg)
+ }
+ pub fn struct_span_fatal<'a, S: Into<MultiSpan>>(&'a self,
+ sp: S,
+ msg: &str)
+ -> DiagnosticBuilder<'a> {
+ self.bump_err_count();
+ let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
+ result.set_span(sp);
+ result
+ }
+ pub fn struct_span_fatal_with_code<'a, S: Into<MultiSpan>>(&'a self,
+ sp: S,
+ msg: &str,
+ code: &str)
+ -> DiagnosticBuilder<'a> {
+ self.bump_err_count();
+ let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
+ result.set_span(sp);
+ result.code(code.to_owned());
+ result
+ }
+ pub fn struct_fatal<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
+ self.bump_err_count();
+ DiagnosticBuilder::new(self, Level::Fatal, msg)
+ }
+
+ /// Cancels `err` and, for error/fatal diagnostics, *decrements* the
+ /// error count to undo the bump done when the builder was created
+ /// (see `DiagnosticBuilder::cancel`'s docs, which direct callers here
+ /// for exactly that purpose). Incrementing here would double-count.
+ pub fn cancel(&mut self, err: &mut DiagnosticBuilder) {
+ if err.level == Level::Error || err.level == Level::Fatal {
+ assert!(self.has_errors());
+ self.err_count.set(self.err_count.get() - 1);
+ }
+ err.cancel();
+ }
+
+ fn panic_if_treat_err_as_bug(&self) {
+ if self.treat_err_as_bug {
+ panic!("encountered error with `-Z treat_err_as_bug`");
+ }
+ }
+
+ pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str)
+ -> FatalError {
+ self.emit(&sp.into(), msg, Fatal);
+ self.bump_err_count();
+ self.panic_if_treat_err_as_bug();
+ return FatalError;
+ }
+ pub fn span_fatal_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: &str)
+ -> FatalError {
+ self.emit_with_code(&sp.into(), msg, code, Fatal);
+ self.bump_err_count();
+ self.panic_if_treat_err_as_bug();
+ return FatalError;
+ }
+ pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
+ self.emit(&sp.into(), msg, Error);
+ self.bump_err_count();
+ self.panic_if_treat_err_as_bug();
+ }
+ pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: &str) {
+ self.emit_with_code(&sp.into(), msg, code, Error);
+ self.bump_err_count();
+ self.panic_if_treat_err_as_bug();
+ }
+ pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
+ self.emit(&sp.into(), msg, Warning);
+ }
+ pub fn span_warn_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: &str) {
+ self.emit_with_code(&sp.into(), msg, code, Warning);
+ }
+ pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
+ self.emit(&sp.into(), msg, Bug);
+ panic!(ExplicitBug);
+ }
+ /// Records a bug to be reported only if compilation otherwise succeeds
+ /// (checked in `abort_if_errors`).
+ pub fn delay_span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
+ let mut delayed = self.delayed_span_bug.borrow_mut();
+ *delayed = Some((sp.into(), msg.to_string()));
+ }
+ pub fn span_bug_no_panic<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
+ self.emit(&sp.into(), msg, Bug);
+ self.bump_err_count();
+ }
+ pub fn span_note_without_error<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
+ self.emit.borrow_mut().emit(&sp.into(), msg, None, Note);
+ }
+ pub fn span_unimpl<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
+ self.span_bug(sp, &format!("unimplemented {}", msg));
+ }
+ pub fn fatal(&self, msg: &str) -> FatalError {
+ if self.treat_err_as_bug {
+ self.bug(msg);
+ }
+ self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Fatal);
+ self.bump_err_count();
+ FatalError
+ }
+ pub fn err(&self, msg: &str) {
+ if self.treat_err_as_bug {
+ self.bug(msg);
+ }
+ self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Error);
+ self.bump_err_count();
+ }
+ pub fn warn(&self, msg: &str) {
+ self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Warning);
+ }
+ pub fn note_without_error(&self, msg: &str) {
+ self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Note);
+ }
+ pub fn bug(&self, msg: &str) -> ! {
+ self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Bug);
+ panic!(ExplicitBug);
+ }
+ pub fn unimpl(&self, msg: &str) -> ! {
+ self.bug(&format!("unimplemented {}", msg));
+ }
+
+ pub fn bump_err_count(&self) {
+ self.err_count.set(self.err_count.get() + 1);
+ }
+
+ pub fn err_count(&self) -> usize {
+ self.err_count.get()
+ }
+
+ pub fn has_errors(&self) -> bool {
+ self.err_count.get() > 0
+ }
+ /// Panics with a fatal summary when errors were reported; with no
+ /// errors, fires any delayed span bug instead.
+ pub fn abort_if_errors(&self) {
+ let s;
+ match self.err_count.get() {
+ 0 => {
+ let delayed_bug = self.delayed_span_bug.borrow();
+ match *delayed_bug {
+ Some((ref span, ref errmsg)) => {
+ self.span_bug(span.clone(), errmsg);
+ },
+ _ => {}
+ }
+
+ return;
+ }
+ 1 => s = "aborting due to previous error".to_string(),
+ _ => {
+ s = format!("aborting due to {} previous errors",
+ self.err_count.get());
+ }
+ }
+
+ panic!(self.fatal(&s));
+ }
+ pub fn emit(&self,
+ msp: &MultiSpan,
+ msg: &str,
+ lvl: Level) {
+ if lvl == Warning && !self.can_emit_warnings { return }
+ self.emit.borrow_mut().emit(&msp, msg, None, lvl);
+ if !self.continue_after_error.get() { self.abort_if_errors(); }
+ }
+ pub fn emit_with_code(&self,
+ msp: &MultiSpan,
+ msg: &str,
+ code: &str,
+ lvl: Level) {
+ if lvl == Warning && !self.can_emit_warnings { return }
+ self.emit.borrow_mut().emit(&msp, msg, Some(code), lvl);
+ if !self.continue_after_error.get() { self.abort_if_errors(); }
+ }
+}
+
+
+/// Severity of a diagnostic; ordering here is roughly most- to
+/// least-severe.
+#[derive(Copy, PartialEq, Clone, Debug)]
+pub enum Level {
+ Bug,
+ Fatal,
+ // An error which while not immediately fatal, should stop the compiler
+ // progressing beyond the current phase.
+ PhaseFatal,
+ Error,
+ Warning,
+ Note,
+ Help,
+ // A diagnostic that was cancelled and must not be rendered.
+ Cancelled,
+}
+
+// Renders the same label as `to_str` ("error", "warning", ...).
+impl fmt::Display for Level {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.to_str().fmt(f)
+ }
+}
+
+impl Level {
+ /// Terminal color used for this severity; `Cancelled` is never
+ /// rendered, hence unreachable.
+ pub fn color(self) -> term::color::Color {
+ match self {
+ Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED,
+ Warning => term::color::YELLOW,
+ Note => term::color::BRIGHT_GREEN,
+ Help => term::color::BRIGHT_CYAN,
+ Cancelled => unreachable!(),
+ }
+ }
+
+ /// User-facing label printed before the message.
+ pub fn to_str(self) -> &'static str {
+ match self {
+ Bug => "error: internal compiler error",
+ Fatal | PhaseFatal | Error => "error",
+ Warning => "warning",
+ Note => "note",
+ Help => "help",
+ Cancelled => panic!("Shouldn't call on cancelled error"),
+ }
+ }
+}
+
+pub fn expect<T, M>(diag: &Handler, opt: Option<T>, msg: M) -> T where
+ M: FnOnce() -> String,
+{
+ match opt {
+ Some(t) => t,
+ None => diag.bug(&msg()),
+ }
+}
+
+/// True if we should use the old-skool error format style. This is
+/// the default setting until the new errors are deemed stable enough
+/// for general use.
+///
+/// FIXME(#33240)
+#[cfg(not(test))]
+pub fn check_old_skool() -> bool {
+ use std::env;
+ // Opt in to the new format by setting RUST_NEW_ERROR_FORMAT.
+ env::var("RUST_NEW_ERROR_FORMAT").is_err()
+}
+
+/// For unit tests, use the new format.
+// Test builds ignore the environment so test output is deterministic.
+#[cfg(test)]
+pub fn check_old_skool() -> bool {
+ false
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::collections::HashMap;
+
+/// Maps error codes (e.g. "E0308") to their extended descriptions.
+#[derive(Clone)]
+pub struct Registry {
+ descriptions: HashMap<&'static str, &'static str>
+}
+
+impl Registry {
+ /// Builds a registry from `(code, description)` pairs; later pairs
+ /// overwrite earlier duplicates.
+ pub fn new(descriptions: &[(&'static str, &'static str)]) -> Registry {
+ Registry { descriptions: descriptions.iter().cloned().collect() }
+ }
+
+ /// Looks up the extended description for an error code, if registered.
+ pub fn find_description(&self, code: &str) -> Option<&'static str> {
+ self.descriptions.get(code).cloned()
+ }
+}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Code for annotating snippets.
+
+use syntax_pos::{Span, FileMap, CharPos, LineInfo};
+use check_old_skool;
+use CodeMapper;
+use std::cmp;
+use std::rc::Rc;
+use std::mem;
+
+/// Which error rendering style to use; `EnvironmentSelected` defers the
+/// choice to `check_old_skool()` at render time.
+#[derive(Clone)]
+pub enum FormatMode {
+ NewErrorFormat,
+ OriginalErrorFormat,
+ EnvironmentSelected
+}
+
+/// Accumulates annotated spans, grouped per file, and renders them into
+/// styled snippet lines (see `render_lines`).
+#[derive(Clone)]
+pub struct SnippetData {
+ codemap: Rc<CodeMapper>,
+ files: Vec<FileInfo>,
+ format_mode: FormatMode,
+}
+
+/// Per-file collection of annotated lines within a snippet.
+#[derive(Clone)]
+pub struct FileInfo {
+ file: Rc<FileMap>,
+
+ /// The "primary file", if any, gets a `-->` marker instead of
+ /// `>>>`, and has a line-number/column printed and not just a
+ /// filename. It appears first in the listing. It is known to
+ /// contain at least one primary span, though primary spans (which
+ /// are designated with `^^^`) may also occur in other files.
+ primary_span: Option<Span>,
+
+ lines: Vec<Line>,
+
+ /// The type of error format to render. We keep it here so that
+ /// it's easy to configure for both tests and regular usage
+ format_mode: FormatMode,
+}
+
+/// One annotated source line: its 0-based index plus all annotations
+/// attached to it.
+#[derive(Clone, Debug)]
+struct Line {
+ line_index: usize,
+ annotations: Vec<Annotation>,
+}
+
+// NOTE: derives Ord so annotations sort by start column (field order
+// matters -- see comment on `start_col`).
+#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
+struct Annotation {
+ /// Start column, 0-based indexing -- counting *characters*, not
+ /// utf-8 bytes. Note that it is important that this field goes
+ /// first, so that when we sort, we sort orderings by start
+ /// column.
+ start_col: usize,
+
+ /// End column within the line (exclusive)
+ end_col: usize,
+
+ /// Is this annotation derived from primary span
+ is_primary: bool,
+
+ /// Is this a large span minimized down to a smaller span
+ is_minimized: bool,
+
+ /// Optional label to display adjacent to the annotation.
+ label: Option<String>,
+}
+
+/// A fully rendered output line: styled text runs plus the kind of line
+/// (source, annotation, filename, ...), which drives prefixing/coloring.
+#[derive(Debug)]
+pub struct RenderedLine {
+ pub text: Vec<StyledString>,
+ pub kind: RenderedLineKind,
+}
+
+/// A run of text carrying a single style.
+#[derive(Debug)]
+pub struct StyledString {
+ pub text: String,
+ pub style: Style,
+}
+
+/// 2-D character grid with a parallel per-character style grid; rendered
+/// into `RenderedLine`s by coalescing equal-style runs.
+#[derive(Debug)]
+pub struct StyledBuffer {
+ text: Vec<Vec<char>>,
+ styles: Vec<Vec<Style>>
+}
+
+/// Logical styling categories; mapped to terminal attributes by
+/// `Destination::apply_style` in the emitter.
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub enum Style {
+ FileNameStyle,
+ LineAndColumn,
+ LineNumber,
+ Quotation,
+ UnderlinePrimary,
+ UnderlineSecondary,
+ LabelPrimary,
+ LabelSecondary,
+ OldSkoolNoteText,
+ OldSkoolNote,
+ NoStyle,
+}
+
+/// What a rendered line represents; determines its gutter prefix
+/// (line number, "...", or nothing -- see `prefix()` below).
+#[derive(Debug, Clone)]
+pub enum RenderedLineKind {
+ PrimaryFileName,
+ OtherFileName,
+ SourceText {
+ file: Rc<FileMap>,
+ line_index: usize,
+ },
+ Annotations,
+ Elision,
+}
+
+impl SnippetData {
+ /// Creates an empty snippet; when `primary_span` is given, its file is
+ /// registered up-front so it sorts first in the rendered output.
+ pub fn new(codemap: Rc<CodeMapper>,
+ primary_span: Option<Span>,
+ format_mode: FormatMode) // (*)
+ -> Self {
+ // (*) The primary span indicates the file that must appear
+ // first, and which will have a line number etc in its
+ // name. Outside of tests, this is always `Some`, but for many
+ // tests it's not relevant to test this portion of the logic,
+ // and it's tedious to pick a primary span (read: tedious to
+ // port older tests that predate the existence of a primary
+ // span).
+
+ debug!("SnippetData::new(primary_span={:?})", primary_span);
+
+ let mut data = SnippetData {
+ codemap: codemap.clone(),
+ files: vec![],
+ format_mode: format_mode.clone()
+ };
+ if let Some(primary_span) = primary_span {
+ let lo = codemap.lookup_char_pos(primary_span.lo);
+ data.files.push(
+ FileInfo {
+ file: lo.file,
+ primary_span: Some(primary_span),
+ lines: vec![],
+ format_mode: format_mode.clone(),
+ });
+ }
+ data
+ }
+
+ /// Records `span` (and optional label) against the file(s) it covers.
+ pub fn push(&mut self, span: Span, is_primary: bool, label: Option<String>) {
+ debug!("SnippetData::push(span={:?}, is_primary={}, label={:?})",
+ span, is_primary, label);
+
+ let file_lines = match self.codemap.span_to_lines(span) {
+ Ok(file_lines) => file_lines,
+ Err(_) => {
+ // ignore unprintable spans completely.
+ return;
+ }
+ };
+
+ self.file(&file_lines.file)
+ .push_lines(&file_lines.lines, is_primary, label);
+ }
+
+ /// Finds (by file name) or lazily creates the `FileInfo` for `file_map`.
+ fn file(&mut self, file_map: &Rc<FileMap>) -> &mut FileInfo {
+ let index = self.files.iter().position(|f| f.file.name == file_map.name);
+ if let Some(index) = index {
+ return &mut self.files[index];
+ }
+
+ self.files.push(
+ FileInfo {
+ file: file_map.clone(),
+ lines: vec![],
+ primary_span: None,
+ format_mode: self.format_mode.clone()
+ });
+ self.files.last_mut().unwrap()
+ }
+
+ /// Renders every file's annotated lines, then applies gutter prefixes
+ /// and trims trailing whitespace.
+ pub fn render_lines(&self) -> Vec<RenderedLine> {
+ debug!("SnippetData::render_lines()");
+
+ let mut rendered_lines: Vec<_> =
+ self.files.iter()
+ .flat_map(|f| f.render_file_lines(&self.codemap))
+ .collect();
+ prepend_prefixes(&mut rendered_lines, &self.format_mode);
+ trim_lines(&mut rendered_lines);
+ rendered_lines
+ }
+}
+
+/// Anything consumable into a `String`; lets `RenderedLine::from` accept
+/// both `String` and `Vec<char>` sources.
+pub trait StringSource {
+ fn make_string(self) -> String;
+}
+
+// A `String` is already its own string; identity conversion.
+impl StringSource for String {
+ fn make_string(self) -> String {
+ self
+ }
+}
+
+// Collects the characters into a freshly allocated `String`.
+impl StringSource for Vec<char> {
+ fn make_string(self) -> String {
+ self.into_iter().collect()
+ }
+}
+
+// Builds a single-run rendered line from `(text, style, kind)`.
+impl<S> From<(S, Style, RenderedLineKind)> for RenderedLine
+ where S: StringSource
+{
+ fn from((text, style, kind): (S, Style, RenderedLineKind)) -> Self {
+ RenderedLine {
+ text: vec![StyledString {
+ text: text.make_string(),
+ style: style,
+ }],
+ kind: kind,
+ }
+ }
+}
+
+// Builds a two-run rendered line from `(text1, style1, text2, style2, kind)`.
+impl<S1,S2> From<(S1, Style, S2, Style, RenderedLineKind)> for RenderedLine
+ where S1: StringSource, S2: StringSource
+{
+ fn from(tuple: (S1, Style, S2, Style, RenderedLineKind)) -> Self {
+ let (text1, style1, text2, style2, kind) = tuple;
+ RenderedLine {
+ text: vec![
+ StyledString {
+ text: text1.make_string(),
+ style: style1,
+ },
+ StyledString {
+ text: text2.make_string(),
+ style: style2,
+ }
+ ],
+ kind: kind,
+ }
+ }
+}
+
+impl RenderedLine {
+ fn trim_last(&mut self) {
+ if let Some(last_text) = self.text.last_mut() {
+ let len = last_text.text.trim_right().len();
+ last_text.text.truncate(len);
+ }
+ }
+}
+
+impl RenderedLineKind {
+ fn prefix(&self) -> StyledString {
+ match *self {
+ RenderedLineKind::SourceText { file: _, line_index } =>
+ StyledString {
+ text: format!("{}", line_index + 1),
+ style: Style::LineNumber,
+ },
+ RenderedLineKind::Elision =>
+ StyledString {
+ text: String::from("..."),
+ style: Style::LineNumber,
+ },
+ RenderedLineKind::PrimaryFileName |
+ RenderedLineKind::OtherFileName |
+ RenderedLineKind::Annotations =>
+ StyledString {
+ text: String::from(""),
+ style: Style::LineNumber,
+ },
+ }
+ }
+}
+
+impl StyledBuffer {
+ fn new() -> StyledBuffer {
+ StyledBuffer { text: vec![], styles: vec![] }
+ }
+
+ fn render(&self, source_kind: RenderedLineKind) -> Vec<RenderedLine> {
+ let mut output: Vec<RenderedLine> = vec![];
+ let mut styled_vec: Vec<StyledString> = vec![];
+
+ for (row, row_style) in self.text.iter().zip(&self.styles) {
+ let mut current_style = Style::NoStyle;
+ let mut current_text = String::new();
+
+ for (&c, &s) in row.iter().zip(row_style) {
+ if s != current_style {
+ if !current_text.is_empty() {
+ styled_vec.push(StyledString { text: current_text, style: current_style });
+ }
+ current_style = s;
+ current_text = String::new();
+ }
+ current_text.push(c);
+ }
+ if !current_text.is_empty() {
+ styled_vec.push(StyledString { text: current_text, style: current_style });
+ }
+
+ if output.is_empty() {
+ //We know our first output line is source and the rest are highlights and labels
+ output.push(RenderedLine { text: styled_vec, kind: source_kind.clone() });
+ } else {
+ output.push(RenderedLine { text: styled_vec, kind: RenderedLineKind::Annotations });
+ }
+ styled_vec = vec![];
+ }
+
+ output
+ }
+
+ fn putc(&mut self, line: usize, col: usize, chr: char, style: Style) {
+ while line >= self.text.len() {
+ self.text.push(vec![]);
+ self.styles.push(vec![]);
+ }
+
+ if col < self.text[line].len() {
+ self.text[line][col] = chr;
+ self.styles[line][col] = style;
+ } else {
+ let mut i = self.text[line].len();
+ while i < col {
+ let s = match self.text[0].get(i) {
+ Some(&'\t') => '\t',
+ _ => ' '
+ };
+ self.text[line].push(s);
+ self.styles[line].push(Style::NoStyle);
+ i += 1;
+ }
+ self.text[line].push(chr);
+ self.styles[line].push(style);
+ }
+ }
+
+ fn puts(&mut self, line: usize, col: usize, string: &str, style: Style) {
+ let mut n = col;
+ for c in string.chars() {
+ self.putc(line, n, c, style);
+ n += 1;
+ }
+ }
+
+ fn set_style(&mut self, line: usize, col: usize, style: Style) {
+ if self.styles.len() > line && self.styles[line].len() > col {
+ self.styles[line][col] = style;
+ }
+ }
+
+ fn append(&mut self, line: usize, string: &str, style: Style) {
+ if line >= self.text.len() {
+ self.puts(line, 0, string, style);
+ } else {
+ let col = self.text[line].len();
+ self.puts(line, col, string, style);
+ }
+ }
+}
+
+impl FileInfo {
+ fn push_lines(&mut self,
+ lines: &[LineInfo],
+ is_primary: bool,
+ label: Option<String>) {
+ assert!(lines.len() > 0);
+
+ // If a span covers multiple lines, we reduce it to a single
+ // point at the start of the span. This means that instead
+ // of producing output like this:
+ //
+ // ```
+ // --> foo.rs:2:1
+ // 2 |> fn conflicting_items<'grammar>(state: &LR0State<'grammar>)
+ // |> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ // 3 |> -> Set<LR0Item<'grammar>>
+ // |> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ // (and so on)
+ // ```
+ //
+ // we produce:
+ //
+ // ```
+ // --> foo.rs:2:1
+ // 2 |> fn conflicting_items<'grammar>(state: &LR0State<'grammar>)
+ // ^
+ // ```
+ //
+ // Basically, although this loses information, multi-line spans just
+ // never look good.
+
+ let (line, start_col, mut end_col, is_minimized) = if lines.len() == 1 {
+ (lines[0].line_index, lines[0].start_col, lines[0].end_col, false)
+ } else {
+ (lines[0].line_index, lines[0].start_col, CharPos(lines[0].start_col.0 + 1), true)
+ };
+
+ // Watch out for "empty spans". If we get a span like 6..6, we
+ // want to just display a `^` at 6, so convert that to
+ // 6..7. This is degenerate input, but it's best to degrade
+        // gracefully -- and the parser likes to supply a span like
+ // that for EOF, in particular.
+ if start_col == end_col {
+ end_col.0 += 1;
+ }
+
+ let index = self.ensure_source_line(line);
+ self.lines[index].push_annotation(start_col,
+ end_col,
+ is_primary,
+ is_minimized,
+ label);
+ }
+
+ /// Ensure that we have a `Line` struct corresponding to
+ /// `line_index` in the file. If we already have some other lines,
+ /// then this will add the intervening lines to ensure that we
+ /// have a complete snippet. (Note that when we finally display,
+ /// some of those lines may be elided.)
+ fn ensure_source_line(&mut self, line_index: usize) -> usize {
+ if self.lines.is_empty() {
+ self.lines.push(Line::new(line_index));
+ return 0;
+ }
+
+ // Find the range of lines we have thus far.
+ let first_line_index = self.lines.first().unwrap().line_index;
+ let last_line_index = self.lines.last().unwrap().line_index;
+ assert!(first_line_index <= last_line_index);
+
+ // If the new line is lower than all the lines we have thus
+ // far, then insert the new line and any intervening lines at
+ // the front. In a silly attempt at micro-optimization, we
+ // don't just call `insert` repeatedly, but instead make a new
+ // (empty) vector, pushing the new lines onto it, and then
+ // appending the old vector.
+ if line_index < first_line_index {
+ let lines = mem::replace(&mut self.lines, vec![]);
+ self.lines.extend(
+ (line_index .. first_line_index)
+ .map(|line| Line::new(line))
+ .chain(lines));
+ return 0;
+ }
+
+ // If the new line comes after the ones we have so far, insert
+ // lines for it.
+ if line_index > last_line_index {
+ self.lines.extend(
+ (last_line_index+1 .. line_index+1)
+ .map(|line| Line::new(line)));
+ return self.lines.len() - 1;
+ }
+
+ // Otherwise it should already exist.
+ return line_index - first_line_index;
+ }
+
+ fn render_file_lines(&self, codemap: &Rc<CodeMapper>) -> Vec<RenderedLine> {
+ let old_school = match self.format_mode {
+ FormatMode::OriginalErrorFormat => true,
+ FormatMode::NewErrorFormat => false,
+ FormatMode::EnvironmentSelected => check_old_skool()
+ };
+
+ // As a first step, we elide any instance of more than one
+ // continuous unannotated line.
+
+ let mut lines_iter = self.lines.iter();
+ let mut output = vec![];
+
+ // First insert the name of the file.
+ if !old_school {
+ match self.primary_span {
+ Some(span) => {
+ let lo = codemap.lookup_char_pos(span.lo);
+ output.push(RenderedLine {
+ text: vec![StyledString {
+ text: lo.file.name.clone(),
+ style: Style::FileNameStyle,
+ }, StyledString {
+ text: format!(":{}:{}", lo.line, lo.col.0 + 1),
+ style: Style::LineAndColumn,
+ }],
+ kind: RenderedLineKind::PrimaryFileName,
+ });
+ output.push(RenderedLine {
+ text: vec![StyledString {
+ text: "".to_string(),
+ style: Style::FileNameStyle,
+ }],
+ kind: RenderedLineKind::Annotations,
+ });
+ }
+ None => {
+ output.push(RenderedLine {
+ text: vec![StyledString {
+ text: self.file.name.clone(),
+ style: Style::FileNameStyle,
+ }],
+ kind: RenderedLineKind::OtherFileName,
+ });
+ output.push(RenderedLine {
+ text: vec![StyledString {
+ text: "".to_string(),
+ style: Style::FileNameStyle,
+ }],
+ kind: RenderedLineKind::Annotations,
+ });
+ }
+ }
+ }
+
+ let mut next_line = lines_iter.next();
+ while next_line.is_some() {
+ // Consume lines with annotations.
+ while let Some(line) = next_line {
+ if line.annotations.is_empty() { break; }
+
+ let mut rendered_lines = self.render_line(line);
+ assert!(!rendered_lines.is_empty());
+ if old_school {
+ match self.primary_span {
+ Some(span) => {
+ let lo = codemap.lookup_char_pos(span.lo);
+ let hi = codemap.lookup_char_pos(span.hi);
+ //Before each secondary line in old skool-mode, print the label
+ //as an old-style note
+ if !line.annotations[0].is_primary {
+ if let Some(ann) = line.annotations[0].label.clone() {
+ output.push(RenderedLine {
+ text: vec![StyledString {
+ text: lo.file.name.clone(),
+ style: Style::FileNameStyle,
+ }, StyledString {
+ text: format!(":{}:{}: {}:{} ", lo.line, lo.col.0 + 1,
+ hi.line, hi.col.0+1),
+ style: Style::LineAndColumn,
+ }, StyledString {
+ text: format!("note: "),
+ style: Style::OldSkoolNote,
+ }, StyledString {
+ text: format!("{}", ann),
+ style: Style::OldSkoolNoteText,
+ }],
+ kind: RenderedLineKind::Annotations,
+ });
+ }
+ }
+ rendered_lines[0].text.insert(0, StyledString {
+ text: format!(":{} ", lo.line),
+ style: Style::LineAndColumn,
+ });
+ rendered_lines[0].text.insert(0, StyledString {
+ text: lo.file.name.clone(),
+ style: Style::FileNameStyle,
+ });
+ let gap_amount =
+ rendered_lines[0].text[0].text.len() +
+ rendered_lines[0].text[1].text.len();
+ assert!(rendered_lines.len() >= 2,
+ "no annotations resulted from: {:?}",
+ line);
+ for i in 1..rendered_lines.len() {
+ rendered_lines[i].text.insert(0, StyledString {
+ text: vec![" "; gap_amount].join(""),
+ style: Style::NoStyle
+ });
+ }
+ }
+ _ =>()
+ }
+ }
+ output.append(&mut rendered_lines);
+ next_line = lines_iter.next();
+ }
+
+ // Emit lines without annotations, but only if they are
+ // followed by a line with an annotation.
+ let unannotated_line = next_line;
+ let mut unannotated_lines = 0;
+ while let Some(line) = next_line {
+ if !line.annotations.is_empty() { break; }
+ unannotated_lines += 1;
+ next_line = lines_iter.next();
+ }
+ if unannotated_lines > 1 {
+ output.push(RenderedLine::from((String::new(),
+ Style::NoStyle,
+ RenderedLineKind::Elision)));
+ } else if let Some(line) = unannotated_line {
+ output.append(&mut self.render_line(line));
+ }
+ }
+
+ output
+ }
+
+ fn render_line(&self, line: &Line) -> Vec<RenderedLine> {
+ let old_school = match self.format_mode {
+ FormatMode::OriginalErrorFormat => true,
+ FormatMode::NewErrorFormat => false,
+ FormatMode::EnvironmentSelected => check_old_skool()
+ };
+
+ let source_string = self.file.get_line(line.line_index)
+ .unwrap_or("");
+ let source_kind = RenderedLineKind::SourceText {
+ file: self.file.clone(),
+ line_index: line.line_index,
+ };
+
+ let mut styled_buffer = StyledBuffer::new();
+
+ // First create the source line we will highlight.
+ styled_buffer.append(0, &source_string, Style::Quotation);
+
+ if line.annotations.is_empty() {
+ return styled_buffer.render(source_kind);
+ }
+
+ // We want to display like this:
+ //
+ // vec.push(vec.pop().unwrap());
+ // --- ^^^ _ previous borrow ends here
+ // | |
+ // | error occurs here
+ // previous borrow of `vec` occurs here
+ //
+ // But there are some weird edge cases to be aware of:
+ //
+ // vec.push(vec.pop().unwrap());
+ // -------- - previous borrow ends here
+ // ||
+ // |this makes no sense
+ // previous borrow of `vec` occurs here
+ //
+ // For this reason, we group the lines into "highlight lines"
+ // and "annotations lines", where the highlight lines have the `~`.
+
+ //let mut highlight_line = Self::whitespace(&source_string);
+
+ // Sort the annotations by (start, end col)
+ let mut annotations = line.annotations.clone();
+ annotations.sort();
+
+ // Next, create the highlight line.
+ for annotation in &annotations {
+ if old_school {
+ for p in annotation.start_col .. annotation.end_col {
+ if p == annotation.start_col {
+ styled_buffer.putc(1, p, '^',
+ if annotation.is_primary {
+ Style::UnderlinePrimary
+ } else {
+ Style::OldSkoolNote
+ });
+ }
+ else {
+ styled_buffer.putc(1, p, '~',
+ if annotation.is_primary {
+ Style::UnderlinePrimary
+ } else {
+ Style::OldSkoolNote
+ });
+ }
+ }
+ }
+ else {
+ for p in annotation.start_col .. annotation.end_col {
+ if annotation.is_primary {
+ styled_buffer.putc(1, p, '^', Style::UnderlinePrimary);
+ if !annotation.is_minimized {
+ styled_buffer.set_style(0, p, Style::UnderlinePrimary);
+ }
+ } else {
+ styled_buffer.putc(1, p, '-', Style::UnderlineSecondary);
+ if !annotation.is_minimized {
+ styled_buffer.set_style(0, p, Style::UnderlineSecondary);
+ }
+ }
+ }
+ }
+ }
+
+ // Now we are going to write labels in. To start, we'll exclude
+ // the annotations with no labels.
+ let (labeled_annotations, unlabeled_annotations): (Vec<_>, _) =
+ annotations.into_iter()
+ .partition(|a| a.label.is_some());
+
+ // If there are no annotations that need text, we're done.
+ if labeled_annotations.is_empty() {
+ return styled_buffer.render(source_kind);
+ }
+ if old_school {
+ return styled_buffer.render(source_kind);
+ }
+
+ // Now add the text labels. We try, when possible, to stick the rightmost
+ // annotation at the end of the highlight line:
+ //
+ // vec.push(vec.pop().unwrap());
+ // --- --- - previous borrow ends here
+ //
+ // But sometimes that's not possible because one of the other
+ // annotations overlaps it. For example, from the test
+ // `span_overlap_label`, we have the following annotations
+ // (written on distinct lines for clarity):
+ //
+ // fn foo(x: u32) {
+ // --------------
+ // -
+ //
+ // In this case, we can't stick the rightmost-most label on
+ // the highlight line, or we would get:
+ //
+ // fn foo(x: u32) {
+ // -------- x_span
+ // |
+ // fn_span
+ //
+ // which is totally weird. Instead we want:
+ //
+ // fn foo(x: u32) {
+ // --------------
+ // | |
+ // | x_span
+ // fn_span
+ //
+ // which is...less weird, at least. In fact, in general, if
+ // the rightmost span overlaps with any other span, we should
+ // use the "hang below" version, so we can at least make it
+ // clear where the span *starts*.
+ let mut labeled_annotations = &labeled_annotations[..];
+ match labeled_annotations.split_last().unwrap() {
+ (last, previous) => {
+ if previous.iter()
+ .chain(&unlabeled_annotations)
+ .all(|a| !overlaps(a, last))
+ {
+ // append the label afterwards; we keep it in a separate
+ // string
+ let highlight_label: String = format!(" {}", last.label.as_ref().unwrap());
+ if last.is_primary {
+ styled_buffer.append(1, &highlight_label, Style::LabelPrimary);
+ } else {
+ styled_buffer.append(1, &highlight_label, Style::LabelSecondary);
+ }
+ labeled_annotations = previous;
+ }
+ }
+ }
+
+ // If that's the last annotation, we're done
+ if labeled_annotations.is_empty() {
+ return styled_buffer.render(source_kind);
+ }
+
+ for (index, annotation) in labeled_annotations.iter().enumerate() {
+ // Leave:
+ // - 1 extra line
+ // - One line for each thing that comes after
+ let comes_after = labeled_annotations.len() - index - 1;
+ let blank_lines = 3 + comes_after;
+
+ // For each blank line, draw a `|` at our column. The
+ // text ought to be long enough for this.
+ for index in 2..blank_lines {
+ if annotation.is_primary {
+ styled_buffer.putc(index, annotation.start_col, '|', Style::UnderlinePrimary);
+ } else {
+ styled_buffer.putc(index, annotation.start_col, '|', Style::UnderlineSecondary);
+ }
+ }
+
+ if annotation.is_primary {
+ styled_buffer.puts(blank_lines, annotation.start_col,
+ annotation.label.as_ref().unwrap(), Style::LabelPrimary);
+ } else {
+ styled_buffer.puts(blank_lines, annotation.start_col,
+ annotation.label.as_ref().unwrap(), Style::LabelSecondary);
+ }
+ }
+
+ styled_buffer.render(source_kind)
+ }
+}
+
+fn prepend_prefixes(rendered_lines: &mut [RenderedLine], format_mode: &FormatMode) {
+ let old_school = match *format_mode {
+ FormatMode::OriginalErrorFormat => true,
+ FormatMode::NewErrorFormat => false,
+ FormatMode::EnvironmentSelected => check_old_skool()
+ };
+ if old_school {
+ return;
+ }
+
+ let prefixes: Vec<_> =
+ rendered_lines.iter()
+ .map(|rl| rl.kind.prefix())
+ .collect();
+
+ // find the max amount of spacing we need; add 1 to
+ // p.text.len() to leave space between the prefix and the
+ // source text
+ let padding_len =
+ prefixes.iter()
+ .map(|p| if p.text.len() == 0 { 0 } else { p.text.len() + 1 })
+ .max()
+ .unwrap_or(0);
+
+ // Ensure we insert at least one character of padding, so that the
+ // `-->` arrows can fit etc.
+ let padding_len = cmp::max(padding_len, 1);
+
+ for (mut prefix, line) in prefixes.into_iter().zip(rendered_lines) {
+ let extra_spaces = (prefix.text.len() .. padding_len).map(|_| ' ');
+ prefix.text.extend(extra_spaces);
+ match line.kind {
+ RenderedLineKind::Elision => {
+ line.text.insert(0, prefix);
+ }
+ RenderedLineKind::PrimaryFileName => {
+ // --> filename
+ // 22 |>
+ // ^
+ // padding_len
+ let dashes = (0..padding_len - 1).map(|_| ' ')
+ .chain(Some('-'))
+ .chain(Some('-'))
+ .chain(Some('>'))
+ .chain(Some(' '));
+ line.text.insert(0, StyledString {text: dashes.collect(),
+ style: Style::LineNumber})
+ }
+ RenderedLineKind::OtherFileName => {
+ // ::: filename
+ // 22 |>
+ // ^
+ // padding_len
+ let dashes = (0..padding_len - 1).map(|_| ' ')
+ .chain(Some(':'))
+ .chain(Some(':'))
+ .chain(Some(':'))
+ .chain(Some(' '));
+ line.text.insert(0, StyledString {text: dashes.collect(),
+ style: Style::LineNumber})
+ }
+ _ => {
+ line.text.insert(0, prefix);
+ line.text.insert(1, StyledString {text: String::from("|> "),
+ style: Style::LineNumber})
+ }
+ }
+ }
+}
+
+fn trim_lines(rendered_lines: &mut [RenderedLine]) {
+ for line in rendered_lines {
+ while !line.text.is_empty() {
+ line.trim_last();
+ if line.text.last().unwrap().text.is_empty() {
+ line.text.pop();
+ } else {
+ break;
+ }
+ }
+ }
+}
+
+impl Line {
+ fn new(line_index: usize) -> Line {
+ Line {
+ line_index: line_index,
+ annotations: vec![]
+ }
+ }
+
+ fn push_annotation(&mut self,
+ start: CharPos,
+ end: CharPos,
+ is_primary: bool,
+ is_minimized: bool,
+ label: Option<String>) {
+ self.annotations.push(Annotation {
+ start_col: start.0,
+ end_col: end.0,
+ is_primary: is_primary,
+ is_minimized: is_minimized,
+ label: label,
+ });
+ }
+}
+
+fn overlaps(a1: &Annotation,
+ a2: &Annotation)
+ -> bool
+{
+ (a2.start_col .. a2.end_col).contains(a1.start_col) ||
+ (a1.start_col .. a1.end_col).contains(a2.start_col)
+}
serialize = { path = "../libserialize" }
log = { path = "../liblog" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
use std::io::Write;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
-use syntax::codemap::Span;
use syntax::parse::token::InternedString;
+use syntax_pos::Span;
const IF_THIS_CHANGED: &'static str = "rustc_if_this_changed";
const THEN_THIS_WOULD_NEED: &'static str = "rustc_then_this_would_need";
pub use self::SawStmtComponent::*;
use self::SawAbiComponent::*;
use syntax::ast::{self, Name, NodeId};
- use syntax::codemap::Span;
use syntax::parse::token;
+ use syntax_pos::Span;
use rustc::ty::TyCtxt;
use rustc::hir;
use rustc::hir::*;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
+extern crate syntax_pos;
mod assert_dep_graph;
mod calculate_svh;
rustc_back = { path = "../librustc_back" }
rustc_const_eval = { path = "../librustc_const_eval" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
use syntax::ast;
use syntax::attr::{self, AttrMetaMethods};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir::{self, PatKind};
use rustc::hir::intravisit::FnKind;
use syntax::{ast};
use syntax::attr::{self, AttrMetaMethods};
-use syntax::codemap::{self, Span};
+use syntax_pos::{self, Span};
use rustc::hir::{self, PatKind};
use rustc::hir::intravisit::FnKind;
if hints.iter().any(|attr| *attr == attr::ReprExtern) &&
self_type_def.dtor_kind().has_drop_flag() {
let drop_impl_span = ctx.tcx.map.def_id_span(drop_impl_did,
- codemap::DUMMY_SP);
+ syntax_pos::DUMMY_SP);
let self_defn_span = ctx.tcx.map.def_id_span(self_type_did,
- codemap::DUMMY_SP);
+ syntax_pos::DUMMY_SP);
ctx.span_lint_note(DROP_WITH_REPR_EXTERN,
drop_impl_span,
"implementing Drop adds hidden state to types, \
extern crate log;
extern crate rustc_back;
extern crate rustc_const_eval;
+extern crate syntax_pos;
pub use rustc::lint as lint;
pub use rustc::middle as middle;
use syntax::ast;
use syntax::abi::Abi;
use syntax::attr;
-use syntax::codemap::{self, Span};
+use syntax_pos::Span;
+use syntax::codemap;
use rustc::hir;
use syntax::ast;
use syntax::attr::{self, AttrMetaMethods};
-use syntax::codemap::Span;
use syntax::feature_gate::{KNOWN_ATTRIBUTES, AttributeType};
use syntax::ptr::P;
+use syntax_pos::Span;
use rustc_back::slice;
use rustc::hir;
fn check_stmt(&mut self, cx: &EarlyContext, s: &ast::Stmt) {
let (value, msg) = match s.node {
- ast::StmtKind::Decl(ref decl, _) => match decl.node {
- ast::DeclKind::Local(ref local) => match local.init {
- Some(ref value) => (value, "assigned value"),
- None => return
- },
- _ => return
+ ast::StmtKind::Local(ref local) => match local.init {
+ Some(ref value) => (value, "assigned value"),
+ None => return
},
_ => return
};
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
+rustc_errors = { path = "../librustc_errors" }
rustc_llvm = { path = "../librustc_llvm" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
use rustc::ty::subst;
use rustc::ty::{self, Ty, TyCtxt};
-use syntax::{ast, codemap};
+use syntax::ast;
use syntax::ast::NodeIdAssigner;
use syntax::ptr::P;
+use syntax_pos;
use std::cell::Cell;
use std::io::SeekFrom;
fn new_def_id(&self, def_id: DefId) -> DefId {
self.tr_def_id(def_id)
}
- fn new_span(&self, span: codemap::Span) -> codemap::Span {
+ fn new_span(&self, span: syntax_pos::Span) -> syntax_pos::Span {
self.tr_span(span)
}
}
/// Translates a `Span` from an extern crate to the corresponding `Span`
/// within the local crate's codemap.
- pub fn tr_span(&self, span: codemap::Span) -> codemap::Span {
+ pub fn tr_span(&self, span: syntax_pos::Span) -> syntax_pos::Span {
decoder::translate_span(self.cdata,
self.tcx.sess.codemap(),
&self.last_filemap_index,
}
}
-impl tr for codemap::Span {
- fn tr(&self, dcx: &DecodeContext) -> codemap::Span {
+impl tr for syntax_pos::Span {
+ fn tr(&self, dcx: &DecodeContext) -> syntax_pos::Span {
dcx.tr_span(*self)
}
}
#[cfg(test)]
trait FakeExtCtxt {
- fn call_site(&self) -> codemap::Span;
+ fn call_site(&self) -> syntax_pos::Span;
fn cfg(&self) -> ast::CrateConfig;
fn ident_of(&self, st: &str) -> ast::Ident;
fn name_of(&self, st: &str) -> ast::Name;
#[cfg(test)]
impl FakeExtCtxt for parse::ParseSess {
- fn call_site(&self) -> codemap::Span {
- codemap::Span {
- lo: codemap::BytePos(0),
- hi: codemap::BytePos(0),
- expn_id: codemap::NO_EXPANSION,
+ fn call_site(&self) -> syntax_pos::Span {
+ syntax_pos::Span {
+ lo: syntax_pos::BytePos(0),
+ hi: syntax_pos::BytePos(0),
+ expn_id: syntax_pos::NO_EXPANSION,
}
}
fn cfg(&self) -> ast::CrateConfig { Vec::new() }
use syntax::ast;
use syntax::abi::Abi;
-use syntax::codemap::{self, Span, mk_sp, Pos};
+use syntax::codemap;
use syntax::parse;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::parse::token::InternedString;
use syntax::visit;
+use syntax_pos::{self, Span, mk_sp, Pos};
use log;
struct LocalCrateReader<'a> {
local_crate_name: String,
}
-impl<'a, 'ast> visit::Visitor<'ast> for LocalCrateReader<'a> {
- fn visit_item(&mut self, a: &'ast ast::Item) {
+impl<'a> visit::Visitor for LocalCrateReader<'a> {
+ fn visit_item(&mut self, a: &ast::Item) {
self.process_item(a);
visit::walk_item(self, a);
}
info!("panic runtime not found -- loading {}", name);
let (cnum, data, _) = self.resolve_crate(&None, name, name, None,
- codemap::DUMMY_SP,
+ syntax_pos::DUMMY_SP,
PathKind::Crate, false);
// Sanity check the loaded crate to ensure it is indeed a panic runtime
&self.sess.target.target.options.exe_allocation_crate
};
let (cnum, data, _) = self.resolve_crate(&None, name, name, None,
- codemap::DUMMY_SP,
+ syntax_pos::DUMMY_SP,
PathKind::Crate, false);
// Sanity check the crate we loaded to ensure that it is indeed an
None => {
// We can't reuse an existing FileMap, so allocate a new one
// containing the information we need.
- let codemap::FileMap {
+ let syntax_pos::FileMap {
name,
abs_path,
start_pos,
return imported_filemaps;
- fn are_equal_modulo_startpos(fm1: &codemap::FileMap,
- fm2: &codemap::FileMap)
+ fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap,
+ fm2: &syntax_pos::FileMap)
-> bool {
if fm1.name != fm2.name {
return false;
use syntax::attr;
use syntax::codemap;
use syntax::parse::token::IdentInterner;
+use syntax_pos;
pub use middle::cstore::{NativeLibraryKind, LinkagePreference};
pub use middle::cstore::{NativeStatic, NativeFramework, NativeUnknown};
MetadataArchive(loader::ArchiveMetadata),
}
-/// Holds information about a codemap::FileMap imported from another crate.
+/// Holds information about a syntax_pos::FileMap imported from another crate.
/// See creader::import_codemap() for more information.
pub struct ImportedFileMap {
/// This FileMap's byte-offset within the codemap of its original crate
- pub original_start_pos: codemap::BytePos,
+ pub original_start_pos: syntax_pos::BytePos,
/// The end of this FileMap within the codemap of its original crate
- pub original_end_pos: codemap::BytePos,
+ pub original_end_pos: syntax_pos::BytePos,
/// The imported FileMap's representation within the local codemap
- pub translated_filemap: Rc<codemap::FileMap>
+ pub translated_filemap: Rc<syntax_pos::FileMap>
}
pub struct crate_metadata {
use syntax::parse::token::{self, IdentInterner};
use syntax::ast;
use syntax::abi::Abi;
-use syntax::codemap::{self, Span, BytePos, NO_EXPANSION};
+use syntax::codemap;
use syntax::print::pprust;
use syntax::ptr::P;
-
+use syntax_pos::{self, Span, BytePos, NO_EXPANSION};
pub type Cmd<'a> = &'a crate_metadata;
value: meta_item,
is_sugared_doc: is_sugared_doc,
},
- span: codemap::DUMMY_SP
+ span: syntax_pos::DUMMY_SP
}
}).collect()
},
pub fn translate_span(cdata: Cmd,
codemap: &codemap::CodeMap,
last_filemap_index_hint: &Cell<usize>,
- span: codemap::Span)
- -> codemap::Span {
+ span: syntax_pos::Span)
+ -> syntax_pos::Span {
let span = if span.lo > span.hi {
// Currently macro expansion sometimes produces invalid Span values
// where lo > hi. In order not to crash the compiler when trying to
// least some of the time).
// This workaround is only necessary as long as macro expansion is
// not fixed. FIXME(#23480)
- codemap::mk_sp(span.lo, span.lo)
+ syntax_pos::mk_sp(span.lo, span.lo)
} else {
span
};
let hi = (span.hi - filemap.original_start_pos) +
filemap.translated_filemap.start_pos;
- codemap::mk_sp(lo, hi)
+ syntax_pos::mk_sp(lo, hi)
}
pub fn each_inherent_implementation_for_type<F>(cdata: Cmd,
item_family(impl_doc) == Family::DefaultImpl
}
-pub fn get_imported_filemaps(metadata: &[u8]) -> Vec<codemap::FileMap> {
+pub fn get_imported_filemaps(metadata: &[u8]) -> Vec<syntax_pos::FileMap> {
let crate_doc = rbml::Doc::new(metadata);
let cm_doc = reader::get_doc(crate_doc, tag_codemap);
use std::u32;
use syntax::abi::Abi;
use syntax::ast::{self, NodeId, Name, CRATE_NODE_ID, CrateNum};
-use syntax::codemap::BytePos;
use syntax::attr;
-use syntax::errors::Handler;
+use errors::Handler;
use syntax;
+use syntax_pos::BytePos;
use rbml::writer::Encoder;
use rustc::hir::{self, PatKind};
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
#[macro_use] #[no_link] extern crate rustc_bitflags;
-
+extern crate syntax_pos;
extern crate flate;
extern crate rbml;
extern crate serialize as rustc_serialize; // used by deriving
+extern crate rustc_errors as errors;
#[macro_use]
extern crate rustc;
use rustc_llvm as llvm;
use rustc_llvm::{False, ObjectFile, mk_section_iter};
use rustc_llvm::archive_ro::ArchiveRO;
-use syntax::codemap::Span;
-use syntax::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
+use syntax_pos::Span;
use rustc_back::target::Target;
use std::cmp;
use rustc::session::Session;
use std::collections::{HashSet, HashMap};
-use syntax::codemap::Span;
use syntax::parse::token;
use syntax::ast;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::ext;
+use syntax_pos::Span;
pub struct MacroLoader<'a> {
sess: &'a Session,
use syntax::abi::Abi;
use syntax::ast;
-use syntax::errors::Handler;
+use errors::Handler;
use rbml::leb128;
use encoder;
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_bitflags = { path = "../librustc_bitflags" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
use rustc::ty;
use rustc::mir::repr::*;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// Compile `expr`, yielding an rvalue.
use hair::*;
use rustc::middle::region::CodeExtent;
use rustc::mir::repr::*;
-use syntax::codemap::Span;
+use syntax_pos::Span;
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
use rustc::mir::repr::*;
use hair::*;
use syntax::ast::{Name, NodeId};
-use syntax::codemap::Span;
+use syntax_pos::Span;
// helper functions, broken out by category:
mod simplify;
use rustc::middle::const_val::ConstVal;
use rustc::ty::{self, Ty};
use rustc::mir::repr::*;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use std::cmp::Ordering;
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
use rustc::mir::repr::*;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// Add a new temporary value of type `ty` storing the result of
use rustc::hir;
use syntax::abi::Abi;
use syntax::ast;
-use syntax::codemap::Span;
use syntax::parse::token::keywords;
+use syntax_pos::Span;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use rustc::ty::subst::{Substs, Subst, VecPerParamSpace};
use rustc::ty::{Ty, TyCtxt};
use rustc::mir::repr::*;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::fnv::FnvHashMap;
use rustc::ty::{self, Ty};
use rustc::mir::repr::*;
use rustc::hir::{self, PatKind};
-use syntax::codemap::Span;
use syntax::ptr::P;
+use syntax_pos::Span;
/// When there are multiple patterns in a single arm, each one has its
/// own node-ids for the bindings. References to the variables always
use rustc::ty::{self, AdtDef, ClosureSubsts, Region, Ty};
use rustc::hir;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use self::cx::Cx;
pub mod cx;
extern crate rustc_bitflags;
#[macro_use]
extern crate syntax;
+extern crate syntax_pos;
extern crate rustc_const_math;
extern crate rustc_const_eval;
use rustc::hir;
use rustc::hir::intravisit::{self, FnKind, Visitor};
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use std::mem;
use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor};
use rustc::mir::traversal::ReversePostorder;
use rustc::ty::{self, TyCtxt};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use build::Location;
use rustc::mir::visit::{LvalueContext, Visitor};
use rustc::util::nodemap::DefIdMap;
use syntax::abi::Abi;
-use syntax::codemap::Span;
use syntax::feature_gate::UnstableFeatures;
+use syntax_pos::Span;
use std::collections::hash_map::Entry;
use std::fmt;
use rustc::mir::transform::{MirPass, MirSource, Pass};
use rustc::mir::visit::{self, Visitor};
use std::fmt;
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
use rustc_data_structures::indexed_vec::Idx;
rustc_const_eval = { path = "../librustc_const_eval" }
rustc_const_math = { path = "../librustc_const_math" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
+rustc_errors = { path = "../librustc_errors" }
\ No newline at end of file
use rustc::lint;
use rustc::session::Session;
use syntax::ast::*;
-use syntax::codemap::Span;
-use syntax::errors;
use syntax::parse::token::{self, keywords};
use syntax::visit::{self, Visitor};
+use syntax_pos::Span;
+use errors;
struct AstValidator<'a> {
session: &'a Session,
}
}
-impl<'a, 'v> Visitor<'v> for AstValidator<'a> {
+impl<'a> Visitor for AstValidator<'a> {
fn visit_lifetime(&mut self, lt: &Lifetime) {
if lt.name.as_str() == "'_" {
self.session.add_lint(
match expr.node {
ExprKind::While(_, _, Some(ident)) | ExprKind::Loop(_, Some(ident)) |
ExprKind::WhileLet(_, _, _, Some(ident)) | ExprKind::ForLoop(_, _, _, Some(ident)) |
- ExprKind::Break(Some(ident)) | ExprKind::Again(Some(ident)) => {
+ ExprKind::Break(Some(ident)) | ExprKind::Continue(Some(ident)) => {
self.check_label(ident.node, ident.span, expr.id);
}
_ => {}
use rustc::hir::{self, PatKind};
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir::intravisit::{self, FnKind, Visitor};
use std::collections::hash_map::Entry;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
+extern crate syntax_pos;
+extern crate rustc_errors as errors;
pub mod diagnostics;
use rustc::hir::map::Map;
use rustc::hir::intravisit::{self, Visitor};
use rustc::hir;
-use syntax::codemap::Span;
+use syntax_pos::Span;
#[derive(Clone, Copy, PartialEq)]
enum Context {
sess: &'a Session,
}
-impl<'a, 'v> Visitor<'v> for CheckNoAsm<'a> {
+impl<'a> Visitor for CheckNoAsm<'a> {
fn visit_expr(&mut self, e: &ast::Expr) {
match e.node {
ast::ExprKind::InlineAsm(_) => span_err!(self.sess, e.span, E0472,
use rustc::hir;
use rustc::hir::intravisit;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let mut rvcx = RvalueContext { tcx: tcx };
use rustc::util::nodemap::NodeMap;
use syntax::{ast};
-use syntax::codemap::Span;
use syntax::feature_gate::{GateIssue, emit_feature_err};
+use syntax_pos::Span;
use rustc::hir::intravisit::{self, Visitor};
use rustc::hir;
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_metadata = { path = "../librustc_metadata" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
+rustc_errors = { path = "../librustc_errors" }
use syntax::ast;
use syntax::attr;
-use syntax::codemap::Span;
-use syntax::errors;
+use errors;
+use syntax_pos::Span;
use rustc::dep_graph::DepNode;
use rustc::hir::map::Map;
use rustc::hir::intravisit::Visitor;
extern crate rustc;
extern crate rustc_back;
extern crate rustc_metadata;
+extern crate syntax_pos;
+extern crate rustc_errors as errors;
pub use self::registry::Registry;
use std::mem;
use std::path::PathBuf;
use syntax::ast;
-use syntax::codemap::{Span, COMMAND_LINE_SP};
use syntax::ptr::P;
use syntax::attr::AttrMetaMethods;
+use syntax_pos::{Span, COMMAND_LINE_SP};
/// Pointer to a registrar function.
pub type PluginRegistrarFun =
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT};
use syntax::ext::base::{IdentTT, MultiModifier, MultiDecorator};
use syntax::ext::base::{MacroExpanderFn, MacroRulesTT};
-use syntax::codemap::Span;
use syntax::parse::token;
use syntax::ptr::P;
use syntax::ast;
use syntax::feature_gate::AttributeType;
+use syntax_pos::Span;
use std::collections::HashMap;
use std::borrow::ToOwned;
[dependencies]
rustc = { path = "../librustc" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
extern crate rustc;
#[macro_use] extern crate syntax;
+extern crate syntax_pos;
use rustc::dep_graph::DepNode;
use rustc::hir::{self, PatKind};
use rustc::ty::{self, TyCtxt};
use rustc::util::nodemap::NodeSet;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use std::cmp;
use std::mem::replace;
syntax = { path = "../libsyntax" }
rustc = { path = "../librustc" }
arena = { path = "../libarena" }
+rustc_errors = { path = "../librustc_errors" }
+syntax_pos = { path = "../libsyntax_pos" }
use syntax::ast::Name;
use syntax::attr;
use syntax::parse::token;
-use syntax::codemap::{Span, DUMMY_SP};
-use syntax::ast::{Block, Crate, DeclKind};
+use syntax::ast::{Block, Crate};
use syntax::ast::{ForeignItem, ForeignItemKind, Item, ItemKind};
use syntax::ast::{Mutability, PathListItemKind};
-use syntax::ast::{Stmt, StmtKind, TraitItemKind};
+use syntax::ast::{StmtKind, TraitItemKind};
use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple};
use syntax::visit::{self, Visitor};
+use syntax_pos::{Span, DUMMY_SP};
+
trait ToNameBinding<'a> {
fn to_name_binding(self) -> NameBinding<'a>;
}
}
fn block_needs_anonymous_module(&mut self, block: &Block) -> bool {
- fn is_item(statement: &Stmt) -> bool {
- if let StmtKind::Decl(ref declaration, _) = statement.node {
- if let DeclKind::Item(_) = declaration.node {
- return true;
- }
- }
- false
- }
-
// If any statements are items, we need to create an anonymous module
- block.stmts.iter().any(is_item)
+ block.stmts.iter().any(|statement| match statement.node {
+ StmtKind::Item(_) => true,
+ _ => false,
+ })
}
/// Constructs the reduced graph for one item.
(Def::Method(item_def_id), ValueNS)
}
TraitItemKind::Type(..) => (Def::AssociatedTy(def_id, item_def_id), TypeNS),
+ TraitItemKind::Macro(_) => panic!("unexpanded macro in resolve!"),
};
self.define(module_parent, item.ident.name, ns, (def, item.span, vis));
parent: Module<'b>,
}
-impl<'a, 'b, 'v> Visitor<'v> for BuildReducedGraphVisitor<'a, 'b> {
+impl<'a, 'b> Visitor for BuildReducedGraphVisitor<'a, 'b> {
fn visit_item(&mut self, item: &Item) {
let old_parent = self.parent;
self.resolver.build_reduced_graph_for_item(item, &mut self.parent);
use rustc::lint;
use syntax::ast::{self, ViewPathGlob, ViewPathList, ViewPathSimple};
use syntax::visit::{self, Visitor};
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
struct UnusedImportCheckVisitor<'a, 'b: 'a> {
}
}
-impl<'a, 'b, 'v> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b> {
+impl<'a, 'b> Visitor for UnusedImportCheckVisitor<'a, 'b> {
fn visit_item(&mut self, item: &ast::Item) {
visit::walk_item(self, item);
// Ignore is_public import statements because there's no way to be sure
extern crate log;
#[macro_use]
extern crate syntax;
+extern crate syntax_pos;
+extern crate rustc_errors as errors;
extern crate arena;
#[macro_use]
extern crate rustc;
use syntax::ext::mtwt;
use syntax::ast::{self, FloatTy};
use syntax::ast::{CRATE_NODE_ID, Name, NodeId, CrateNum, IntTy, UintTy};
-use syntax::codemap::{self, Span};
-use syntax::errors::DiagnosticBuilder;
use syntax::parse::token::{self, keywords};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::ast::{Local, Mutability, Pat, PatKind, Path};
use syntax::ast::{PathSegment, PathParameters, QSelf, TraitItemKind, TraitRef, Ty, TyKind};
+use syntax_pos::Span;
+use errors::DiagnosticBuilder;
+
use std::collections::{HashMap, HashSet};
use std::cell::{Cell, RefCell};
use std::fmt;
}
fn resolve_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>,
- span: syntax::codemap::Span,
+ span: syntax_pos::Span,
resolution_error: ResolutionError<'c>) {
resolve_struct_error(resolver, span, resolution_error).emit();
}
fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>,
- span: syntax::codemap::Span,
+ span: syntax_pos::Span,
resolution_error: ResolutionError<'c>)
-> DiagnosticBuilder<'a> {
if !resolver.emit_errors {
ValueNS,
}
-impl<'a, 'v> Visitor<'v> for Resolver<'a> {
+impl<'a> Visitor for Resolver<'a> {
fn visit_item(&mut self, item: &Item) {
self.resolve_item(item);
}
});
}
fn visit_fn(&mut self,
- function_kind: FnKind<'v>,
- declaration: &'v FnDecl,
- block: &'v Block,
+ function_kind: FnKind,
+ declaration: &FnDecl,
+ block: &Block,
_: Span,
node_id: NodeId) {
let rib_kind = match function_kind {
visit::walk_trait_item(this, trait_item)
});
}
+ TraitItemKind::Macro(_) => panic!("unexpanded macro in resolve!"),
};
}
});
self.resolve_crate_relative_path(trait_path.span, segments, TypeNS)
} else {
self.resolve_module_relative_path(trait_path.span, segments, TypeNS)
- }.map(|binding| binding.span).unwrap_or(codemap::DUMMY_SP)
+ }.map(|binding| binding.span).unwrap_or(syntax_pos::DUMMY_SP)
};
- if definition_site != codemap::DUMMY_SP {
+ if definition_site != syntax_pos::DUMMY_SP {
err.span_label(definition_site,
&format!("type aliases cannot be used for traits"));
}
}, "variant or struct");
}
- PatKind::Path(ref path) => {
- self.resolve_pattern_path(pat.id, None, path, ValueNS, |def| {
+ PatKind::Path(ref qself, ref path) => {
+ self.resolve_pattern_path(pat.id, qself.as_ref(), path, ValueNS, |def| {
match def {
Def::Struct(..) | Def::Variant(..) |
Def::Const(..) | Def::AssociatedConst(..) | Def::Err => true,
}, "variant, struct or constant");
}
- PatKind::QPath(ref qself, ref path) => {
- self.resolve_pattern_path(pat.id, Some(qself), path, ValueNS, |def| {
- match def {
- Def::AssociatedConst(..) | Def::Err => true,
- _ => false,
- }
- }, "associated constant");
- }
-
PatKind::Struct(ref path, _, _) => {
self.resolve_pattern_path(pat.id, None, path, TypeNS, |def| {
match def {
})
}
- ExprKind::Break(Some(label)) | ExprKind::Again(Some(label)) => {
+ ExprKind::Break(Some(label)) | ExprKind::Continue(Some(label)) => {
match self.search_label(mtwt::resolve(label.node)) {
None => {
self.record_def(expr.id, err_path_resolution());
},
};
- if old_binding.span != codemap::DUMMY_SP {
+ if old_binding.span != syntax_pos::DUMMY_SP {
err.span_label(old_binding.span, &format!("previous {} of `{}` here", noun, name));
}
err.emit();
use rustc::hir::def::*;
use syntax::ast::{NodeId, Name};
-use syntax::codemap::{Span, DUMMY_SP};
use syntax::util::lev_distance::find_best_match_for_name;
+use syntax_pos::{Span, DUMMY_SP};
use std::cell::{Cell, RefCell};
rustc = { path = "../librustc" }
syntax = { path = "../libsyntax" }
serialize = { path = "../libserialize" }
+syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
use rustc::hir::def_id::DefId;
use syntax::ast::{CrateNum, NodeId};
-use syntax::codemap::Span;
+use syntax_pos::Span;
pub struct CrateData {
pub name: String,
use std::hash::*;
use syntax::ast::{self, NodeId, PatKind};
-use syntax::codemap::*;
use syntax::parse::token::{self, keywords};
use syntax::visit::{self, Visitor};
use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string};
use syntax::ptr::P;
+use syntax::codemap::Spanned;
+use syntax_pos::*;
use super::{escape, generated_code, SaveContext, PathCollector};
use super::data::*;
}
}
-impl<'v, 'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'v> for DumpVisitor<'l, 'tcx, 'll, D> {
+impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D> {
fn visit_item(&mut self, item: &ast::Item) {
use syntax::ast::ItemKind::*;
self.process_macro_use(item.span, item.id);
trait_item.span);
}
ast::TraitItemKind::Const(_, None) |
- ast::TraitItemKind::Type(..) => {}
+ ast::TraitItemKind::Type(..) |
+ ast::TraitItemKind::Macro(_) => {}
}
}
}
fn visit_stmt(&mut self, s: &ast::Stmt) {
- let id = s.node.id();
- self.process_macro_use(s.span, id.unwrap());
+ self.process_macro_use(s.span, s.id);
visit::walk_stmt(self, s)
}
use rustc::hir::map::Map;
use rustc::ty::TyCtxt;
use syntax::ast::{CrateNum, NodeId};
-use syntax::codemap::{Span, CodeMap};
+use syntax::codemap::CodeMap;
+use syntax_pos::Span;
use data;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
extern crate serialize as rustc_serialize;
+extern crate syntax_pos;
mod csv_dumper;
mod json_dumper;
use std::path::{Path, PathBuf};
use syntax::ast::{self, NodeId, PatKind};
-use syntax::codemap::*;
use syntax::parse::token::{self, keywords};
use syntax::visit::{self, Visitor};
use syntax::print::pprust::{ty_to_string, arg_to_string};
+use syntax::codemap::MacroAttribute;
+use syntax_pos::*;
pub use self::csv_dumper::CsvDumper;
pub use self::json_dumper::JsonDumper;
}
}
-impl<'v> Visitor<'v> for PathCollector {
+impl Visitor for PathCollector {
fn visit_pat(&mut self, p: &ast::Pat) {
match p.node {
PatKind::Struct(ref path, _, _) => {
ast::Mutability::Mutable, recorder::TypeRef));
}
PatKind::TupleStruct(ref path, _, _) |
- PatKind::Path(ref path) |
- PatKind::QPath(_, ref path) => {
+ PatKind::Path(_, ref path) => {
self.collected_paths.push((p.id, path.clone(),
ast::Mutability::Mutable, recorder::VarRef));
}
use std::path::Path;
use syntax::ast;
-use syntax::codemap::*;
use syntax::parse::lexer::{self, Reader, StringReader};
use syntax::parse::token::{self, keywords, Token};
+use syntax_pos::*;
#[derive(Clone)]
pub struct SpanUtils<'a> {
rustc_const_eval = { path = "../librustc_const_eval" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
+rustc_errors = { path = "../librustc_errors" }
rustc_incremental = { path = "../librustc_incremental" }
rustc_llvm = { path = "../librustc_llvm" }
rustc_platform_intrinsics = { path = "../librustc_platform_intrinsics" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
use std::rc::Rc;
use rustc::hir::{self, PatKind};
use syntax::ast::{self, DUMMY_NODE_ID, NodeId};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir::fold::Folder;
use syntax::ptr::P;
use std::str;
use flate;
use syntax::ast;
-use syntax::codemap::Span;
use syntax::attr::AttrMetaMethods;
+use syntax_pos::Span;
// RLIB LLVM-BYTECODE OBJECT LAYOUT
// Version 1
use {CrateTranslation, ModuleTranslation};
use util::common::time;
use util::common::path2cstr;
-use syntax::codemap::MultiSpan;
-use syntax::errors::{self, Handler, Level, RenderSpan};
-use syntax::errors::emitter::CoreEmitter;
+use errors::{self, Handler, Level, RenderSpan};
+use errors::emitter::CoreEmitter;
+use syntax_pos::MultiSpan;
use std::collections::HashMap;
use std::ffi::{CStr, CString};
unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext<'a>,
msg: &'b str,
cookie: c_uint) {
- use syntax::codemap::ExpnId;
+ use syntax_pos::ExpnId;
match cgcx.lto_ctxt {
Some((sess, _)) => {
use std::collections::{HashMap, HashSet};
use std::str;
use std::{i8, i16, i32, i64};
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
use syntax::parse::token::InternedString;
use syntax::attr::AttrMetaMethods;
use syntax::attr;
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use common::*;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use builder::Builder;
use type_::Type;
use std::ffi::CString;
use std::ptr;
-use syntax::codemap::Span;
+use syntax_pos::Span;
pub struct Builder<'a, 'tcx: 'a> {
pub llbuilder: BuilderRef,
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::hir;
-use syntax::codemap::DUMMY_SP;
-use syntax::errors;
+use syntax_pos::DUMMY_SP;
+use errors;
use syntax::ptr::P;
#[derive(Debug)]
use rustc::mir::visit::Visitor as MirVisitor;
use syntax::abi::Abi;
-use syntax::codemap::DUMMY_SP;
-use syntax::errors;
+use errors;
+use syntax_pos::DUMMY_SP;
use base::custom_coerce_unsize_info;
use context::SharedCrateContext;
use common::{fulfill_obligation, normalize_and_test_predicates, type_is_sized};
use std::cell::{Cell, RefCell};
use syntax::ast;
-use syntax::codemap::{DUMMY_SP, Span};
use syntax::parse::token::InternedString;
use syntax::parse::token;
+use syntax_pos::{DUMMY_SP, Span};
pub use context::{CrateContext, SharedCrateContext};
use libc::c_uint;
use syntax::ast::{self, LitKind};
use syntax::attr::{self, AttrMetaMethods};
-use syntax::codemap::Span;
use syntax::parse::token;
use syntax::ptr::P;
+use syntax_pos::Span;
pub type FnArgMap<'a> = Option<&'a NodeMap<ValueRef>>;
use std::fmt;
use syntax::ast;
-use syntax::codemap::DUMMY_SP;
+use syntax_pos::DUMMY_SP;
/// A `Datum` encapsulates the result of evaluating an expression. It
/// describes where the value is stored, what Rust type the value has,
use libc::c_uint;
use std::ptr;
-use syntax::codemap::{Span, Pos};
+use syntax_pos::{Span, Pos};
use syntax::{ast, codemap};
use rustc_data_structures::bitvec::BitVector;
use std::rc::Rc;
use syntax;
use syntax::util::interner::Interner;
-use syntax::codemap::Span;
-use syntax::{ast, codemap};
+use syntax::ast;
use syntax::parse::token;
+use syntax_pos::{self, Span};
// From DWARF 5.
&[],
containing_scope,
NO_FILE_METADATA,
- codemap::DUMMY_SP)
+ syntax_pos::DUMMY_SP)
}
pub fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
&[sole_struct_member_description],
self.containing_scope,
self.file_metadata,
- codemap::DUMMY_SP);
+ syntax_pos::DUMMY_SP);
// Encode the information about the null variant in the union
// member's name.
let discriminant_base_type_metadata =
type_metadata(cx,
adt::ty_of_inttype(cx.tcx(), inttype),
- codemap::DUMMY_SP);
+ syntax_pos::DUMMY_SP);
let discriminant_name = get_enum_discriminant_name(cx, enum_def_id);
let name = CString::new(discriminant_name.as_bytes()).unwrap();
let node_def_id = cx.tcx().map.local_def_id(node_id);
let (var_scope, span) = get_namespace_and_span_for_item(cx, node_def_id);
- let (file_metadata, line_number) = if span != codemap::DUMMY_SP {
+ let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP {
let loc = span_start(cx, span);
(file_metadata(cx, &loc.file.name, &loc.file.abs_path), loc.line as c_uint)
} else {
use std::ffi::CString;
use std::ptr;
-use syntax::codemap::{Span, Pos};
-use syntax::{ast, codemap};
+use syntax_pos::{self, Span, Pos};
+use syntax::ast;
use syntax::attr::IntType;
pub mod gdb;
let (containing_scope, span) = get_containing_scope_and_span(cx, instance);
// This can be the case for functions inlined from another crate
- if span == codemap::DUMMY_SP {
+ if span == syntax_pos::DUMMY_SP {
return FunctionDebugContext::FunctionWithoutDebugInfo;
}
signature.push(match sig.output {
ty::FnConverging(ret_ty) => match ret_ty.sty {
ty::TyTuple(ref tys) if tys.is_empty() => ptr::null_mut(),
- _ => type_metadata(cx, ret_ty, codemap::DUMMY_SP)
+ _ => type_metadata(cx, ret_ty, syntax_pos::DUMMY_SP)
},
ty::FnDiverging => diverging_type_metadata(cx)
});
// Arguments types
for &argument_type in inputs {
- signature.push(type_metadata(cx, argument_type, codemap::DUMMY_SP));
+ signature.push(type_metadata(cx, argument_type, syntax_pos::DUMMY_SP));
}
if abi == Abi::RustCall && !sig.inputs.is_empty() {
if let ty::TyTuple(args) = sig.inputs[sig.inputs.len() - 1].sty {
for &argument_type in args {
- signature.push(type_metadata(cx, argument_type, codemap::DUMMY_SP));
+ signature.push(type_metadata(cx, argument_type, syntax_pos::DUMMY_SP));
}
}
}
let template_params: Vec<_> = if cx.sess().opts.debuginfo == FullDebugInfo {
generics.types.as_slice().iter().enumerate().map(|(i, param)| {
let actual_type = cx.tcx().normalize_associated_type(&actual_types[i]);
- let actual_type_metadata = type_metadata(cx, actual_type, codemap::DUMMY_SP);
+ let actual_type_metadata = type_metadata(cx, actual_type, syntax_pos::DUMMY_SP);
let name = CString::new(param.name.as_str().as_bytes()).unwrap();
unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
let impl_self_ty = monomorphize::apply_param_substs(cx.tcx(),
instance.substs,
&impl_self_ty);
- Some(type_metadata(cx, impl_self_ty, codemap::DUMMY_SP))
+ Some(type_metadata(cx, impl_self_ty, syntax_pos::DUMMY_SP))
} else {
// For trait method impls we still use the "parallel namespace"
// strategy
// Try to get some span information, if we have an inlined item.
let definition_span = match cx.external().borrow().get(&instance.def) {
Some(&Some(node_id)) => cx.tcx().map.span(node_id),
- _ => cx.tcx().map.def_id_span(instance.def, codemap::DUMMY_SP)
+ _ => cx.tcx().map.def_id_span(instance.def, syntax_pos::DUMMY_SP)
};
(containing_scope, definition_span)
use libc::c_uint;
use std::ffi::CString;
use std::ptr;
-use syntax::codemap::DUMMY_SP;
+use syntax_pos::DUMMY_SP;
pub fn mangled_name_of_item(ccx: &CrateContext, def_id: DefId, extra: &str) -> String {
fn fill_nested(ccx: &CrateContext, def_id: DefId, extra: &str, output: &mut String) {
use libc::c_uint;
use std::ptr;
-use syntax::codemap::{Span, Pos};
-use syntax::{ast, codemap};
+use syntax_pos::{self, Span, Pos};
+use syntax::ast;
pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
node_id: ast::NodeId,
if !bytes.is_empty() && &bytes[bytes.len()-1..] == b"}" {
cleanup_span = Span {
- lo: node_span.hi - codemap::BytePos(1),
+ lo: node_span.hi - syntax_pos::BytePos(1),
hi: node_span.hi,
expn_id: node_span.expn_id
};
use common::{CrateContext, FunctionContext};
use type_::Type;
-use syntax::codemap::Span;
-use syntax::{ast, codemap};
+use syntax_pos::{self, Span};
+use syntax::ast;
pub fn is_node_local_to_unit(cx: &CrateContext, node_id: ast::NodeId) -> bool
{
};
}
-/// Return codemap::Loc corresponding to the beginning of the span
-pub fn span_start(cx: &CrateContext, span: Span) -> codemap::Loc {
+/// Return syntax_pos::Loc corresponding to the beginning of the span
+pub fn span_start(cx: &CrateContext, span: Span) -> syntax_pos::Loc {
cx.sess().codemap().lookup_char_pos(span.lo)
}
// Try to get some span information, if we have an inlined item.
let definition_span = match cx.external().borrow().get(&def_id) {
Some(&Some(node_id)) => cx.tcx().map.span(node_id),
- _ => cx.tcx().map.def_id_span(def_id, codemap::DUMMY_SP)
+ _ => cx.tcx().map.def_id_span(def_id, syntax_pos::DUMMY_SP)
};
(containing_scope, definition_span)
use rustc::hir;
-use syntax::{ast, codemap};
+use syntax::ast;
use syntax::parse::token::InternedString;
+use syntax_pos;
use std::fmt;
use std::mem;
}
fn coerce_unsized<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- span: codemap::Span,
+ span: syntax_pos::Span,
source: Datum<'tcx, Rvalue>,
target: Datum<'tcx, Rvalue>)
-> Block<'blk, 'tcx> {
fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
fields: &[hir::Field],
base: Option<&hir::Expr>,
- expr_span: codemap::Span,
+ expr_span: syntax_pos::Span,
expr_id: ast::NodeId,
ty: Ty<'tcx>,
dest: Dest) -> Block<'blk, 'tcx> {
use value::Value;
use arena::TypedArena;
-use syntax::codemap::DUMMY_SP;
+use syntax_pos::DUMMY_SP;
pub fn trans_exchange_free_dyn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
v: ValueRef,
use syntax::parse::token;
use rustc::session::Session;
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
use std::cmp::Ordering;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
+extern crate syntax_pos;
+extern crate rustc_errors as errors;
pub use rustc::session;
pub use rustc::middle;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use syntax::ast::Name;
-use syntax::codemap::DUMMY_SP;
+use syntax_pos::DUMMY_SP;
// drop_glue pointer, size, align.
const VTABLE_OFFSET: usize = 3;
use type_::Type;
use value::Value;
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
use std::ptr;
use machine;
use type_of;
-use syntax::codemap::DUMMY_SP;
+use syntax_pos::DUMMY_SP;
use syntax::parse::token::keywords;
use std::ops::Deref;
if let mir::AggregateKind::Closure(def_id, substs) = *kind {
use rustc::hir;
use syntax::ast::DUMMY_NODE_ID;
- use syntax::codemap::DUMMY_SP;
use syntax::ptr::P;
+ use syntax_pos::DUMMY_SP;
use closure;
closure::trans_closure_expr(closure::Dest::Ignore(bcx.ccx()),
use rustc::hir;
use syntax::attr;
-use syntax::errors;
+use errors;
use std::fmt;
rustc_const_eval = { path = "../librustc_const_eval" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_platform_intrinsics = { path = "../librustc_platform_intrinsics" }
+syntax_pos = { path = "../libsyntax_pos" }
+rustc_errors = { path = "../librustc_errors" }
use rustc_const_math::ConstInt;
use std::cell::RefCell;
use syntax::{abi, ast};
-use syntax::codemap::{Span, Pos};
-use syntax::errors::DiagnosticBuilder;
use syntax::feature_gate::{GateIssue, emit_feature_err};
use syntax::parse::token::{self, keywords};
+use syntax_pos::{Span, Pos};
+use errors::DiagnosticBuilder;
pub trait AstConv<'gcx, 'tcx> {
fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::cmp;
use syntax::ast;
-use syntax::codemap::{Span, Spanned};
+use syntax::codemap::Spanned;
use syntax::ptr::P;
+use syntax_pos::Span;
use rustc::hir::{self, PatKind};
use rustc::hir::print as pprust;
SelectionContext, ObligationCause};
use rustc::ty::fold::TypeFoldable;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
//FIXME(@jroesch): Ideally we should be able to drop the fulfillment_cx argument.
pub fn normalize_associated_types_in<'a, 'gcx, 'tcx, T>(
use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue};
use rustc::hir;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use syntax::parse::token;
#[derive(Copy, Clone, Debug)]
use hir::def_id::DefId;
use rustc::infer;
use rustc::ty::{self, LvaluePreference, Ty};
-use syntax::codemap::Span;
use syntax::parse::token;
use syntax::ptr::P;
+use syntax_pos::Span;
use rustc::hir;
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::ty::cast::{CastKind, CastTy};
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use util::common::ErrorReported;
/// Reifies a cast check to be checked once we have full type information for
use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace};
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use CrateCtxt;
use super::assoc;
use rustc::ty::Ty;
use rustc::infer::{InferOk, TypeOrigin};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir;
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
use util::nodemap::FnvHashSet;
use syntax::ast;
-use syntax::codemap::{self, Span};
+use syntax_pos::{self, Span};
/// check_drop_impl confirms that the Drop implementation identfied by
/// `drop_impl_did` is not any more specialized than the type it is
_ => {
// Destructors only work on nominal types. This was
// already checked by coherence, so we can panic here.
- let span = ccx.tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP);
+ let span = ccx.tcx.map.def_id_span(drop_impl_did, syntax_pos::DUMMY_SP);
span_bug!(span,
"should have been rejected by coherence check: {}",
dtor_self_type);
let named_type = tcx.lookup_item_type(self_type_did).ty;
let named_type = named_type.subst(tcx, &infcx.parameter_environment.free_substs);
- let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP);
+ let drop_impl_span = tcx.map.def_id_span(drop_impl_did, syntax_pos::DUMMY_SP);
let fresh_impl_substs =
infcx.fresh_substs_for_generics(drop_impl_span, drop_impl_generics);
let fresh_impl_self_ty = drop_impl_ty.subst(tcx, &fresh_impl_substs);
let self_type_node_id = tcx.map.as_local_node_id(self_type_did).unwrap();
- let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP);
+ let drop_impl_span = tcx.map.def_id_span(drop_impl_did, syntax_pos::DUMMY_SP);
// We can assume the predicates attached to struct/enum definition
// hold.
use std::collections::{HashMap};
use syntax::abi::Abi;
use syntax::ast;
-use syntax::codemap::Span;
use syntax::parse::token;
+use syntax_pos::Span;
use rustc::hir;
use rustc::ty::adjustment::{AdjustDerefRef, AutoDerefRef, AutoPtr};
use rustc::ty::fold::TypeFoldable;
use rustc::infer::{self, InferOk, TypeOrigin};
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir;
use std::ops::Deref;
use rustc::infer;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir;
use rustc::ty::{self, Ty, ToPolyTraitRef, TraitRef, TypeFoldable};
use rustc::infer::{InferOk, TypeOrigin};
use syntax::ast;
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
use rustc::hir;
use std::collections::HashSet;
use std::mem;
use rustc::traits::{Obligation, SelectionContext};
use util::nodemap::{FnvHashSet};
-
use syntax::ast;
-use syntax::codemap::Span;
-use syntax::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
+use syntax_pos::Span;
+
use rustc::hir::print as pprust;
use rustc::hir;
use rustc::hir::Expr_;
use syntax::ast;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
-use syntax::codemap::{self, Span, Spanned};
-use syntax::errors::DiagnosticBuilder;
+use syntax::codemap::{self, Spanned};
use syntax::parse::token::{self, InternedString, keywords};
use syntax::ptr::P;
use syntax::util::lev_distance::find_best_match_for_name;
+use syntax_pos::{self, Span};
+use errors::DiagnosticBuilder;
use rustc::hir::intravisit::{self, Visitor};
use rustc::hir::{self, PatKind};
for ty in &self.unsolved_variables() {
if let ty::TyInfer(_) = self.shallow_resolve(ty).sty {
debug!("default_type_parameters: defaulting `{:?}` to error", ty);
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx().types.err);
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx().types.err);
}
}
return;
if self.type_var_diverges(resolved) {
debug!("default_type_parameters: defaulting `{:?}` to `()` because it diverges",
resolved);
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil());
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.mk_nil());
} else {
match self.type_is_unconstrained_numeric(resolved) {
UnconstrainedInt => {
debug!("default_type_parameters: defaulting `{:?}` to `i32`",
resolved);
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32)
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
},
UnconstrainedFloat => {
debug!("default_type_parameters: defaulting `{:?}` to `f32`",
resolved);
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64)
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
}
Neither => { }
}
for ty in &unsolved_variables {
let resolved = self.resolve_type_vars_if_possible(ty);
if self.type_var_diverges(resolved) {
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil());
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.mk_nil());
} else {
match self.type_is_unconstrained_numeric(resolved) {
UnconstrainedInt | UnconstrainedFloat => {
let _ = self.commit_if_ok(|_: &infer::CombinedSnapshot| {
for ty in &unbound_tyvars {
if self.type_var_diverges(ty) {
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil());
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.mk_nil());
} else {
match self.type_is_unconstrained_numeric(ty) {
UnconstrainedInt => {
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32)
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
},
UnconstrainedFloat => {
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64)
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
}
Neither => {
if let Some(default) = default_map.get(ty) {
self.find_conflicting_default(&unbound_tyvars, &default_map, conflict)
.unwrap_or(type_variable::Default {
ty: self.next_ty_var(),
- origin_span: codemap::DUMMY_SP,
+ origin_span: syntax_pos::DUMMY_SP,
def_id: self.tcx.map.local_def_id(0) // what do I put here?
});
// reporting for more then one conflict.
for ty in &unbound_tyvars {
if self.type_var_diverges(ty) {
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil());
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.mk_nil());
} else {
match self.type_is_unconstrained_numeric(ty) {
UnconstrainedInt => {
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32)
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.i32)
},
UnconstrainedFloat => {
- self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64)
+ self.demand_eqtype(syntax_pos::DUMMY_SP, *ty, self.tcx.types.f64)
},
Neither => {
if let Some(default) = default_map.get(ty) {
use std::mem;
use std::ops::Deref;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir::intravisit::{self, Visitor};
use rustc::hir::{self, PatKind};
use rustc::infer::UpvarRegion;
use std::collections::HashSet;
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir;
use rustc::hir::intravisit::{self, Visitor};
use std::collections::HashSet;
use syntax::ast;
-use syntax::codemap::{Span};
-use syntax::errors::DiagnosticBuilder;
use syntax::parse::token::keywords;
+use syntax_pos::Span;
+use errors::DiagnosticBuilder;
+
use rustc::hir::intravisit::{self, Visitor};
use rustc::hir;
use std::cell::Cell;
use syntax::ast;
-use syntax::codemap::{DUMMY_SP, Span};
+use syntax_pos::{DUMMY_SP, Span};
+
use rustc::hir::print::pat_to_string;
use rustc::hir::intravisit::{self, Visitor};
use rustc::hir::{self, PatKind};
use rustc::ty::TyCtxt;
use syntax::ast;
-use syntax::codemap::{Span, DUMMY_SP};
+use syntax_pos::{Span, DUMMY_SP};
use rustc::hir;
use rustc::hir::intravisit::Visitor;
use rustc::infer::{self, InferCtxt, TypeOrigin};
use std::cell::RefCell;
use std::rc::Rc;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use util::nodemap::{DefIdMap, FnvHashMap};
use rustc::dep_graph::DepNode;
use rustc::hir::map as hir_map;
use rustc::traits;
use rustc::ty::{self, TyCtxt};
use syntax::ast;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::dep_graph::DepNode;
use rustc::hir::intravisit;
use rustc::hir;
use std::rc::Rc;
use syntax::{abi, ast, attr};
-use syntax::codemap::Span;
use syntax::parse::token::keywords;
use syntax::ptr::P;
+use syntax_pos::Span;
+
use rustc::hir::{self, PatKind};
use rustc::hir::intravisit;
use rustc::hir::print as pprust;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
+extern crate syntax_pos;
extern crate arena;
extern crate fmt_macros;
extern crate rustc_back;
extern crate rustc_const_math;
extern crate rustc_const_eval;
+extern crate rustc_errors as errors;
pub use rustc::dep_graph;
pub use rustc::hir;
use session::{config, CompileResult};
use util::common::time;
-use syntax::codemap::Span;
use syntax::ast;
use syntax::abi::Abi;
+use syntax_pos::Span;
use std::cell::RefCell;
use util::nodemap::NodeMap;
use rustc::ty;
use std::cell::Cell;
-use syntax::codemap::Span;
+use syntax_pos::Span;
#[derive(Clone)]
pub struct ElisionFailureInfo {
rustc_back = { path = "../librustc_back" }
rustc_const_eval = { path = "../librustc_const_eval" }
rustc_driver = { path = "../librustc_driver" }
+rustc_errors = { path = "../librustc_errors" }
rustc_lint = { path = "../librustc_lint" }
rustc_metadata = { path = "../librustc_metadata" }
rustc_resolve = { path = "../librustc_resolve" }
rustc_trans = { path = "../librustc_trans" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
log = { path = "../liblog" }
[build-dependencies]
pub use self::FunctionRetTy::*;
pub use self::Visibility::*;
-use syntax;
use syntax::abi::Abi;
use syntax::ast;
use syntax::attr;
use syntax::attr::{AttributeMethods, AttrMetaMethods};
-use syntax::codemap;
-use syntax::codemap::{DUMMY_SP, Pos, Spanned};
+use syntax::codemap::Spanned;
use syntax::parse::token::{self, InternedString, keywords};
use syntax::ptr::P;
+use syntax_pos::{self, DUMMY_SP, Pos};
use rustc_trans::back::link;
use rustc::middle::cstore;
}
}
fn meta_item_list<'a>(&'a self) -> Option<&'a [P<ast::MetaItem>]> { None }
- fn span(&self) -> codemap::Span { unimplemented!() }
+ fn span(&self) -> syntax_pos::Span { unimplemented!() }
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
}
}
-impl Clean<Span> for syntax::codemap::Span {
+impl Clean<Span> for syntax_pos::Span {
fn clean(&self, cx: &DocContext) -> Span {
if *self == DUMMY_SP {
return Span::empty();
fn to_src(&self, cx: &DocContext) -> String;
}
-impl ToSource for syntax::codemap::Span {
+impl ToSource for syntax_pos::Span {
fn to_src(&self, cx: &DocContext) -> String {
debug!("converting span {:?} to snippet", self.clean(cx));
let sn = match cx.sess().codemap().span_to_snippet(*self) {
use rustc_resolve as resolve;
use rustc_metadata::cstore::CStore;
-use syntax::{ast, codemap, errors};
-use syntax::errors::emitter::ColorConfig;
+use syntax::{ast, codemap};
use syntax::feature_gate::UnstableFeatures;
use syntax::parse::token;
+use errors;
+use errors::emitter::ColorConfig;
use std::cell::{RefCell, Cell};
use std::collections::{HashMap, HashSet};
pub use self::StructType::*;
pub use self::TypeBound::*;
-use syntax;
-use syntax::codemap::Span;
use syntax::abi;
use syntax::ast;
use syntax::ast::{Name, NodeId};
use syntax::attr;
use syntax::ptr::P;
+use syntax_pos::{self, Span};
+
use rustc::hir;
pub struct Module {
vis: hir::Inherited,
stab: None,
depr: None,
- where_outer: syntax::codemap::DUMMY_SP,
- where_inner: syntax::codemap::DUMMY_SP,
+ where_outer: syntax_pos::DUMMY_SP,
+ where_inner: syntax_pos::DUMMY_SP,
attrs : hir::HirVec::new(),
extern_crates: Vec::new(),
imports : Vec::new(),
use std::io;
use std::io::prelude::*;
-use syntax::codemap::{CodeMap, Span};
+use syntax::codemap::CodeMap;
use syntax::parse::lexer::{self, Reader, TokenAndSpan};
use syntax::parse::token;
use syntax::parse;
+use syntax_pos::Span;
/// Highlights `src`, returning the HTML output.
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>) -> String {
extern crate rustc_metadata;
extern crate serialize;
#[macro_use] extern crate syntax;
+extern crate syntax_pos;
extern crate test as testing;
extern crate rustc_unicode;
#[macro_use] extern crate log;
+extern crate rustc_errors as errors;
extern crate serialize as rustc_serialize; // used by deriving
use rustc_metadata::cstore::CStore;
use rustc_resolve::MakeGlobMap;
use syntax::codemap::CodeMap;
-use syntax::errors;
-use syntax::errors::emitter::ColorConfig;
+use errors;
+use errors::emitter::ColorConfig;
use syntax::parse::token;
use core;
let data = Arc::new(Mutex::new(Vec::new()));
let codemap = Rc::new(CodeMap::new());
let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()),
- None,
- codemap.clone());
+ None,
+ codemap.clone(),
+ errors::snippet::FormatMode::EnvironmentSelected);
let old = io::set_panic(box Sink(data.clone()));
let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout()));
use syntax::ast;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
-use syntax::codemap::Span;
+use syntax_pos::Span;
use rustc::hir::map as hir_map;
use rustc::hir::def::Def;
serialize = { path = "../libserialize" }
log = { path = "../liblog" }
rustc_bitflags = { path = "../librustc_bitflags" }
+syntax_pos = { path = "../libsyntax_pos" }
+rustc_errors = { path = "../librustc_errors" }
pub use self::UnsafeSource::*;
pub use self::ViewPath_::*;
pub use self::PathParameters::*;
+pub use util::ThinVec;
-use attr::{ThinAttributes, HasAttrs};
-use codemap::{mk_sp, respan, Span, Spanned, DUMMY_SP, ExpnId};
+use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId};
+use codemap::{respan, Spanned};
use abi::Abi;
use errors;
-use ext::base;
-use ext::tt::macro_parser;
use parse::token::{self, keywords, InternedString};
-use parse::lexer;
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use print::pprust;
use ptr::P;
+use tokenstream::{TokenTree};
use std::fmt;
use std::rc::Rc;
-use std::borrow::Cow;
use std::hash::{Hash, Hasher};
use serialize::{Encodable, Decodable, Encoder, Decoder};
pub struct Block {
/// Statements in a block
pub stmts: Vec<Stmt>,
- /// An expression at the end of the block
- /// without a semicolon, if any
- pub expr: Option<P<Expr>>,
pub id: NodeId,
/// Distinguishes between `unsafe { ... }` and `{ ... }`
pub rules: BlockCheckMode,
PatKind::Range(_, _) |
PatKind::Ident(_, _, _) |
PatKind::Path(..) |
- PatKind::QPath(_, _) |
PatKind::Mac(_) => {
true
}
/// 0 <= position <= subpats.len()
TupleStruct(Path, Vec<P<Pat>>, Option<usize>),
- /// A path pattern.
- /// Such pattern can be resolved to a unit struct/variant or a constant.
- Path(Path),
-
- /// An associated const named using the qualified path `<T>::CONST` or
- /// `<T as Trait>::CONST`. Associated consts from inherent impls can be
- /// referred to as simply `T::CONST`, in which case they will end up as
- /// PatKind::Path, and the resolver will have to sort that out.
- QPath(QSelf, Path),
+ /// A possibly qualified path pattern.
+ /// Unqualified path patterns `A::B::C` can legally refer to variants, structs, constants
+ /// or associated constants. Qualified path patterns `<A>::B::C`/`<A as Trait>::B::C` can
+ /// only legally refer to associated constants.
+ Path(Option<QSelf>, Path),
/// A tuple pattern `(a, b)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
}
/// A statement
-pub type Stmt = Spanned<StmtKind>;
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
+pub struct Stmt {
+ pub id: NodeId,
+ pub node: StmtKind,
+ pub span: Span,
+}
impl fmt::Debug for Stmt {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "stmt({}: {})",
- self.node.id()
- .map_or(Cow::Borrowed("<macro>"),|id|Cow::Owned(id.to_string())),
- pprust::stmt_to_string(self))
+ write!(f, "stmt({}: {})", self.id.to_string(), pprust::stmt_to_string(self))
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum StmtKind {
- /// Could be an item or a local (let) binding:
- Decl(P<Decl>, NodeId),
+ /// A local (let) binding.
+ Local(P<Local>),
- /// Expr without trailing semi-colon (must have unit type):
- Expr(P<Expr>, NodeId),
+ /// An item definition.
+ Item(P<Item>),
- /// Expr with trailing semi-colon (may have any type):
- Semi(P<Expr>, NodeId),
+ /// Expr without trailing semi-colon.
+ Expr(P<Expr>),
- Mac(P<Mac>, MacStmtStyle, ThinAttributes),
-}
-
-impl StmtKind {
- pub fn id(&self) -> Option<NodeId> {
- match *self {
- StmtKind::Decl(_, id) => Some(id),
- StmtKind::Expr(_, id) => Some(id),
- StmtKind::Semi(_, id) => Some(id),
- StmtKind::Mac(..) => None,
- }
- }
+ Semi(P<Expr>),
- pub fn attrs(&self) -> &[Attribute] {
- HasAttrs::attrs(self)
- }
+ Mac(P<(Mac, MacStmtStyle, ThinVec<Attribute>)>),
}
#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub init: Option<P<Expr>>,
pub id: NodeId,
pub span: Span,
- pub attrs: ThinAttributes,
-}
-
-impl Local {
- pub fn attrs(&self) -> &[Attribute] {
- HasAttrs::attrs(self)
- }
-}
-
-pub type Decl = Spanned<DeclKind>;
-
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub enum DeclKind {
- /// A local (let) binding:
- Local(P<Local>),
- /// An item binding:
- Item(P<Item>),
-}
-
-impl Decl {
- pub fn attrs(&self) -> &[Attribute] {
- HasAttrs::attrs(self)
- }
+ pub attrs: ThinVec<Attribute>,
}
/// An arm of a 'match'.
pub id: NodeId,
pub node: ExprKind,
pub span: Span,
- pub attrs: ThinAttributes
-}
-
-impl Expr {
- pub fn attrs(&self) -> &[Attribute] {
- HasAttrs::attrs(self)
- }
+ pub attrs: ThinVec<Attribute>
}
impl fmt::Debug for Expr {
/// A `break`, with an optional label to break
Break(Option<SpannedIdent>),
/// A `continue`, with an optional label
- Again(Option<SpannedIdent>),
+ Continue(Option<SpannedIdent>),
/// A `return`, with an optional value to be returned
Ret(Option<P<Expr>>),
Ref,
}
-/// A delimited sequence of token trees
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct Delimited {
- /// The type of delimiter
- pub delim: token::DelimToken,
- /// The span covering the opening delimiter
- pub open_span: Span,
- /// The delimited sequence of token trees
- pub tts: Vec<TokenTree>,
- /// The span covering the closing delimiter
- pub close_span: Span,
-}
-
-impl Delimited {
- /// Returns the opening delimiter as a token.
- pub fn open_token(&self) -> token::Token {
- token::OpenDelim(self.delim)
- }
-
- /// Returns the closing delimiter as a token.
- pub fn close_token(&self) -> token::Token {
- token::CloseDelim(self.delim)
- }
-
- /// Returns the opening delimiter as a token tree.
- pub fn open_tt(&self) -> TokenTree {
- TokenTree::Token(self.open_span, self.open_token())
- }
-
- /// Returns the closing delimiter as a token tree.
- pub fn close_tt(&self) -> TokenTree {
- TokenTree::Token(self.close_span, self.close_token())
- }
-}
-
-/// A sequence of token trees
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct SequenceRepetition {
- /// The sequence of token trees
- pub tts: Vec<TokenTree>,
- /// The optional separator
- pub separator: Option<token::Token>,
- /// Whether the sequence can be repeated zero (*), or one or more times (+)
- pub op: KleeneOp,
- /// The number of `MatchNt`s that appear in the sequence (and subsequences)
- pub num_captures: usize,
-}
-
-/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
-/// for token sequences.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum KleeneOp {
- ZeroOrMore,
- OneOrMore,
-}
-
-/// When the main rust parser encounters a syntax-extension invocation, it
-/// parses the arguments to the invocation as a token-tree. This is a very
-/// loose structure, such that all sorts of different AST-fragments can
-/// be passed to syntax extensions using a uniform type.
-///
-/// If the syntax extension is an MBE macro, it will attempt to match its
-/// LHS token tree against the provided token tree, and if it finds a
-/// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
-///
-/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
-/// Nothing special happens to misnamed or misplaced `SubstNt`s.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub enum TokenTree {
- /// A single token
- Token(Span, token::Token),
- /// A delimited sequence of token trees
- Delimited(Span, Rc<Delimited>),
-
- // This only makes sense in MBE macros.
-
- /// A kleene-style repetition sequence with a span
- // FIXME(eddyb) #12938 Use DST.
- Sequence(Span, Rc<SequenceRepetition>),
-}
-
-impl TokenTree {
- pub fn len(&self) -> usize {
- match *self {
- TokenTree::Token(_, token::DocComment(name)) => {
- match doc_comment_style(&name.as_str()) {
- AttrStyle::Outer => 2,
- AttrStyle::Inner => 3
- }
- }
- TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
- TokenTree::Token(_, token::MatchNt(..)) => 3,
- TokenTree::Delimited(_, ref delimed) => {
- delimed.tts.len() + 2
- }
- TokenTree::Sequence(_, ref seq) => {
- seq.tts.len()
- }
- TokenTree::Token(..) => 0
- }
- }
-
- pub fn get_tt(&self, index: usize) -> TokenTree {
- match (self, index) {
- (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
- TokenTree::Token(sp, token::Pound)
- }
- (&TokenTree::Token(sp, token::DocComment(name)), 1)
- if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
- TokenTree::Token(sp, token::Not)
- }
- (&TokenTree::Token(sp, token::DocComment(name)), _) => {
- let stripped = strip_doc_comment_decoration(&name.as_str());
-
- // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
- // required to wrap the text.
- let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
- *cnt = if x == '"' {
- 1
- } else if *cnt != 0 && x == '#' {
- *cnt + 1
- } else {
- 0
- };
- Some(*cnt)
- }).max().unwrap_or(0);
-
- TokenTree::Delimited(sp, Rc::new(Delimited {
- delim: token::Bracket,
- open_span: sp,
- tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
- TokenTree::Token(sp, token::Eq),
- TokenTree::Token(sp, token::Literal(
- token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
- close_span: sp,
- }))
- }
- (&TokenTree::Delimited(_, ref delimed), _) => {
- if index == 0 {
- return delimed.open_tt();
- }
- if index == delimed.tts.len() + 1 {
- return delimed.close_tt();
- }
- delimed.tts[index - 1].clone()
- }
- (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
- let v = [TokenTree::Token(sp, token::Dollar),
- TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
- v[index].clone()
- }
- (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
- let v = [TokenTree::Token(sp, token::SubstNt(name)),
- TokenTree::Token(sp, token::Colon),
- TokenTree::Token(sp, token::Ident(kind))];
- v[index].clone()
- }
- (&TokenTree::Sequence(_, ref seq), _) => {
- seq.tts[index].clone()
- }
- _ => panic!("Cannot expand a token tree")
- }
- }
-
- /// Returns the `Span` corresponding to this token tree.
- pub fn get_span(&self) -> Span {
- match *self {
- TokenTree::Token(span, _) => span,
- TokenTree::Delimited(span, _) => span,
- TokenTree::Sequence(span, _) => span,
- }
- }
-
- /// Use this token tree as a matcher to parse given tts.
- pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
- -> macro_parser::NamedParseResult {
- // `None` is because we're not interpolating
- let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
- None,
- None,
- tts.iter().cloned().collect(),
- true);
- macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
- }
-}
-
pub type Mac = Spanned<Mac_>;
/// Represents a macro invocation. The Path indicates which macro
pub struct Mac_ {
pub path: Path,
pub tts: Vec<TokenTree>,
- pub ctxt: SyntaxContext,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
Const(P<Ty>, Option<P<Expr>>),
Method(MethodSig, Option<P<Block>>),
Type(TyParamBounds, Option<P<Ty>>),
+ Macro(Mac),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub span: Span,
}
-impl Item {
- pub fn attrs(&self) -> &[Attribute] {
- &self.attrs
- }
-}
-
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ItemKind {
/// An`extern crate` item, with optional original crate name.
use ast;
use ast::{AttrId, Attribute, Attribute_, MetaItem, MetaItemKind};
-use ast::{Stmt, StmtKind, DeclKind};
-use ast::{Expr, Item, Local, Decl};
-use codemap::{Span, Spanned, spanned, dummy_spanned};
-use codemap::BytePos;
+use ast::{Expr, Item, Local, Stmt, StmtKind};
+use codemap::{spanned, dummy_spanned, Spanned};
+use syntax_pos::{Span, BytePos};
use errors::Handler;
use feature_gate::{Features, GatedCfg};
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::token::InternedString;
use parse::{ParseSess, token};
use ptr::P;
+use util::ThinVec;
use std::cell::{RefCell, Cell};
use std::collections::HashSet;
}
}
-/// A list of attributes, behind a optional box as
-/// a space optimization.
-pub type ThinAttributes = Option<Box<Vec<Attribute>>>;
-
-pub trait ThinAttributesExt {
- fn map_thin_attrs<F>(self, f: F) -> Self
- where F: FnOnce(Vec<Attribute>) -> Vec<Attribute>;
- fn prepend(mut self, attrs: Self) -> Self;
- fn append(mut self, attrs: Self) -> Self;
- fn update<F>(&mut self, f: F)
- where Self: Sized,
- F: FnOnce(Self) -> Self;
- fn as_attr_slice(&self) -> &[Attribute];
- fn into_attr_vec(self) -> Vec<Attribute>;
-}
-
-impl ThinAttributesExt for ThinAttributes {
- fn map_thin_attrs<F>(self, f: F) -> Self
- where F: FnOnce(Vec<Attribute>) -> Vec<Attribute>
- {
- f(self.map(|b| *b).unwrap_or(Vec::new())).into_thin_attrs()
- }
-
- fn prepend(self, attrs: ThinAttributes) -> Self {
- attrs.map_thin_attrs(|mut attrs| {
- attrs.extend(self.into_attr_vec());
- attrs
- })
- }
-
- fn append(self, attrs: ThinAttributes) -> Self {
- self.map_thin_attrs(|mut self_| {
- self_.extend(attrs.into_attr_vec());
- self_
- })
- }
-
- fn update<F>(&mut self, f: F)
- where Self: Sized,
- F: FnOnce(ThinAttributes) -> ThinAttributes
- {
- let self_ = f(self.take());
- *self = self_;
- }
-
- fn as_attr_slice(&self) -> &[Attribute] {
- match *self {
- Some(ref b) => b,
- None => &[],
- }
- }
-
- fn into_attr_vec(self) -> Vec<Attribute> {
- match self {
- Some(b) => *b,
- None => Vec::new(),
- }
- }
-}
-
-pub trait AttributesExt {
- fn into_thin_attrs(self) -> ThinAttributes;
-}
-
-impl AttributesExt for Vec<Attribute> {
- fn into_thin_attrs(self) -> ThinAttributes {
- if self.len() == 0 {
- None
- } else {
- Some(Box::new(self))
- }
- }
-}
-
pub trait HasAttrs: Sized {
fn attrs(&self) -> &[ast::Attribute];
fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self;
}
-/// A cheap way to add Attributes to an AST node.
-pub trait WithAttrs {
- // FIXME: Could be extended to anything IntoIter<Item=Attribute>
- fn with_attrs(self, attrs: ThinAttributes) -> Self;
-}
-
-impl<T: HasAttrs> WithAttrs for T {
- fn with_attrs(self, attrs: ThinAttributes) -> Self {
- self.map_attrs(|mut orig_attrs| {
- orig_attrs.extend(attrs.into_attr_vec());
- orig_attrs
- })
- }
-}
-
impl HasAttrs for Vec<Attribute> {
fn attrs(&self) -> &[Attribute] {
&self
}
}
-impl HasAttrs for ThinAttributes {
+impl HasAttrs for ThinVec<Attribute> {
fn attrs(&self) -> &[Attribute] {
- self.as_attr_slice()
+ &self
}
fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
- self.map_thin_attrs(f)
+ f(self.into()).into()
}
}
}
}
-impl HasAttrs for DeclKind {
- fn attrs(&self) -> &[Attribute] {
- match *self {
- DeclKind::Local(ref local) => local.attrs(),
- DeclKind::Item(ref item) => item.attrs(),
- }
- }
-
- fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
- match self {
- DeclKind::Local(local) => DeclKind::Local(local.map_attrs(f)),
- DeclKind::Item(item) => DeclKind::Item(item.map_attrs(f)),
- }
- }
-}
-
impl HasAttrs for StmtKind {
fn attrs(&self) -> &[Attribute] {
match *self {
- StmtKind::Decl(ref decl, _) => decl.attrs(),
- StmtKind::Expr(ref expr, _) | StmtKind::Semi(ref expr, _) => expr.attrs(),
- StmtKind::Mac(_, _, ref attrs) => attrs.attrs(),
+ StmtKind::Local(ref local) => local.attrs(),
+ StmtKind::Item(ref item) => item.attrs(),
+ StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.attrs(),
+ StmtKind::Mac(ref mac) => {
+ let (_, _, ref attrs) = **mac;
+ attrs.attrs()
+ }
}
}
fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
match self {
- StmtKind::Decl(decl, id) => StmtKind::Decl(decl.map_attrs(f), id),
- StmtKind::Expr(expr, id) => StmtKind::Expr(expr.map_attrs(f), id),
- StmtKind::Semi(expr, id) => StmtKind::Semi(expr.map_attrs(f), id),
- StmtKind::Mac(mac, style, attrs) =>
- StmtKind::Mac(mac, style, attrs.map_attrs(f)),
+ StmtKind::Local(local) => StmtKind::Local(local.map_attrs(f)),
+ StmtKind::Item(item) => StmtKind::Item(item.map_attrs(f)),
+ StmtKind::Expr(expr) => StmtKind::Expr(expr.map_attrs(f)),
+ StmtKind::Semi(expr) => StmtKind::Semi(expr.map_attrs(f)),
+ StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, style, attrs)| {
+ (mac, style, attrs.map_attrs(f))
+ })),
}
}
}
Item, Expr, Local, ast::ForeignItem, ast::StructField, ast::ImplItem, ast::TraitItem, ast::Arm
}
-derive_has_attrs_from_field! { Decl: .node, Stmt: .node, ast::Variant: .node.attrs }
+derive_has_attrs_from_field! { Stmt: .node, ast::Variant: .node.attrs }
pub use self::ExpnFormat::*;
-use std::cell::{Cell, RefCell};
-use std::ops::{Add, Sub};
+use std::cell::RefCell;
use std::path::{Path,PathBuf};
use std::rc::Rc;
-use std::cmp;
use std::env;
-use std::{fmt, fs};
+use std::fs;
use std::io::{self, Read};
-
-use serialize::{Encodable, Decodable, Encoder, Decoder};
+pub use syntax_pos::*;
+use errors::CodeMapper;
use ast::Name;
-// _____________________________________________________________________________
-// Pos, BytePos, CharPos
-//
-
-pub trait Pos {
- fn from_usize(n: usize) -> Self;
- fn to_usize(&self) -> usize;
-}
-
-/// A byte offset. Keep this small (currently 32-bits), as AST contains
-/// a lot of them.
-#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
-pub struct BytePos(pub u32);
-
-/// A character offset. Because of multibyte utf8 characters, a byte offset
-/// is not equivalent to a character offset. The CodeMap will convert BytePos
-/// values to CharPos values as necessary.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
-pub struct CharPos(pub usize);
-
-// FIXME: Lots of boilerplate in these impls, but so far my attempts to fix
-// have been unsuccessful
-
-impl Pos for BytePos {
- fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
- fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize }
-}
-
-impl Add for BytePos {
- type Output = BytePos;
-
- fn add(self, rhs: BytePos) -> BytePos {
- BytePos((self.to_usize() + rhs.to_usize()) as u32)
- }
-}
-
-impl Sub for BytePos {
- type Output = BytePos;
-
- fn sub(self, rhs: BytePos) -> BytePos {
- BytePos((self.to_usize() - rhs.to_usize()) as u32)
- }
-}
-
-impl Encodable for BytePos {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_u32(self.0)
- }
-}
-
-impl Decodable for BytePos {
- fn decode<D: Decoder>(d: &mut D) -> Result<BytePos, D::Error> {
- Ok(BytePos(d.read_u32()?))
- }
-}
-
-impl Pos for CharPos {
- fn from_usize(n: usize) -> CharPos { CharPos(n) }
- fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
-}
-
-impl Add for CharPos {
- type Output = CharPos;
-
- fn add(self, rhs: CharPos) -> CharPos {
- CharPos(self.to_usize() + rhs.to_usize())
- }
-}
-
-impl Sub for CharPos {
- type Output = CharPos;
-
- fn sub(self, rhs: CharPos) -> CharPos {
- CharPos(self.to_usize() - rhs.to_usize())
- }
-}
-
-// _____________________________________________________________________________
-// Span, MultiSpan, Spanned
-//
-
-/// Spans represent a region of code, used for error reporting. Positions in spans
-/// are *absolute* positions from the beginning of the codemap, not positions
-/// relative to FileMaps. Methods on the CodeMap can be used to relate spans back
-/// to the original source.
-/// You must be careful if the span crosses more than one file - you will not be
-/// able to use many of the functions on spans in codemap and you cannot assume
-/// that the length of the span = hi - lo; there may be space in the BytePos
-/// range between files.
-#[derive(Clone, Copy, Hash, PartialEq, Eq)]
-pub struct Span {
- pub lo: BytePos,
- pub hi: BytePos,
- /// Information about where the macro came from, if this piece of
- /// code was created by a macro expansion.
- pub expn_id: ExpnId
-}
-
-/// A collection of spans. Spans have two orthogonal attributes:
-///
-/// - they can be *primary spans*. In this case they are the locus of
-/// the error, and would be rendered with `^^^`.
-/// - they can have a *label*. In this case, the label is written next
-/// to the mark in the snippet when we render.
-#[derive(Clone)]
-pub struct MultiSpan {
- primary_spans: Vec<Span>,
- span_labels: Vec<(Span, String)>,
-}
-
-#[derive(Clone, Debug)]
-pub struct SpanLabel {
- /// The span we are going to include in the final snippet.
- pub span: Span,
-
- /// Is this a primary span? This is the "locus" of the message,
- /// and is indicated with a `^^^^` underline, versus `----`.
- pub is_primary: bool,
-
- /// What label should we attach to this span (if any)?
- pub label: Option<String>,
-}
-
-pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION };
-
-// Generic span to be used for code originating from the command line
-pub const COMMAND_LINE_SP: Span = Span { lo: BytePos(0),
- hi: BytePos(0),
- expn_id: COMMAND_LINE_EXPN };
-
-impl Span {
- /// Returns a new span representing just the end-point of this span
- pub fn end_point(self) -> Span {
- let lo = cmp::max(self.hi.0 - 1, self.lo.0);
- Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id}
- }
-
- /// Returns `self` if `self` is not the dummy span, and `other` otherwise.
- pub fn substitute_dummy(self, other: Span) -> Span {
- if self.source_equal(&DUMMY_SP) { other } else { self }
- }
-
- pub fn contains(self, other: Span) -> bool {
- self.lo <= other.lo && other.hi <= self.hi
- }
-
- /// Return true if the spans are equal with regards to the source text.
- ///
- /// Use this instead of `==` when either span could be generated code,
- /// and you only care that they point to the same bytes of source text.
- pub fn source_equal(&self, other: &Span) -> bool {
- self.lo == other.lo && self.hi == other.hi
- }
-
- /// Returns `Some(span)`, a union of `self` and `other`, on overlap.
- pub fn merge(self, other: Span) -> Option<Span> {
- if self.expn_id != other.expn_id {
- return None;
- }
-
- if (self.lo <= other.lo && self.hi > other.lo) ||
- (self.lo >= other.lo && self.lo < other.hi) {
- Some(Span {
- lo: cmp::min(self.lo, other.lo),
- hi: cmp::max(self.hi, other.hi),
- expn_id: self.expn_id,
- })
- } else {
- None
- }
- }
-
- /// Returns `Some(span)`, where the start is trimmed by the end of `other`
- pub fn trim_start(self, other: Span) -> Option<Span> {
- if self.hi > other.hi {
- Some(Span { lo: cmp::max(self.lo, other.hi), .. self })
- } else {
- None
- }
- }
-}
-
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub struct Spanned<T> {
- pub node: T,
- pub span: Span,
-}
-
-impl Encodable for Span {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_struct("Span", 2, |s| {
- s.emit_struct_field("lo", 0, |s| {
- self.lo.encode(s)
- })?;
-
- s.emit_struct_field("hi", 1, |s| {
- self.hi.encode(s)
- })
- })
- }
-}
-
-impl Decodable for Span {
- fn decode<D: Decoder>(d: &mut D) -> Result<Span, D::Error> {
- d.read_struct("Span", 2, |d| {
- let lo = d.read_struct_field("lo", 0, |d| {
- BytePos::decode(d)
- })?;
-
- let hi = d.read_struct_field("hi", 1, |d| {
- BytePos::decode(d)
- })?;
-
- Ok(mk_sp(lo, hi))
- })
- }
-}
-
-fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "Span {{ lo: {:?}, hi: {:?}, expn_id: {:?} }}",
- span.lo, span.hi, span.expn_id)
-}
-
-thread_local!(pub static SPAN_DEBUG: Cell<fn(Span, &mut fmt::Formatter) -> fmt::Result> =
- Cell::new(default_span_debug));
-
-impl fmt::Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- SPAN_DEBUG.with(|span_debug| span_debug.get()(*self, f))
- }
-}
-
-pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
- respan(mk_sp(lo, hi), t)
-}
-
-pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
- Spanned {node: t, span: sp}
-}
-
-pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
- respan(DUMMY_SP, t)
-}
-
-/* assuming that we're not in macro expansion */
-pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
- Span {lo: lo, hi: hi, expn_id: NO_EXPANSION}
-}
-
/// Return the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain.
}
}
-impl MultiSpan {
- pub fn new() -> MultiSpan {
- MultiSpan {
- primary_spans: vec![],
- span_labels: vec![]
- }
- }
-
- pub fn from_span(primary_span: Span) -> MultiSpan {
- MultiSpan {
- primary_spans: vec![primary_span],
- span_labels: vec![]
- }
- }
-
- pub fn from_spans(vec: Vec<Span>) -> MultiSpan {
- MultiSpan {
- primary_spans: vec,
- span_labels: vec![]
- }
- }
-
- pub fn push_span_label(&mut self, span: Span, label: String) {
- self.span_labels.push((span, label));
- }
-
- /// Selects the first primary span (if any)
- pub fn primary_span(&self) -> Option<Span> {
- self.primary_spans.first().cloned()
- }
-
- /// Returns all primary spans.
- pub fn primary_spans(&self) -> &[Span] {
- &self.primary_spans
- }
-
- /// Returns the strings to highlight. We always ensure that there
- /// is an entry for each of the primary spans -- for each primary
- /// span P, if there is at least one label with span P, we return
- /// those labels (marked as primary). But otherwise we return
- /// `SpanLabel` instances with empty labels.
- pub fn span_labels(&self) -> Vec<SpanLabel> {
- let is_primary = |span| self.primary_spans.contains(&span);
- let mut span_labels = vec![];
-
- for &(span, ref label) in &self.span_labels {
- span_labels.push(SpanLabel {
- span: span,
- is_primary: is_primary(span),
- label: Some(label.clone())
- });
- }
-
- for &span in &self.primary_spans {
- if !span_labels.iter().any(|sl| sl.span == span) {
- span_labels.push(SpanLabel {
- span: span,
- is_primary: true,
- label: None
- });
- }
- }
-
- span_labels
- }
+/// The source of expansion.
+#[derive(Clone, Hash, Debug, PartialEq, Eq)]
+pub enum ExpnFormat {
+ /// e.g. #[derive(...)] <item>
+ MacroAttribute(Name),
+ /// e.g. `format!()`
+ MacroBang(Name),
}
-impl From<Span> for MultiSpan {
- fn from(span: Span) -> MultiSpan {
- MultiSpan::from_span(span)
- }
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+pub struct Spanned<T> {
+ pub node: T,
+ pub span: Span,
}
-// _____________________________________________________________________________
-// Loc, LocWithOpt, FileMapAndLine, FileMapAndBytePos
-//
-
-/// A source code location used for error reporting
-#[derive(Debug)]
-pub struct Loc {
- /// Information about the original source
- pub file: Rc<FileMap>,
- /// The (1-based) line number
- pub line: usize,
- /// The (0-based) column offset
- pub col: CharPos
+pub fn spanned<T>(lo: BytePos, hi: BytePos, t: T) -> Spanned<T> {
+ respan(mk_sp(lo, hi), t)
}
-/// A source code location used as the result of lookup_char_pos_adj
-// Actually, *none* of the clients use the filename *or* file field;
-// perhaps they should just be removed.
-#[derive(Debug)]
-pub struct LocWithOpt {
- pub filename: FileName,
- pub line: usize,
- pub col: CharPos,
- pub file: Option<Rc<FileMap>>,
+pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
+ Spanned {node: t, span: sp}
}
-// used to be structural records. Better names, anyone?
-#[derive(Debug)]
-pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: usize }
-#[derive(Debug)]
-pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
-
-
-// _____________________________________________________________________________
-// ExpnFormat, NameAndSpan, ExpnInfo, ExpnId
-//
-
-/// The source of expansion.
-#[derive(Clone, Hash, Debug, PartialEq, Eq)]
-pub enum ExpnFormat {
- /// e.g. #[derive(...)] <item>
- MacroAttribute(Name),
- /// e.g. `format!()`
- MacroBang(Name),
+pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
+ respan(DUMMY_SP, t)
}
#[derive(Clone, Hash, Debug)]
pub callee: NameAndSpan
}
-#[derive(PartialEq, Eq, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Copy)]
-pub struct ExpnId(u32);
-
-pub const NO_EXPANSION: ExpnId = ExpnId(!0);
-// For code appearing from the command line
-pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1);
-
-impl ExpnId {
- pub fn from_u32(id: u32) -> ExpnId {
- ExpnId(id)
- }
-
- pub fn into_u32(self) -> u32 {
- self.0
- }
-}
-
// _____________________________________________________________________________
// FileMap, MultiByteChar, FileName, FileLines
//
-pub type FileName = String;
-
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub struct LineInfo {
- /// Index of line, starting from 0.
- pub line_index: usize,
-
- /// Column in line where span begins, starting from 0.
- pub start_col: CharPos,
-
- /// Column in line where span ends, starting from 0, exclusive.
- pub end_col: CharPos,
-}
-
-pub struct FileLines {
- pub file: Rc<FileMap>,
- pub lines: Vec<LineInfo>
-}
-
-/// Identifies an offset of a multi-byte character in a FileMap
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)]
-pub struct MultiByteChar {
- /// The absolute offset of the character in the CodeMap
- pub pos: BytePos,
- /// The number of bytes, >=2
- pub bytes: usize,
-}
-
-/// A single source in the CodeMap.
-pub struct FileMap {
- /// The name of the file that the source came from, source that doesn't
- /// originate from files has names between angle brackets by convention,
- /// e.g. `<anon>`
- pub name: FileName,
- /// The absolute path of the file that the source came from.
- pub abs_path: Option<FileName>,
- /// The complete source code
- pub src: Option<Rc<String>>,
- /// The start position of this source in the CodeMap
- pub start_pos: BytePos,
- /// The end position of this source in the CodeMap
- pub end_pos: BytePos,
- /// Locations of lines beginnings in the source code
- pub lines: RefCell<Vec<BytePos>>,
- /// Locations of multi-byte characters in the source code
- pub multibyte_chars: RefCell<Vec<MultiByteChar>>,
-}
-
-impl Encodable for FileMap {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_struct("FileMap", 6, |s| {
- s.emit_struct_field("name", 0, |s| self.name.encode(s))?;
- s.emit_struct_field("abs_path", 1, |s| self.abs_path.encode(s))?;
- s.emit_struct_field("start_pos", 2, |s| self.start_pos.encode(s))?;
- s.emit_struct_field("end_pos", 3, |s| self.end_pos.encode(s))?;
- s.emit_struct_field("lines", 4, |s| {
- let lines = self.lines.borrow();
- // store the length
- s.emit_u32(lines.len() as u32)?;
-
- if !lines.is_empty() {
- // In order to preserve some space, we exploit the fact that
- // the lines list is sorted and individual lines are
- // probably not that long. Because of that we can store lines
- // as a difference list, using as little space as possible
- // for the differences.
- let max_line_length = if lines.len() == 1 {
- 0
- } else {
- lines.windows(2)
- .map(|w| w[1] - w[0])
- .map(|bp| bp.to_usize())
- .max()
- .unwrap()
- };
-
- let bytes_per_diff: u8 = match max_line_length {
- 0 ... 0xFF => 1,
- 0x100 ... 0xFFFF => 2,
- _ => 4
- };
-
- // Encode the number of bytes used per diff.
- bytes_per_diff.encode(s)?;
-
- // Encode the first element.
- lines[0].encode(s)?;
-
- let diff_iter = (&lines[..]).windows(2)
- .map(|w| (w[1] - w[0]));
-
- match bytes_per_diff {
- 1 => for diff in diff_iter { (diff.0 as u8).encode(s)? },
- 2 => for diff in diff_iter { (diff.0 as u16).encode(s)? },
- 4 => for diff in diff_iter { diff.0.encode(s)? },
- _ => unreachable!()
- }
- }
-
- Ok(())
- })?;
- s.emit_struct_field("multibyte_chars", 5, |s| {
- (*self.multibyte_chars.borrow()).encode(s)
- })
- })
- }
-}
-
-impl Decodable for FileMap {
- fn decode<D: Decoder>(d: &mut D) -> Result<FileMap, D::Error> {
-
- d.read_struct("FileMap", 6, |d| {
- let name: String = d.read_struct_field("name", 0, |d| Decodable::decode(d))?;
- let abs_path: Option<String> =
- d.read_struct_field("abs_path", 1, |d| Decodable::decode(d))?;
- let start_pos: BytePos = d.read_struct_field("start_pos", 2, |d| Decodable::decode(d))?;
- let end_pos: BytePos = d.read_struct_field("end_pos", 3, |d| Decodable::decode(d))?;
- let lines: Vec<BytePos> = d.read_struct_field("lines", 4, |d| {
- let num_lines: u32 = Decodable::decode(d)?;
- let mut lines = Vec::with_capacity(num_lines as usize);
-
- if num_lines > 0 {
- // Read the number of bytes used per diff.
- let bytes_per_diff: u8 = Decodable::decode(d)?;
-
- // Read the first element.
- let mut line_start: BytePos = Decodable::decode(d)?;
- lines.push(line_start);
-
- for _ in 1..num_lines {
- let diff = match bytes_per_diff {
- 1 => d.read_u8()? as u32,
- 2 => d.read_u16()? as u32,
- 4 => d.read_u32()?,
- _ => unreachable!()
- };
-
- line_start = line_start + BytePos(diff);
-
- lines.push(line_start);
- }
- }
-
- Ok(lines)
- })?;
- let multibyte_chars: Vec<MultiByteChar> =
- d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?;
- Ok(FileMap {
- name: name,
- abs_path: abs_path,
- start_pos: start_pos,
- end_pos: end_pos,
- src: None,
- lines: RefCell::new(lines),
- multibyte_chars: RefCell::new(multibyte_chars)
- })
- })
- }
-}
-
-impl fmt::Debug for FileMap {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- write!(fmt, "FileMap({})", self.name)
- }
-}
-
-impl FileMap {
- /// EFFECT: register a start-of-line offset in the
- /// table of line-beginnings.
- /// UNCHECKED INVARIANT: these offsets must be added in the right
- /// order and must be in the right places; there is shared knowledge
- /// about what ends a line between this file and parse.rs
- /// WARNING: pos param here is the offset relative to start of CodeMap,
- /// and CodeMap will append a newline when adding a filemap without a newline at the end,
- /// so the safe way to call this is with value calculated as
- /// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
- pub fn next_line(&self, pos: BytePos) {
- // the new charpos must be > the last one (or it's the first one).
- let mut lines = self.lines.borrow_mut();
- let line_len = lines.len();
- assert!(line_len == 0 || ((*lines)[line_len - 1] < pos));
- lines.push(pos);
- }
-
- /// get a line from the list of pre-computed line-beginnings.
- /// line-number here is 0-based.
- pub fn get_line(&self, line_number: usize) -> Option<&str> {
- match self.src {
- Some(ref src) => {
- let lines = self.lines.borrow();
- lines.get(line_number).map(|&line| {
- let begin: BytePos = line - self.start_pos;
- let begin = begin.to_usize();
- // We can't use `lines.get(line_number+1)` because we might
- // be parsing when we call this function and thus the current
- // line is the last one we have line info for.
- let slice = &src[begin..];
- match slice.find('\n') {
- Some(e) => &slice[..e],
- None => slice
- }
- })
- }
- None => None
- }
- }
-
- pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
- assert!(bytes >=2 && bytes <= 4);
- let mbc = MultiByteChar {
- pos: pos,
- bytes: bytes,
- };
- self.multibyte_chars.borrow_mut().push(mbc);
- }
-
- pub fn is_real_file(&self) -> bool {
- !(self.name.starts_with("<") &&
- self.name.ends_with(">"))
- }
-
- pub fn is_imported(&self) -> bool {
- self.src.is_none()
- }
-
- fn count_lines(&self) -> usize {
- self.lines.borrow().len()
- }
-}
-
/// An abstraction over the fs operations used by the Parser.
pub trait FileLoader {
/// Query the existence of a file.
}
}
-pub struct MacroBacktrace {
- /// span where macro was applied to generate this code
- pub call_site: Span,
-
- /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
- pub macro_decl_name: String,
-
- /// span where macro was defined (if known)
- pub def_site_span: Option<Span>,
-}
-
-// _____________________________________________________________________________
-// SpanLinesError, SpanSnippetError, DistinctSources, MalformedCodemapPositions
-//
-
-pub type FileLinesResult = Result<FileLines, SpanLinesError>;
-
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub enum SpanLinesError {
- IllFormedSpan(Span),
- DistinctSources(DistinctSources),
-}
-
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub enum SpanSnippetError {
- IllFormedSpan(Span),
- DistinctSources(DistinctSources),
- MalformedForCodemap(MalformedCodemapPositions),
- SourceNotAvailable { filename: String }
-}
-
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub struct DistinctSources {
- begin: (String, BytePos),
- end: (String, BytePos)
-}
-
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub struct MalformedCodemapPositions {
- name: String,
- source_len: usize,
- begin_pos: BytePos,
- end_pos: BytePos
+impl CodeMapper for CodeMap {
+ fn lookup_char_pos(&self, pos: BytePos) -> Loc {
+ self.lookup_char_pos(pos)
+ }
+ fn span_to_lines(&self, sp: Span) -> FileLinesResult {
+ self.span_to_lines(sp)
+ }
+ fn span_to_string(&self, sp: Span) -> String {
+ self.span_to_string(sp)
+ }
+ fn span_to_filename(&self, sp: Span) -> FileName {
+ self.span_to_filename(sp)
+ }
+ fn macro_backtrace(&self, span: Span) -> Vec<MacroBacktrace> {
+ self.macro_backtrace(span)
+ }
}
-
// _____________________________________________________________________________
// Tests
//
#[cfg(test)]
mod tests {
use super::*;
+ use errors::{Level, CodeSuggestion};
+ use errors::emitter::EmitterWriter;
+ use errors::snippet::{SnippetData, RenderedLine, FormatMode};
+ use std::sync::{Arc, Mutex};
+ use std::io::{self, Write};
+ use std::str::from_utf8;
+ use std::rc::Rc;
#[test]
fn t1 () {
blork.rs:1:1: 1:12\n `first line.`\n");
}
+ /// Returns the span corresponding to the `n`th occurrence of
+ /// `substring` in `source_text`.
+ trait CodeMapExtension {
+ fn span_substr(&self,
+ file: &Rc<FileMap>,
+ source_text: &str,
+ substring: &str,
+ n: usize)
+ -> Span;
+ }
+
+ impl CodeMapExtension for CodeMap {
+ fn span_substr(&self,
+ file: &Rc<FileMap>,
+ source_text: &str,
+ substring: &str,
+ n: usize)
+ -> Span
+ {
+ println!("span_substr(file={:?}/{:?}, substring={:?}, n={})",
+ file.name, file.start_pos, substring, n);
+ let mut i = 0;
+ let mut hi = 0;
+ loop {
+ let offset = source_text[hi..].find(substring).unwrap_or_else(|| {
+ panic!("source_text `{}` does not have {} occurrences of `{}`, only {}",
+ source_text, n, substring, i);
+ });
+ let lo = hi + offset;
+ hi = lo + substring.len();
+ if i == n {
+ let span = Span {
+ lo: BytePos(lo as u32 + file.start_pos.0),
+ hi: BytePos(hi as u32 + file.start_pos.0),
+ expn_id: NO_EXPANSION,
+ };
+ assert_eq!(&self.span_to_snippet(span).unwrap()[..],
+ substring);
+ return span;
+ }
+ i += 1;
+ }
+ }
+ }
+
+ fn splice(start: Span, end: Span) -> Span {
+ Span {
+ lo: start.lo,
+ hi: end.hi,
+ expn_id: NO_EXPANSION,
+ }
+ }
+
+ fn make_string(lines: &[RenderedLine]) -> String {
+ lines.iter()
+ .flat_map(|rl| {
+ rl.text.iter()
+ .map(|s| &s.text[..])
+ .chain(Some("\n"))
+ })
+ .collect()
+ }
+
fn init_expansion_chain(cm: &CodeMap) -> Span {
// Creates an expansion chain containing two recursive calls
// root -> expA -> expA -> expB -> expB -> end
";
assert_eq!(sstr, res_str);
}
+
+ struct Sink(Arc<Mutex<Vec<u8>>>);
+ impl Write for Sink {
+ fn write(&mut self, data: &[u8]) -> io::Result<usize> {
+ Write::write(&mut *self.0.lock().unwrap(), data)
+ }
+ fn flush(&mut self) -> io::Result<()> { Ok(()) }
+ }
+
+ // Diagnostic doesn't align properly in span where line number increases by one digit
+ #[test]
+ fn test_hilight_suggestion_issue_11715() {
+ let data = Arc::new(Mutex::new(Vec::new()));
+ let cm = Rc::new(CodeMap::new());
+ let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())),
+ None,
+ cm.clone(),
+ FormatMode::NewErrorFormat);
+ let content = "abcdefg
+ koksi
+ line3
+ line4
+ cinq
+ line6
+ line7
+ line8
+ line9
+ line10
+ e-lä-vän
+ tolv
+ dreizehn
+ ";
+ let file = cm.new_filemap_and_lines("dummy.txt", None, content);
+ let start = file.lines.borrow()[10];
+ let end = file.lines.borrow()[11];
+ let sp = mk_sp(start, end);
+ let lvl = Level::Error;
+ println!("highlight_lines");
+ ew.highlight_lines(&sp.into(), lvl).unwrap();
+ println!("done");
+ let vec = data.lock().unwrap().clone();
+ let vec: &[u8] = &vec;
+ let str = from_utf8(vec).unwrap();
+ println!("r#\"\n{}\"#", str);
+ assert_eq!(str, &r#"
+ --> dummy.txt:11:1
+ |>
+11 |> e-lä-vän
+ |> ^
+"#[1..]);
+ }
+
+ #[test]
+ fn test_single_span_splice() {
+ // Test that a `MultiSpan` containing a single span splices a substition correctly
+ let cm = CodeMap::new();
+ let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
+ let selection = " \n ~~\n~~~\n~~~~~ \n \n";
+ cm.new_filemap_and_lines("blork.rs", None, inputtext);
+ let sp = span_from_selection(inputtext, selection);
+ let msp: MultiSpan = sp.into();
+
+ // check that we are extracting the text we thought we were extracting
+ assert_eq!(&cm.span_to_snippet(sp).unwrap(), "BB\nCCC\nDDDDD");
+
+ let substitute = "ZZZZZZ".to_owned();
+ let expected = "bbbbZZZZZZddddd";
+ let suggest = CodeSuggestion {
+ msp: msp,
+ substitutes: vec![substitute],
+ };
+ assert_eq!(suggest.splice_lines(&cm), expected);
+ }
+
+ #[test]
+ fn test_multi_span_splice() {
+ // Test that a `MultiSpan` containing multiple spans splices a substition correctly
+ let cm = CodeMap::new();
+ let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
+ let selection1 = " \n \n \n \n ~ \n"; // intentionally out of order
+ let selection2 = " \n ~~\n~~~\n~~~~~ \n \n";
+ cm.new_filemap_and_lines("blork.rs", None, inputtext);
+ let sp1 = span_from_selection(inputtext, selection1);
+ let sp2 = span_from_selection(inputtext, selection2);
+ let msp: MultiSpan = MultiSpan::from_spans(vec![sp1, sp2]);
+
+ let expected = "bbbbZZZZZZddddd\neXYZe";
+ let suggest = CodeSuggestion {
+ msp: msp,
+ substitutes: vec!["ZZZZZZ".to_owned(),
+ "XYZ".to_owned()]
+ };
+
+ assert_eq!(suggest.splice_lines(&cm), expected);
+ }
+
+ #[test]
+ fn test_multispan_highlight() {
+ let data = Arc::new(Mutex::new(Vec::new()));
+ let cm = Rc::new(CodeMap::new());
+ let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())),
+ None,
+ cm.clone(),
+ FormatMode::NewErrorFormat);
+
+ let inp = "_____aaaaaa____bbbbbb__cccccdd_";
+ let sp1 = " ~~~~~~ ";
+ let sp2 = " ~~~~~~ ";
+ let sp3 = " ~~~~~ ";
+ let sp4 = " ~~~~ ";
+ let sp34 = " ~~~~~~~ ";
+
+ let expect_start = &r#"
+ --> dummy.txt:1:6
+ |>
+1 |> _____aaaaaa____bbbbbb__cccccdd_
+ |> ^^^^^^ ^^^^^^ ^^^^^^^
+"#[1..];
+
+ let span = |sp, expected| {
+ let sp = span_from_selection(inp, sp);
+ assert_eq!(&cm.span_to_snippet(sp).unwrap(), expected);
+ sp
+ };
+ cm.new_filemap_and_lines("dummy.txt", None, inp);
+ let sp1 = span(sp1, "aaaaaa");
+ let sp2 = span(sp2, "bbbbbb");
+ let sp3 = span(sp3, "ccccc");
+ let sp4 = span(sp4, "ccdd");
+ let sp34 = span(sp34, "cccccdd");
+
+ let spans = vec![sp1, sp2, sp3, sp4];
+
+ let test = |expected, highlight: &mut FnMut()| {
+ data.lock().unwrap().clear();
+ highlight();
+ let vec = data.lock().unwrap().clone();
+ let actual = from_utf8(&vec[..]).unwrap();
+ println!("actual=\n{}", actual);
+ assert_eq!(actual, expected);
+ };
+
+ let msp = MultiSpan::from_spans(vec![sp1, sp2, sp34]);
+ test(expect_start, &mut || {
+ diag.highlight_lines(&msp, Level::Error).unwrap();
+ });
+ test(expect_start, &mut || {
+ let msp = MultiSpan::from_spans(spans.clone());
+ diag.highlight_lines(&msp, Level::Error).unwrap();
+ });
+ }
+
+ #[test]
+ fn test_huge_multispan_highlight() {
+ let data = Arc::new(Mutex::new(Vec::new()));
+ let cm = Rc::new(CodeMap::new());
+ let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())),
+ None,
+ cm.clone(),
+ FormatMode::NewErrorFormat);
+
+ let inp = "aaaaa\n\
+ aaaaa\n\
+ aaaaa\n\
+ bbbbb\n\
+ ccccc\n\
+ xxxxx\n\
+ yyyyy\n\
+ _____\n\
+ ddd__eee_\n\
+ elided\n\
+ __f_gg";
+ let file = cm.new_filemap_and_lines("dummy.txt", None, inp);
+
+ let span = |lo, hi, (off_lo, off_hi)| {
+ let lines = file.lines.borrow();
+ let (mut lo, mut hi): (BytePos, BytePos) = (lines[lo], lines[hi]);
+ lo.0 += off_lo;
+ hi.0 += off_hi;
+ mk_sp(lo, hi)
+ };
+ let sp0 = span(4, 6, (0, 5));
+ let sp1 = span(0, 6, (0, 5));
+ let sp2 = span(8, 8, (0, 3));
+ let sp3 = span(8, 8, (5, 8));
+ let sp4 = span(10, 10, (2, 3));
+ let sp5 = span(10, 10, (4, 6));
+
+ let expect0 = &r#"
+ --> dummy.txt:5:1
+ |>
+5 |> ccccc
+ |> ^
+...
+9 |> ddd__eee_
+ |> ^^^ ^^^
+10 |> elided
+11 |> __f_gg
+ |> ^ ^^
+"#[1..];
+
+ let expect = &r#"
+ --> dummy.txt:1:1
+ |>
+1 |> aaaaa
+ |> ^
+...
+9 |> ddd__eee_
+ |> ^^^ ^^^
+10 |> elided
+11 |> __f_gg
+ |> ^ ^^
+"#[1..];
+
+ macro_rules! test {
+ ($expected: expr, $highlight: expr) => ({
+ data.lock().unwrap().clear();
+ $highlight();
+ let vec = data.lock().unwrap().clone();
+ let actual = from_utf8(&vec[..]).unwrap();
+ println!("actual:");
+ println!("{}", actual);
+ println!("expected:");
+ println!("{}", $expected);
+ assert_eq!(&actual[..], &$expected[..]);
+ });
+ }
+
+ let msp0 = MultiSpan::from_spans(vec![sp0, sp2, sp3, sp4, sp5]);
+ let msp = MultiSpan::from_spans(vec![sp1, sp2, sp3, sp4, sp5]);
+
+ test!(expect0, || {
+ diag.highlight_lines(&msp0, Level::Error).unwrap();
+ });
+ test!(expect, || {
+ diag.highlight_lines(&msp, Level::Error).unwrap();
+ });
+ }
+
+ #[test]
+ fn tab() {
+ let file_text = "
+fn foo() {
+\tbar;
+}
+";
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+ let span_bar = cm.span_substr(&foo, file_text, "bar", 0);
+
+ let mut snippet = SnippetData::new(cm, Some(span_bar), FormatMode::NewErrorFormat);
+ snippet.push(span_bar, true, None);
+
+ let lines = snippet.render_lines();
+ let text = make_string(&lines);
+ assert_eq!(&text[..], &"
+ --> foo.rs:3:2
+ |>
+3 |> \tbar;
+ |> \t^^^
+"[1..]);
+ }
+
+ #[test]
+ fn one_line() {
+ let file_text = r#"
+fn foo() {
+ vec.push(vec.pop().unwrap());
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+ let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0);
+ let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1);
+ let span_semi = cm.span_substr(&foo, file_text, ";", 0);
+
+ let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat);
+ snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here")));
+ snippet.push(span_vec1, false, Some(format!("error occurs here")));
+ snippet.push(span_semi, false, Some(format!("previous borrow ends here")));
+
+ let lines = snippet.render_lines();
+ println!("{:#?}", lines);
+
+ let text: String = make_string(&lines);
+
+ println!("text=\n{}", text);
+ assert_eq!(&text[..], &r#"
+ ::: foo.rs
+ |>
+3 |> vec.push(vec.pop().unwrap());
+ |> --- --- - previous borrow ends here
+ |> | |
+ |> | error occurs here
+ |> previous borrow of `vec` occurs here
+"#[1..]);
+ }
+
+ #[test]
+ fn two_files() {
+ let file_text_foo = r#"
+fn foo() {
+ vec.push(vec.pop().unwrap());
+}
+"#;
+
+ let file_text_bar = r#"
+fn bar() {
+ // these blank links here
+ // serve to ensure that the line numbers
+ // from bar.rs
+ // require more digits
+
+
+
+
+
+
+
+
+
+
+ vec.push();
+
+ // this line will get elided
+
+ vec.pop().unwrap());
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo_map = cm.new_filemap_and_lines("foo.rs", None, file_text_foo);
+ let span_foo_vec0 = cm.span_substr(&foo_map, file_text_foo, "vec", 0);
+ let span_foo_vec1 = cm.span_substr(&foo_map, file_text_foo, "vec", 1);
+ let span_foo_semi = cm.span_substr(&foo_map, file_text_foo, ";", 0);
+
+ let bar_map = cm.new_filemap_and_lines("bar.rs", None, file_text_bar);
+ let span_bar_vec0 = cm.span_substr(&bar_map, file_text_bar, "vec", 0);
+ let span_bar_vec1 = cm.span_substr(&bar_map, file_text_bar, "vec", 1);
+ let span_bar_semi = cm.span_substr(&bar_map, file_text_bar, ";", 0);
+
+ let mut snippet = SnippetData::new(cm, Some(span_foo_vec1), FormatMode::NewErrorFormat);
+ snippet.push(span_foo_vec0, false, Some(format!("a")));
+ snippet.push(span_foo_vec1, true, Some(format!("b")));
+ snippet.push(span_foo_semi, false, Some(format!("c")));
+ snippet.push(span_bar_vec0, false, Some(format!("d")));
+ snippet.push(span_bar_vec1, false, Some(format!("e")));
+ snippet.push(span_bar_semi, false, Some(format!("f")));
+
+ let lines = snippet.render_lines();
+ println!("{:#?}", lines);
+
+ let text: String = make_string(&lines);
+
+ println!("text=\n{}", text);
+
+ // Note that the `|>` remain aligned across both files:
+ assert_eq!(&text[..], &r#"
+ --> foo.rs:3:14
+ |>
+3 |> vec.push(vec.pop().unwrap());
+ |> --- ^^^ - c
+ |> | |
+ |> | b
+ |> a
+ ::: bar.rs
+ |>
+17 |> vec.push();
+ |> --- - f
+ |> |
+ |> d
+...
+21 |> vec.pop().unwrap());
+ |> --- e
+"#[1..]);
+ }
+
+ #[test]
+ fn multi_line() {
+ let file_text = r#"
+fn foo() {
+ let name = find_id(&data, 22).unwrap();
+
+ // Add one more item we forgot to the vector. Silly us.
+ data.push(Data { name: format!("Hera"), id: 66 });
+
+ // Print everything out.
+ println!("Name: {:?}", name);
+ println!("Data: {:?}", data);
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+ let span_data0 = cm.span_substr(&foo, file_text, "data", 0);
+ let span_data1 = cm.span_substr(&foo, file_text, "data", 1);
+ let span_rbrace = cm.span_substr(&foo, file_text, "}", 3);
+
+ let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat);
+ snippet.push(span_data0, false, Some(format!("immutable borrow begins here")));
+ snippet.push(span_data1, false, Some(format!("mutable borrow occurs here")));
+ snippet.push(span_rbrace, false, Some(format!("immutable borrow ends here")));
+
+ let lines = snippet.render_lines();
+ println!("{:#?}", lines);
+
+ let text: String = make_string(&lines);
+
+ println!("text=\n{}", text);
+ assert_eq!(&text[..], &r#"
+ ::: foo.rs
+ |>
+3 |> let name = find_id(&data, 22).unwrap();
+ |> ---- immutable borrow begins here
+...
+6 |> data.push(Data { name: format!("Hera"), id: 66 });
+ |> ---- mutable borrow occurs here
+...
+11 |> }
+ |> - immutable borrow ends here
+"#[1..]);
+ }
+
+ #[test]
+ fn overlapping() {
+ let file_text = r#"
+fn foo() {
+ vec.push(vec.pop().unwrap());
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+ let span0 = cm.span_substr(&foo, file_text, "vec.push", 0);
+ let span1 = cm.span_substr(&foo, file_text, "vec", 0);
+ let span2 = cm.span_substr(&foo, file_text, "ec.push", 0);
+ let span3 = cm.span_substr(&foo, file_text, "unwrap", 0);
+
+ let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat);
+ snippet.push(span0, false, Some(format!("A")));
+ snippet.push(span1, false, Some(format!("B")));
+ snippet.push(span2, false, Some(format!("C")));
+ snippet.push(span3, false, Some(format!("D")));
+
+ let lines = snippet.render_lines();
+ println!("{:#?}", lines);
+ let text: String = make_string(&lines);
+
+ println!("text=r#\"\n{}\".trim_left()", text);
+ assert_eq!(&text[..], &r#"
+ ::: foo.rs
+ |>
+3 |> vec.push(vec.pop().unwrap());
+ |> -------- ------ D
+ |> ||
+ |> |C
+ |> A
+ |> B
+"#[1..]);
+ }
+
+ #[test]
+ fn one_line_out_of_order() {
+ let file_text = r#"
+fn foo() {
+ vec.push(vec.pop().unwrap());
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+ let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0);
+ let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1);
+ let span_semi = cm.span_substr(&foo, file_text, ";", 0);
+
+ // intentionally don't push the snippets left to right
+ let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat);
+ snippet.push(span_vec1, false, Some(format!("error occurs here")));
+ snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here")));
+ snippet.push(span_semi, false, Some(format!("previous borrow ends here")));
+
+ let lines = snippet.render_lines();
+ println!("{:#?}", lines);
+ let text: String = make_string(&lines);
+
+ println!("text=r#\"\n{}\".trim_left()", text);
+ assert_eq!(&text[..], &r#"
+ ::: foo.rs
+ |>
+3 |> vec.push(vec.pop().unwrap());
+ |> --- --- - previous borrow ends here
+ |> | |
+ |> | error occurs here
+ |> previous borrow of `vec` occurs here
+"#[1..]);
+ }
+
+ #[test]
+ fn elide_unnecessary_lines() {
+ let file_text = r#"
+fn foo() {
+ let mut vec = vec![0, 1, 2];
+ let mut vec2 = vec;
+ vec2.push(3);
+ vec2.push(4);
+ vec2.push(5);
+ vec2.push(6);
+ vec.push(7);
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+ let span_vec0 = cm.span_substr(&foo, file_text, "vec", 3);
+ let span_vec1 = cm.span_substr(&foo, file_text, "vec", 8);
+
+ let mut snippet = SnippetData::new(cm, None, FormatMode::NewErrorFormat);
+ snippet.push(span_vec0, false, Some(format!("`vec` moved here because it \
+ has type `collections::vec::Vec<i32>`")));
+ snippet.push(span_vec1, false, Some(format!("use of moved value: `vec`")));
+
+ let lines = snippet.render_lines();
+ println!("{:#?}", lines);
+ let text: String = make_string(&lines);
+ println!("text=r#\"\n{}\".trim_left()", text);
+ assert_eq!(&text[..], &r#"
+ ::: foo.rs
+ |>
+4 |> let mut vec2 = vec;
+ |> --- `vec` moved here because it has type `collections::vec::Vec<i32>`
+...
+9 |> vec.push(7);
+ |> --- use of moved value: `vec`
+"#[1..]);
+ }
+
+ #[test]
+ fn spans_without_labels() {
+ let file_text = r#"
+fn foo() {
+ let mut vec = vec![0, 1, 2];
+ let mut vec2 = vec;
+ vec2.push(3);
+ vec2.push(4);
+ vec2.push(5);
+ vec2.push(6);
+ vec.push(7);
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+
+ let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat);
+ for i in 0..4 {
+ let span_veci = cm.span_substr(&foo, file_text, "vec", i);
+ snippet.push(span_veci, false, None);
+ }
+
+ let lines = snippet.render_lines();
+ let text: String = make_string(&lines);
+ println!("text=&r#\"\n{}\n\"#[1..]", text);
+ assert_eq!(text, &r#"
+ ::: foo.rs
+ |>
+3 |> let mut vec = vec![0, 1, 2];
+ |> --- ---
+4 |> let mut vec2 = vec;
+ |> --- ---
+"#[1..]);
+ }
+
+ #[test]
+ fn span_long_selection() {
+ let file_text = r#"
+impl SomeTrait for () {
+ fn foo(x: u32) {
+ // impl 1
+ // impl 2
+ // impl 3
+ }
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+
+ let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat);
+ let fn_span = cm.span_substr(&foo, file_text, "fn", 0);
+ let rbrace_span = cm.span_substr(&foo, file_text, "}", 0);
+ snippet.push(splice(fn_span, rbrace_span), false, None);
+ let lines = snippet.render_lines();
+ let text: String = make_string(&lines);
+ println!("r#\"\n{}\"", text);
+ assert_eq!(text, &r#"
+ ::: foo.rs
+ |>
+3 |> fn foo(x: u32) {
+ |> -
+"#[1..]);
+ }
+
+ #[test]
+ fn span_overlap_label() {
+ // Test that we don't put `x_span` to the right of its highlight,
+ // since there is another highlight that overlaps it.
+
+ let file_text = r#"
+ fn foo(x: u32) {
+ }
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+
+ let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat);
+ let fn_span = cm.span_substr(&foo, file_text, "fn foo(x: u32)", 0);
+ let x_span = cm.span_substr(&foo, file_text, "x", 0);
+ snippet.push(fn_span, false, Some(format!("fn_span")));
+ snippet.push(x_span, false, Some(format!("x_span")));
+ let lines = snippet.render_lines();
+ let text: String = make_string(&lines);
+ println!("r#\"\n{}\"", text);
+ assert_eq!(text, &r#"
+ ::: foo.rs
+ |>
+2 |> fn foo(x: u32) {
+ |> --------------
+ |> | |
+ |> | x_span
+ |> fn_span
+"#[1..]);
+ }
+
+ #[test]
+ fn span_overlap_label2() {
+ // Test that we don't put `x_span` to the right of its highlight,
+ // since there is another highlight that overlaps it. In this
+ // case, the overlap is only at the beginning, but it's still
+ // better to show the beginning more clearly.
+
+ let file_text = r#"
+ fn foo(x: u32) {
+ }
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+
+ let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat);
+ let fn_span = cm.span_substr(&foo, file_text, "fn foo(x", 0);
+ let x_span = cm.span_substr(&foo, file_text, "x: u32)", 0);
+ snippet.push(fn_span, false, Some(format!("fn_span")));
+ snippet.push(x_span, false, Some(format!("x_span")));
+ let lines = snippet.render_lines();
+ let text: String = make_string(&lines);
+ println!("r#\"\n{}\"", text);
+ assert_eq!(text, &r#"
+ ::: foo.rs
+ |>
+2 |> fn foo(x: u32) {
+ |> --------------
+ |> | |
+ |> | x_span
+ |> fn_span
+"#[1..]);
+ }
+
+ #[test]
+ fn span_overlap_label3() {
+ // Test that we don't put `x_span` to the right of its highlight,
+ // since there is another highlight that overlaps it. In this
+ // case, the overlap is only at the beginning, but it's still
+ // better to show the beginning more clearly.
+
+ let file_text = r#"
+ fn foo() {
+ let closure = || {
+ inner
+ };
+ }
+}
+"#;
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+
+ let mut snippet = SnippetData::new(cm.clone(), None, FormatMode::NewErrorFormat);
+
+ let closure_span = {
+ let closure_start_span = cm.span_substr(&foo, file_text, "||", 0);
+ let closure_end_span = cm.span_substr(&foo, file_text, "}", 0);
+ splice(closure_start_span, closure_end_span)
+ };
+
+ let inner_span = cm.span_substr(&foo, file_text, "inner", 0);
+
+ snippet.push(closure_span, false, Some(format!("foo")));
+ snippet.push(inner_span, false, Some(format!("bar")));
+
+ let lines = snippet.render_lines();
+ let text: String = make_string(&lines);
+ println!("r#\"\n{}\"", text);
+ assert_eq!(text, &r#"
+ ::: foo.rs
+ |>
+3 |> let closure = || {
+ |> - foo
+4 |> inner
+ |> ----- bar
+"#[1..]);
+ }
+
+ #[test]
+ fn span_empty() {
+ // In one of the unit tests, we found that the parser sometimes
+ // gives empty spans, and in particular it supplied an EOF span
+ // like this one, which points at the very end. We want to
+ // fallback gracefully in this case.
+
+ let file_text = r#"
+fn main() {
+ struct Foo;
+
+ impl !Sync for Foo {}
+
+ unsafe impl Send for &'static Foo {
+ // error: cross-crate traits with a default impl, like `core::marker::Send`,
+ // can only be implemented for a struct/enum type, not
+ // `&'static Foo`
+}"#;
+
+
+ let cm = Rc::new(CodeMap::new());
+ let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
+
+ let mut rbrace_span = cm.span_substr(&foo, file_text, "}", 1);
+ rbrace_span.lo = rbrace_span.hi;
+
+ let mut snippet = SnippetData::new(cm.clone(),
+ Some(rbrace_span),
+ FormatMode::NewErrorFormat);
+ snippet.push(rbrace_span, false, None);
+ let lines = snippet.render_lines();
+ let text: String = make_string(&lines);
+ println!("r#\"\n{}\"", text);
+ assert_eq!(text, &r#"
+ --> foo.rs:11:2
+ |>
+11 |> }
+ |> -
+"#[1..]);
+ }
}
use attr::{AttrMetaMethods, HasAttrs};
use feature_gate::{emit_feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features, GateIssue};
use fold::Folder;
-use {ast, fold, attr};
+use {fold, attr};
+use ast;
use codemap::{Spanned, respan};
use parse::{ParseSess, token};
use ptr::P;
}
fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVector<ast::Stmt> {
- let is_item = match stmt.node {
- ast::StmtKind::Decl(ref decl, _) => match decl.node {
- ast::DeclKind::Item(_) => true,
- _ => false,
- },
- _ => false,
- };
-
// avoid calling `visit_stmt_or_expr_attrs` on items
- if !is_item {
- self.visit_stmt_or_expr_attrs(stmt.attrs());
+ match stmt.node {
+ ast::StmtKind::Item(_) => {}
+ _ => self.visit_stmt_or_expr_attrs(stmt.attrs()),
}
self.configure(stmt).map(|stmt| fold::noop_fold_stmt(stmt, self))
use std::error::Error;
use rustc_serialize::json::as_json;
-use codemap::Span;
+use syntax_pos::Span;
use ext::base::ExtCtxt;
use diagnostics::plugin::{ErrorMap, ErrorInfo};
use std::env;
use ast;
-use ast::{Ident, Name, TokenTree};
-use codemap::Span;
+use ast::{Ident, Name};
+use syntax_pos::Span;
use ext::base::{ExtCtxt, MacEager, MacResult};
use ext::build::AstBuilder;
use parse::token;
use ptr::P;
+use tokenstream::{TokenTree};
use util::small_vector::SmallVector;
use diagnostics::metadata::output_metadata;
+pub use errors::*;
+
// Maximum width of any line in an extended error description (inclusive).
const MAX_DESCRIPTION_WIDTH: usize = 80;
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::collections::HashMap;
-
-#[derive(Clone)]
-pub struct Registry {
- descriptions: HashMap<&'static str, &'static str>
-}
-
-impl Registry {
- pub fn new(descriptions: &[(&'static str, &'static str)]) -> Registry {
- Registry { descriptions: descriptions.iter().cloned().collect() }
- }
-
- pub fn find_description(&self, code: &str) -> Option<&'static str> {
- self.descriptions.get(code).cloned()
- }
-}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use self::Destination::*;
-
-use codemap::{self, COMMAND_LINE_SP, DUMMY_SP, Span, MultiSpan};
-use diagnostics;
-
-use errors::check_old_skool;
-use errors::{Level, RenderSpan, CodeSuggestion, DiagnosticBuilder};
-use errors::RenderSpan::*;
-use errors::Level::*;
-use errors::snippet::{RenderedLineKind, SnippetData, Style};
-
-use std::{cmp, fmt};
-use std::io::prelude::*;
-use std::io;
-use std::rc::Rc;
-use term;
-
-/// Emitter trait for emitting errors. Do not implement this directly:
-/// implement `CoreEmitter` instead.
-pub trait Emitter {
- /// Emit a standalone diagnostic message.
- fn emit(&mut self, span: &MultiSpan, msg: &str, code: Option<&str>, lvl: Level);
-
- /// Emit a structured diagnostic.
- fn emit_struct(&mut self, db: &DiagnosticBuilder);
-}
-
-pub trait CoreEmitter {
- fn emit_message(&mut self,
- rsp: &RenderSpan,
- msg: &str,
- code: Option<&str>,
- lvl: Level,
- is_header: bool,
- show_snippet: bool);
-}
-
-impl<T: CoreEmitter> Emitter for T {
- fn emit(&mut self,
- msp: &MultiSpan,
- msg: &str,
- code: Option<&str>,
- lvl: Level) {
- self.emit_message(&FullSpan(msp.clone()),
- msg,
- code,
- lvl,
- true,
- true);
- }
-
- fn emit_struct(&mut self, db: &DiagnosticBuilder) {
- let old_school = check_old_skool();
- let db_span = FullSpan(db.span.clone());
- self.emit_message(&FullSpan(db.span.clone()),
- &db.message,
- db.code.as_ref().map(|s| &**s),
- db.level,
- true,
- true);
- for child in &db.children {
- let render_span = child.render_span
- .clone()
- .unwrap_or_else(
- || FullSpan(child.span.clone()));
-
- if !old_school {
- self.emit_message(&render_span,
- &child.message,
- None,
- child.level,
- false,
- true);
- } else {
- let (render_span, show_snippet) = match render_span.span().primary_span() {
- None => (db_span.clone(), false),
- _ => (render_span, true)
- };
- self.emit_message(&render_span,
- &child.message,
- None,
- child.level,
- false,
- show_snippet);
- }
- }
- }
-}
-
-/// maximum number of lines we will print for each error; arbitrary.
-pub const MAX_HIGHLIGHT_LINES: usize = 6;
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub enum ColorConfig {
- Auto,
- Always,
- Never,
-}
-
-impl ColorConfig {
- fn use_color(&self) -> bool {
- match *self {
- ColorConfig::Always => true,
- ColorConfig::Never => false,
- ColorConfig::Auto => stderr_isatty(),
- }
- }
-}
-
-/// A basic emitter for when we don't have access to a codemap or registry. Used
-/// for reporting very early errors, etc.
-pub struct BasicEmitter {
- dst: Destination,
-}
-
-impl CoreEmitter for BasicEmitter {
- fn emit_message(&mut self,
- _rsp: &RenderSpan,
- msg: &str,
- code: Option<&str>,
- lvl: Level,
- _is_header: bool,
- _show_snippet: bool) {
- // we ignore the span as we have no access to a codemap at this point
- if let Err(e) = print_diagnostic(&mut self.dst, "", lvl, msg, code) {
- panic!("failed to print diagnostics: {:?}", e);
- }
- }
-}
-
-impl BasicEmitter {
- pub fn stderr(color_config: ColorConfig) -> BasicEmitter {
- if color_config.use_color() {
- let dst = Destination::from_stderr();
- BasicEmitter { dst: dst }
- } else {
- BasicEmitter { dst: Raw(Box::new(io::stderr())) }
- }
- }
-}
-
-pub struct EmitterWriter {
- dst: Destination,
- registry: Option<diagnostics::registry::Registry>,
- cm: Rc<codemap::CodeMap>,
-
- /// Is this the first error emitted thus far? If not, we emit a
- /// `\n` before the top-level errors.
- first: bool,
-
- // For now, allow an old-school mode while we transition
- old_school: bool,
-}
-
-impl CoreEmitter for EmitterWriter {
- fn emit_message(&mut self,
- rsp: &RenderSpan,
- msg: &str,
- code: Option<&str>,
- lvl: Level,
- is_header: bool,
- show_snippet: bool) {
- match self.emit_message_(rsp, msg, code, lvl, is_header, show_snippet) {
- Ok(()) => { }
- Err(e) => panic!("failed to emit error: {}", e)
- }
- }
-}
-
-/// Do not use this for messages that end in `\n` – use `println_maybe_styled` instead. See
-/// `EmitterWriter::print_maybe_styled` for details.
-macro_rules! print_maybe_styled {
- ($dst: expr, $style: expr, $($arg: tt)*) => {
- $dst.print_maybe_styled(format_args!($($arg)*), $style, false)
- }
-}
-
-macro_rules! println_maybe_styled {
- ($dst: expr, $style: expr, $($arg: tt)*) => {
- $dst.print_maybe_styled(format_args!($($arg)*), $style, true)
- }
-}
-
-impl EmitterWriter {
- pub fn stderr(color_config: ColorConfig,
- registry: Option<diagnostics::registry::Registry>,
- code_map: Rc<codemap::CodeMap>)
- -> EmitterWriter {
- let old_school = check_old_skool();
- if color_config.use_color() {
- let dst = Destination::from_stderr();
- EmitterWriter { dst: dst,
- registry: registry,
- cm: code_map,
- first: true,
- old_school: old_school }
- } else {
- EmitterWriter { dst: Raw(Box::new(io::stderr())),
- registry: registry,
- cm: code_map,
- first: true,
- old_school: old_school }
- }
- }
-
- pub fn new(dst: Box<Write + Send>,
- registry: Option<diagnostics::registry::Registry>,
- code_map: Rc<codemap::CodeMap>)
- -> EmitterWriter {
- let old_school = check_old_skool();
- EmitterWriter { dst: Raw(dst),
- registry: registry,
- cm: code_map,
- first: true,
- old_school: old_school }
- }
-
- fn emit_message_(&mut self,
- rsp: &RenderSpan,
- msg: &str,
- code: Option<&str>,
- lvl: Level,
- is_header: bool,
- show_snippet: bool)
- -> io::Result<()> {
- if is_header {
- if self.first {
- self.first = false;
- } else {
- if !self.old_school {
- write!(self.dst, "\n")?;
- }
- }
- }
-
- match code {
- Some(code) if self.registry.as_ref()
- .and_then(|registry| registry.find_description(code))
- .is_some() => {
- let code_with_explain = String::from("--explain ") + code;
- if self.old_school {
- let loc = match rsp.span().primary_span() {
- Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(),
- Some(ps) => self.cm.span_to_string(ps),
- None => "".to_string()
- };
- print_diagnostic(&mut self.dst, &loc, lvl, msg, Some(code))?
- }
- else {
- print_diagnostic(&mut self.dst, "", lvl, msg, Some(&code_with_explain))?
- }
- }
- _ => {
- if self.old_school {
- let loc = match rsp.span().primary_span() {
- Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(),
- Some(ps) => self.cm.span_to_string(ps),
- None => "".to_string()
- };
- print_diagnostic(&mut self.dst, &loc, lvl, msg, code)?
- }
- else {
- print_diagnostic(&mut self.dst, "", lvl, msg, code)?
- }
- }
- }
-
- if !show_snippet {
- return Ok(());
- }
-
- // Watch out for various nasty special spans; don't try to
- // print any filename or anything for those.
- match rsp.span().primary_span() {
- Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => {
- return Ok(());
- }
- _ => { }
- }
-
- // Otherwise, print out the snippet etc as needed.
- match *rsp {
- FullSpan(ref msp) => {
- self.highlight_lines(msp, lvl)?;
- if let Some(primary_span) = msp.primary_span() {
- self.print_macro_backtrace(primary_span)?;
- }
- }
- Suggestion(ref suggestion) => {
- self.highlight_suggestion(suggestion)?;
- if let Some(primary_span) = rsp.span().primary_span() {
- self.print_macro_backtrace(primary_span)?;
- }
- }
- }
- if self.old_school {
- match code {
- Some(code) if self.registry.as_ref()
- .and_then(|registry| registry.find_description(code))
- .is_some() => {
- let loc = match rsp.span().primary_span() {
- Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(),
- Some(ps) => self.cm.span_to_string(ps),
- None => "".to_string()
- };
- let msg = "run `rustc --explain ".to_string() + &code.to_string() +
- "` to see a detailed explanation";
- print_diagnostic(&mut self.dst, &loc, Level::Help, &msg,
- None)?
- }
- _ => ()
- }
- }
- Ok(())
- }
-
- fn highlight_suggestion(&mut self, suggestion: &CodeSuggestion) -> io::Result<()>
- {
- let primary_span = suggestion.msp.primary_span().unwrap();
- let lines = self.cm.span_to_lines(primary_span).unwrap();
- assert!(!lines.lines.is_empty());
-
- let complete = suggestion.splice_lines(&self.cm);
- let line_count = cmp::min(lines.lines.len(), MAX_HIGHLIGHT_LINES);
- let display_lines = &lines.lines[..line_count];
-
- let fm = &*lines.file;
- // Calculate the widest number to format evenly
- let max_digits = line_num_max_digits(display_lines.last().unwrap());
-
- // print the suggestion without any line numbers, but leave
- // space for them. This helps with lining up with previous
- // snippets from the actual error being reported.
- let mut lines = complete.lines();
- for line in lines.by_ref().take(MAX_HIGHLIGHT_LINES) {
- write!(&mut self.dst, "{0}:{1:2$} {3}\n",
- fm.name, "", max_digits, line)?;
- }
-
- // if we elided some lines, add an ellipsis
- if let Some(_) = lines.next() {
- write!(&mut self.dst, "{0:1$} {0:2$} ...\n",
- "", fm.name.len(), max_digits)?;
- }
-
- Ok(())
- }
-
- fn highlight_lines(&mut self,
- msp: &MultiSpan,
- lvl: Level)
- -> io::Result<()>
- {
- let mut snippet_data = SnippetData::new(self.cm.clone(),
- msp.primary_span());
- if self.old_school {
- let mut output_vec = vec![];
-
- for span_label in msp.span_labels() {
- let mut snippet_data = SnippetData::new(self.cm.clone(),
- Some(span_label.span));
-
- snippet_data.push(span_label.span,
- span_label.is_primary,
- span_label.label);
- if span_label.is_primary {
- output_vec.insert(0, snippet_data);
- }
- else {
- output_vec.push(snippet_data);
- }
- }
-
- for snippet_data in output_vec.iter() {
- let rendered_lines = snippet_data.render_lines();
- for rendered_line in &rendered_lines {
- for styled_string in &rendered_line.text {
- self.dst.apply_style(lvl, &rendered_line.kind, styled_string.style)?;
- write!(&mut self.dst, "{}", styled_string.text)?;
- self.dst.reset_attrs()?;
- }
- write!(&mut self.dst, "\n")?;
- }
- }
- }
- else {
- for span_label in msp.span_labels() {
- snippet_data.push(span_label.span,
- span_label.is_primary,
- span_label.label);
- }
- let rendered_lines = snippet_data.render_lines();
- for rendered_line in &rendered_lines {
- for styled_string in &rendered_line.text {
- self.dst.apply_style(lvl, &rendered_line.kind, styled_string.style)?;
- write!(&mut self.dst, "{}", styled_string.text)?;
- self.dst.reset_attrs()?;
- }
- write!(&mut self.dst, "\n")?;
- }
- }
- Ok(())
- }
-
- fn print_macro_backtrace(&mut self,
- sp: Span)
- -> io::Result<()> {
- for trace in self.cm.macro_backtrace(sp) {
- let mut diag_string =
- format!("in this expansion of {}", trace.macro_decl_name);
- if let Some(def_site_span) = trace.def_site_span {
- diag_string.push_str(
- &format!(" (defined in {})",
- self.cm.span_to_filename(def_site_span)));
- }
- let snippet = self.cm.span_to_string(trace.call_site);
- print_diagnostic(&mut self.dst, &snippet, Note, &diag_string, None)?;
- }
- Ok(())
- }
-}
-
-fn line_num_max_digits(line: &codemap::LineInfo) -> usize {
- let mut max_line_num = line.line_index + 1;
- let mut digits = 0;
- while max_line_num > 0 {
- max_line_num /= 10;
- digits += 1;
- }
- digits
-}
-
-fn print_diagnostic(dst: &mut Destination,
- topic: &str,
- lvl: Level,
- msg: &str,
- code: Option<&str>)
- -> io::Result<()> {
- if !topic.is_empty() {
- let old_school = check_old_skool();
- if !old_school {
- write!(dst, "{}: ", topic)?;
- }
- else {
- write!(dst, "{} ", topic)?;
- }
- dst.reset_attrs()?;
- }
- dst.start_attr(term::Attr::Bold)?;
- dst.start_attr(term::Attr::ForegroundColor(lvl.color()))?;
- write!(dst, "{}", lvl.to_string())?;
- dst.reset_attrs()?;
- write!(dst, ": ")?;
- dst.start_attr(term::Attr::Bold)?;
- write!(dst, "{}", msg)?;
-
- if let Some(code) = code {
- let style = term::Attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
- print_maybe_styled!(dst, style, " [{}]", code.clone())?;
- }
-
- dst.reset_attrs()?;
- write!(dst, "\n")?;
- Ok(())
-}
-
-#[cfg(unix)]
-fn stderr_isatty() -> bool {
- use libc;
- unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
-}
-#[cfg(windows)]
-fn stderr_isatty() -> bool {
- type DWORD = u32;
- type BOOL = i32;
- type HANDLE = *mut u8;
- const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD;
- extern "system" {
- fn GetStdHandle(which: DWORD) -> HANDLE;
- fn GetConsoleMode(hConsoleHandle: HANDLE,
- lpMode: *mut DWORD) -> BOOL;
- }
- unsafe {
- let handle = GetStdHandle(STD_ERROR_HANDLE);
- let mut out = 0;
- GetConsoleMode(handle, &mut out) != 0
- }
-}
-
-enum Destination {
- Terminal(Box<term::StderrTerminal>),
- Raw(Box<Write + Send>),
-}
-
-impl Destination {
- fn from_stderr() -> Destination {
- match term::stderr() {
- Some(t) => Terminal(t),
- None => Raw(Box::new(io::stderr())),
- }
- }
-
- fn apply_style(&mut self,
- lvl: Level,
- _kind: &RenderedLineKind,
- style: Style)
- -> io::Result<()> {
- match style {
- Style::FileNameStyle |
- Style::LineAndColumn => {
- }
- Style::LineNumber => {
- self.start_attr(term::Attr::Bold)?;
- self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_BLUE))?;
- }
- Style::Quotation => {
- }
- Style::OldSkoolNote => {
- self.start_attr(term::Attr::Bold)?;
- self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_GREEN))?;
- }
- Style::OldSkoolNoteText => {
- self.start_attr(term::Attr::Bold)?;
- }
- Style::UnderlinePrimary | Style::LabelPrimary => {
- self.start_attr(term::Attr::Bold)?;
- self.start_attr(term::Attr::ForegroundColor(lvl.color()))?;
- }
- Style::UnderlineSecondary | Style::LabelSecondary => {
- self.start_attr(term::Attr::Bold)?;
- self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_BLUE))?;
- }
- Style::NoStyle => {
- }
- }
- Ok(())
- }
-
- fn start_attr(&mut self, attr: term::Attr) -> io::Result<()> {
- match *self {
- Terminal(ref mut t) => { t.attr(attr)?; }
- Raw(_) => { }
- }
- Ok(())
- }
-
- fn reset_attrs(&mut self) -> io::Result<()> {
- match *self {
- Terminal(ref mut t) => { t.reset()?; }
- Raw(_) => { }
- }
- Ok(())
- }
-
- fn print_maybe_styled(&mut self,
- args: fmt::Arguments,
- color: term::Attr,
- print_newline_at_end: bool)
- -> io::Result<()> {
- match *self {
- Terminal(ref mut t) => {
- t.attr(color)?;
- // If `msg` ends in a newline, we need to reset the color before
- // the newline. We're making the assumption that we end up writing
- // to a `LineBufferedWriter`, which means that emitting the reset
- // after the newline ends up buffering the reset until we print
- // another line or exit. Buffering the reset is a problem if we're
- // sharing the terminal with any other programs (e.g. other rustc
- // instances via `make -jN`).
- //
- // Note that if `msg` contains any internal newlines, this will
- // result in the `LineBufferedWriter` flushing twice instead of
- // once, which still leaves the opportunity for interleaved output
- // to be miscolored. We assume this is rare enough that we don't
- // have to worry about it.
- t.write_fmt(args)?;
- t.reset()?;
- if print_newline_at_end {
- t.write_all(b"\n")
- } else {
- Ok(())
- }
- }
- Raw(ref mut w) => {
- w.write_fmt(args)?;
- if print_newline_at_end {
- w.write_all(b"\n")
- } else {
- Ok(())
- }
- }
- }
- }
-}
-
-impl Write for Destination {
- fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
- match *self {
- Terminal(ref mut t) => t.write(bytes),
- Raw(ref mut w) => w.write(bytes),
- }
- }
- fn flush(&mut self) -> io::Result<()> {
- match *self {
- Terminal(ref mut t) => t.flush(),
- Raw(ref mut w) => w.flush(),
- }
- }
-}
-
-
-#[cfg(test)]
-mod test {
- use errors::{Level, CodeSuggestion};
- use super::EmitterWriter;
- use codemap::{mk_sp, CodeMap, Span, MultiSpan, BytePos, NO_EXPANSION};
- use std::sync::{Arc, Mutex};
- use std::io::{self, Write};
- use std::str::from_utf8;
- use std::rc::Rc;
-
- struct Sink(Arc<Mutex<Vec<u8>>>);
- impl Write for Sink {
- fn write(&mut self, data: &[u8]) -> io::Result<usize> {
- Write::write(&mut *self.0.lock().unwrap(), data)
- }
- fn flush(&mut self) -> io::Result<()> { Ok(()) }
- }
-
- /// Given a string like " ^~~~~~~~~~~~ ", produces a span
- /// coverting that range. The idea is that the string has the same
- /// length as the input, and we uncover the byte positions. Note
- /// that this can span lines and so on.
- fn span_from_selection(input: &str, selection: &str) -> Span {
- assert_eq!(input.len(), selection.len());
- let left_index = selection.find('~').unwrap() as u32;
- let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
- Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION }
- }
-
- // Diagnostic doesn't align properly in span where line number increases by one digit
- #[test]
- fn test_hilight_suggestion_issue_11715() {
- let data = Arc::new(Mutex::new(Vec::new()));
- let cm = Rc::new(CodeMap::new());
- let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone());
- let content = "abcdefg
- koksi
- line3
- line4
- cinq
- line6
- line7
- line8
- line9
- line10
- e-lä-vän
- tolv
- dreizehn
- ";
- let file = cm.new_filemap_and_lines("dummy.txt", None, content);
- let start = file.lines.borrow()[10];
- let end = file.lines.borrow()[11];
- let sp = mk_sp(start, end);
- let lvl = Level::Error;
- println!("highlight_lines");
- ew.highlight_lines(&sp.into(), lvl).unwrap();
- println!("done");
- let vec = data.lock().unwrap().clone();
- let vec: &[u8] = &vec;
- let str = from_utf8(vec).unwrap();
- println!("r#\"\n{}\"#", str);
- assert_eq!(str, &r#"
- --> dummy.txt:11:1
- |>
-11 |> e-lä-vän
- |> ^
-"#[1..]);
- }
-
- #[test]
- fn test_single_span_splice() {
- // Test that a `MultiSpan` containing a single span splices a substition correctly
- let cm = CodeMap::new();
- let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
- let selection = " \n ~~\n~~~\n~~~~~ \n \n";
- cm.new_filemap_and_lines("blork.rs", None, inputtext);
- let sp = span_from_selection(inputtext, selection);
- let msp: MultiSpan = sp.into();
-
- // check that we are extracting the text we thought we were extracting
- assert_eq!(&cm.span_to_snippet(sp).unwrap(), "BB\nCCC\nDDDDD");
-
- let substitute = "ZZZZZZ".to_owned();
- let expected = "bbbbZZZZZZddddd";
- let suggest = CodeSuggestion {
- msp: msp,
- substitutes: vec![substitute],
- };
- assert_eq!(suggest.splice_lines(&cm), expected);
- }
-
- #[test]
- fn test_multi_span_splice() {
- // Test that a `MultiSpan` containing multiple spans splices a substition correctly
- let cm = CodeMap::new();
- let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
- let selection1 = " \n \n \n \n ~ \n"; // intentionally out of order
- let selection2 = " \n ~~\n~~~\n~~~~~ \n \n";
- cm.new_filemap_and_lines("blork.rs", None, inputtext);
- let sp1 = span_from_selection(inputtext, selection1);
- let sp2 = span_from_selection(inputtext, selection2);
- let msp: MultiSpan = MultiSpan::from_spans(vec![sp1, sp2]);
-
- let expected = "bbbbZZZZZZddddd\neXYZe";
- let suggest = CodeSuggestion {
- msp: msp,
- substitutes: vec!["ZZZZZZ".to_owned(),
- "XYZ".to_owned()]
- };
-
- assert_eq!(suggest.splice_lines(&cm), expected);
- }
-
- #[test]
- fn test_multispan_highlight() {
- let data = Arc::new(Mutex::new(Vec::new()));
- let cm = Rc::new(CodeMap::new());
- let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone());
-
- let inp = "_____aaaaaa____bbbbbb__cccccdd_";
- let sp1 = " ~~~~~~ ";
- let sp2 = " ~~~~~~ ";
- let sp3 = " ~~~~~ ";
- let sp4 = " ~~~~ ";
- let sp34 = " ~~~~~~~ ";
-
- let expect_start = &r#"
- --> dummy.txt:1:6
- |>
-1 |> _____aaaaaa____bbbbbb__cccccdd_
- |> ^^^^^^ ^^^^^^ ^^^^^^^
-"#[1..];
-
- let span = |sp, expected| {
- let sp = span_from_selection(inp, sp);
- assert_eq!(&cm.span_to_snippet(sp).unwrap(), expected);
- sp
- };
- cm.new_filemap_and_lines("dummy.txt", None, inp);
- let sp1 = span(sp1, "aaaaaa");
- let sp2 = span(sp2, "bbbbbb");
- let sp3 = span(sp3, "ccccc");
- let sp4 = span(sp4, "ccdd");
- let sp34 = span(sp34, "cccccdd");
-
- let spans = vec![sp1, sp2, sp3, sp4];
-
- let test = |expected, highlight: &mut FnMut()| {
- data.lock().unwrap().clear();
- highlight();
- let vec = data.lock().unwrap().clone();
- let actual = from_utf8(&vec[..]).unwrap();
- println!("actual=\n{}", actual);
- assert_eq!(actual, expected);
- };
-
- let msp = MultiSpan::from_spans(vec![sp1, sp2, sp34]);
- test(expect_start, &mut || {
- diag.highlight_lines(&msp, Level::Error).unwrap();
- });
- test(expect_start, &mut || {
- let msp = MultiSpan::from_spans(spans.clone());
- diag.highlight_lines(&msp, Level::Error).unwrap();
- });
- }
-
- #[test]
- fn test_huge_multispan_highlight() {
- let data = Arc::new(Mutex::new(Vec::new()));
- let cm = Rc::new(CodeMap::new());
- let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone());
-
- let inp = "aaaaa\n\
- aaaaa\n\
- aaaaa\n\
- bbbbb\n\
- ccccc\n\
- xxxxx\n\
- yyyyy\n\
- _____\n\
- ddd__eee_\n\
- elided\n\
- __f_gg";
- let file = cm.new_filemap_and_lines("dummy.txt", None, inp);
-
- let span = |lo, hi, (off_lo, off_hi)| {
- let lines = file.lines.borrow();
- let (mut lo, mut hi): (BytePos, BytePos) = (lines[lo], lines[hi]);
- lo.0 += off_lo;
- hi.0 += off_hi;
- mk_sp(lo, hi)
- };
- let sp0 = span(4, 6, (0, 5));
- let sp1 = span(0, 6, (0, 5));
- let sp2 = span(8, 8, (0, 3));
- let sp3 = span(8, 8, (5, 8));
- let sp4 = span(10, 10, (2, 3));
- let sp5 = span(10, 10, (4, 6));
-
- let expect0 = &r#"
- --> dummy.txt:5:1
- |>
-5 |> ccccc
- |> ^
-...
-9 |> ddd__eee_
- |> ^^^ ^^^
-10 |> elided
-11 |> __f_gg
- |> ^ ^^
-"#[1..];
-
- let expect = &r#"
- --> dummy.txt:1:1
- |>
-1 |> aaaaa
- |> ^
-...
-9 |> ddd__eee_
- |> ^^^ ^^^
-10 |> elided
-11 |> __f_gg
- |> ^ ^^
-"#[1..];
-
- macro_rules! test {
- ($expected: expr, $highlight: expr) => ({
- data.lock().unwrap().clear();
- $highlight();
- let vec = data.lock().unwrap().clone();
- let actual = from_utf8(&vec[..]).unwrap();
- println!("actual:");
- println!("{}", actual);
- println!("expected:");
- println!("{}", $expected);
- assert_eq!(&actual[..], &$expected[..]);
- });
- }
-
- let msp0 = MultiSpan::from_spans(vec![sp0, sp2, sp3, sp4, sp5]);
- let msp = MultiSpan::from_spans(vec![sp1, sp2, sp3, sp4, sp5]);
-
- test!(expect0, || {
- diag.highlight_lines(&msp0, Level::Error).unwrap();
- });
- test!(expect, || {
- diag.highlight_lines(&msp, Level::Error).unwrap();
- });
- }
-}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! A JSON emitter for errors.
-//!
-//! This works by converting errors to a simplified structural format (see the
-//! structs at the start of the file) and then serialising them. These should
-//! contain as much information about the error as possible.
-//!
-//! The format of the JSON output should be considered *unstable*. For now the
-//! structs at the end of this file (Diagnostic*) specify the error format.
-
-// FIXME spec the JSON output properly.
-
-
-use codemap::{self, MacroBacktrace, Span, SpanLabel, MultiSpan, CodeMap};
-use diagnostics::registry::Registry;
-use errors::{Level, DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion};
-use errors::emitter::Emitter;
-
-use std::rc::Rc;
-use std::io::{self, Write};
-use std::vec;
-
-use rustc_serialize::json::as_json;
-
-pub struct JsonEmitter {
- dst: Box<Write + Send>,
- registry: Option<Registry>,
- cm: Rc<CodeMap>,
-}
-
-impl JsonEmitter {
- pub fn basic() -> JsonEmitter {
- JsonEmitter::stderr(None, Rc::new(CodeMap::new()))
- }
-
- pub fn stderr(registry: Option<Registry>,
- code_map: Rc<CodeMap>) -> JsonEmitter {
- JsonEmitter {
- dst: Box::new(io::stderr()),
- registry: registry,
- cm: code_map,
- }
- }
-}
-
-impl Emitter for JsonEmitter {
- fn emit(&mut self, span: &MultiSpan, msg: &str, code: Option<&str>, level: Level) {
- let data = Diagnostic::new(span, msg, code, level, self);
- if let Err(e) = writeln!(&mut self.dst, "{}", as_json(&data)) {
- panic!("failed to print diagnostics: {:?}", e);
- }
- }
-
- fn emit_struct(&mut self, db: &DiagnosticBuilder) {
- let data = Diagnostic::from_diagnostic_builder(db, self);
- if let Err(e) = writeln!(&mut self.dst, "{}", as_json(&data)) {
- panic!("failed to print diagnostics: {:?}", e);
- }
- }
-}
-
-// The following data types are provided just for serialisation.
-
-#[derive(RustcEncodable)]
-struct Diagnostic<'a> {
- /// The primary error message.
- message: &'a str,
- code: Option<DiagnosticCode>,
- /// "error: internal compiler error", "error", "warning", "note", "help".
- level: &'static str,
- spans: Vec<DiagnosticSpan>,
- /// Associated diagnostic messages.
- children: Vec<Diagnostic<'a>>,
- /// The message as rustc would render it. Currently this is only
- /// `Some` for "suggestions", but eventually it will include all
- /// snippets.
- rendered: Option<String>,
-}
-
-#[derive(RustcEncodable)]
-struct DiagnosticSpan {
- file_name: String,
- byte_start: u32,
- byte_end: u32,
- /// 1-based.
- line_start: usize,
- line_end: usize,
- /// 1-based, character offset.
- column_start: usize,
- column_end: usize,
- /// Is this a "primary" span -- meaning the point, or one of the points,
- /// where the error occurred?
- is_primary: bool,
- /// Source text from the start of line_start to the end of line_end.
- text: Vec<DiagnosticSpanLine>,
- /// Label that should be placed at this location (if any)
- label: Option<String>,
- /// If we are suggesting a replacement, this will contain text
- /// that should be sliced in atop this span. You may prefer to
- /// load the fully rendered version from the parent `Diagnostic`,
- /// however.
- suggested_replacement: Option<String>,
- /// Macro invocations that created the code at this span, if any.
- expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
-}
-
-#[derive(RustcEncodable)]
-struct DiagnosticSpanLine {
- text: String,
-
- /// 1-based, character offset in self.text.
- highlight_start: usize,
-
- highlight_end: usize,
-}
-
-#[derive(RustcEncodable)]
-struct DiagnosticSpanMacroExpansion {
- /// span where macro was applied to generate this code; note that
- /// this may itself derive from a macro (if
- /// `span.expansion.is_some()`)
- span: DiagnosticSpan,
-
- /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
- macro_decl_name: String,
-
- /// span where macro was defined (if known)
- def_site_span: Option<DiagnosticSpan>,
-}
-
-#[derive(RustcEncodable)]
-struct DiagnosticCode {
- /// The code itself.
- code: String,
- /// An explanation for the code.
- explanation: Option<&'static str>,
-}
-
-impl<'a> Diagnostic<'a> {
- fn new(msp: &MultiSpan,
- msg: &'a str,
- code: Option<&str>,
- level: Level,
- je: &JsonEmitter)
- -> Diagnostic<'a> {
- Diagnostic {
- message: msg,
- code: DiagnosticCode::map_opt_string(code.map(|c| c.to_owned()), je),
- level: level.to_str(),
- spans: DiagnosticSpan::from_multispan(msp, je),
- children: vec![],
- rendered: None,
- }
- }
-
- fn from_diagnostic_builder<'c>(db: &'c DiagnosticBuilder,
- je: &JsonEmitter)
- -> Diagnostic<'c> {
- Diagnostic {
- message: &db.message,
- code: DiagnosticCode::map_opt_string(db.code.clone(), je),
- level: db.level.to_str(),
- spans: DiagnosticSpan::from_multispan(&db.span, je),
- children: db.children.iter().map(|c| {
- Diagnostic::from_sub_diagnostic(c, je)
- }).collect(),
- rendered: None,
- }
- }
-
- fn from_sub_diagnostic<'c>(db: &'c SubDiagnostic, je: &JsonEmitter) -> Diagnostic<'c> {
- Diagnostic {
- message: &db.message,
- code: None,
- level: db.level.to_str(),
- spans: db.render_span.as_ref()
- .map(|sp| DiagnosticSpan::from_render_span(sp, je))
- .unwrap_or_else(|| DiagnosticSpan::from_multispan(&db.span, je)),
- children: vec![],
- rendered: db.render_span.as_ref()
- .and_then(|rsp| je.render(rsp)),
- }
- }
-}
-
-impl DiagnosticSpan {
- fn from_span_label(span: SpanLabel,
- suggestion: Option<&String>,
- je: &JsonEmitter)
- -> DiagnosticSpan {
- Self::from_span_etc(span.span,
- span.is_primary,
- span.label,
- suggestion,
- je)
- }
-
- fn from_span_etc(span: Span,
- is_primary: bool,
- label: Option<String>,
- suggestion: Option<&String>,
- je: &JsonEmitter)
- -> DiagnosticSpan {
- // obtain the full backtrace from the `macro_backtrace`
- // helper; in some ways, it'd be better to expand the
- // backtrace ourselves, but the `macro_backtrace` helper makes
- // some decision, such as dropping some frames, and I don't
- // want to duplicate that logic here.
- let backtrace = je.cm.macro_backtrace(span).into_iter();
- DiagnosticSpan::from_span_full(span,
- is_primary,
- label,
- suggestion,
- backtrace,
- je)
- }
-
- fn from_span_full(span: Span,
- is_primary: bool,
- label: Option<String>,
- suggestion: Option<&String>,
- mut backtrace: vec::IntoIter<MacroBacktrace>,
- je: &JsonEmitter)
- -> DiagnosticSpan {
- let start = je.cm.lookup_char_pos(span.lo);
- let end = je.cm.lookup_char_pos(span.hi);
- let backtrace_step = backtrace.next().map(|bt| {
- let call_site =
- Self::from_span_full(bt.call_site,
- false,
- None,
- None,
- backtrace,
- je);
- let def_site_span = bt.def_site_span.map(|sp| {
- Self::from_span_full(sp,
- false,
- None,
- None,
- vec![].into_iter(),
- je)
- });
- Box::new(DiagnosticSpanMacroExpansion {
- span: call_site,
- macro_decl_name: bt.macro_decl_name,
- def_site_span: def_site_span,
- })
- });
- DiagnosticSpan {
- file_name: start.file.name.clone(),
- byte_start: span.lo.0,
- byte_end: span.hi.0,
- line_start: start.line,
- line_end: end.line,
- column_start: start.col.0 + 1,
- column_end: end.col.0 + 1,
- is_primary: is_primary,
- text: DiagnosticSpanLine::from_span(span, je),
- suggested_replacement: suggestion.cloned(),
- expansion: backtrace_step,
- label: label,
- }
- }
-
- fn from_multispan(msp: &MultiSpan, je: &JsonEmitter) -> Vec<DiagnosticSpan> {
- msp.span_labels()
- .into_iter()
- .map(|span_str| Self::from_span_label(span_str, None, je))
- .collect()
- }
-
- fn from_suggestion(suggestion: &CodeSuggestion, je: &JsonEmitter)
- -> Vec<DiagnosticSpan> {
- assert_eq!(suggestion.msp.span_labels().len(), suggestion.substitutes.len());
- suggestion.msp.span_labels()
- .into_iter()
- .zip(&suggestion.substitutes)
- .map(|(span_label, suggestion)| {
- DiagnosticSpan::from_span_label(span_label,
- Some(suggestion),
- je)
- })
- .collect()
- }
-
- fn from_render_span(rsp: &RenderSpan, je: &JsonEmitter) -> Vec<DiagnosticSpan> {
- match *rsp {
- RenderSpan::FullSpan(ref msp) =>
- DiagnosticSpan::from_multispan(msp, je),
- RenderSpan::Suggestion(ref suggestion) =>
- DiagnosticSpan::from_suggestion(suggestion, je),
- }
- }
-}
-
-impl DiagnosticSpanLine {
- fn line_from_filemap(fm: &codemap::FileMap,
- index: usize,
- h_start: usize,
- h_end: usize)
- -> DiagnosticSpanLine {
- DiagnosticSpanLine {
- text: fm.get_line(index).unwrap().to_owned(),
- highlight_start: h_start,
- highlight_end: h_end,
- }
- }
-
- /// Create a list of DiagnosticSpanLines from span - each line with any part
- /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
- /// `span` within the line.
- fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
- je.cm.span_to_lines(span)
- .map(|lines| {
- let fm = &*lines.file;
- lines.lines
- .iter()
- .map(|line| {
- DiagnosticSpanLine::line_from_filemap(fm,
- line.line_index,
- line.start_col.0 + 1,
- line.end_col.0 + 1)
- })
- .collect()
- })
- .unwrap_or(vec![])
- }
-}
-
-impl DiagnosticCode {
- fn map_opt_string(s: Option<String>, je: &JsonEmitter) -> Option<DiagnosticCode> {
- s.map(|s| {
-
- let explanation = je.registry
- .as_ref()
- .and_then(|registry| registry.find_description(&s));
-
- DiagnosticCode {
- code: s,
- explanation: explanation,
- }
- })
- }
-}
-
-impl JsonEmitter {
- fn render(&self, render_span: &RenderSpan) -> Option<String> {
- match *render_span {
- RenderSpan::FullSpan(_) => {
- None
- }
- RenderSpan::Suggestion(ref suggestion) => {
- Some(suggestion.splice_lines(&self.cm))
- }
- }
- }
-}
-
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-pub use errors::emitter::ColorConfig;
-
-use self::Level::*;
-use self::RenderSpan::*;
-
-use codemap::{self, CodeMap, MultiSpan, NO_EXPANSION, Span};
-use diagnostics;
-use errors::emitter::{Emitter, EmitterWriter};
-
-use std::cell::{RefCell, Cell};
-use std::{error, fmt};
-use std::rc::Rc;
-use std::thread::panicking;
-use term;
-
-pub mod emitter;
-pub mod json;
-pub mod snippet;
-
-#[derive(Clone)]
-pub enum RenderSpan {
- /// A FullSpan renders with both with an initial line for the
- /// message, prefixed by file:linenum, followed by a summary of
- /// the source code covered by the span.
- FullSpan(MultiSpan),
-
- /// A suggestion renders with both with an initial line for the
- /// message, prefixed by file:linenum, followed by a summary
- /// of hypothetical source code, where each `String` is spliced
- /// into the lines in place of the code covered by each span.
- Suggestion(CodeSuggestion),
-}
-
-#[derive(Clone)]
-pub struct CodeSuggestion {
- msp: MultiSpan,
- substitutes: Vec<String>,
-}
-
-impl RenderSpan {
- fn span(&self) -> &MultiSpan {
- match *self {
- FullSpan(ref msp) |
- Suggestion(CodeSuggestion { ref msp, .. }) =>
- msp
- }
- }
-}
-
-impl CodeSuggestion {
- /// Returns the assembled code suggestion.
- pub fn splice_lines(&self, cm: &CodeMap) -> String {
- use codemap::{CharPos, Loc, Pos};
-
- fn push_trailing(buf: &mut String, line_opt: Option<&str>,
- lo: &Loc, hi_opt: Option<&Loc>) {
- let (lo, hi_opt) = (lo.col.to_usize(), hi_opt.map(|hi|hi.col.to_usize()));
- if let Some(line) = line_opt {
- if line.len() > lo {
- buf.push_str(match hi_opt {
- Some(hi) => &line[lo..hi],
- None => &line[lo..],
- });
- }
- if let None = hi_opt {
- buf.push('\n');
- }
- }
- }
-
- let mut primary_spans = self.msp.primary_spans().to_owned();
-
- assert_eq!(primary_spans.len(), self.substitutes.len());
- if primary_spans.is_empty() {
- return format!("");
- }
-
- // Assumption: all spans are in the same file, and all spans
- // are disjoint. Sort in ascending order.
- primary_spans.sort_by_key(|sp| sp.lo);
-
- // Find the bounding span.
- let lo = primary_spans.iter().map(|sp| sp.lo).min().unwrap();
- let hi = primary_spans.iter().map(|sp| sp.hi).min().unwrap();
- let bounding_span = Span { lo: lo, hi: hi, expn_id: NO_EXPANSION };
- let lines = cm.span_to_lines(bounding_span).unwrap();
- assert!(!lines.lines.is_empty());
-
- // To build up the result, we do this for each span:
- // - push the line segment trailing the previous span
- // (at the beginning a "phantom" span pointing at the start of the line)
- // - push lines between the previous and current span (if any)
- // - if the previous and current span are not on the same line
- // push the line segment leading up to the current span
- // - splice in the span substitution
- //
- // Finally push the trailing line segment of the last span
- let fm = &lines.file;
- let mut prev_hi = cm.lookup_char_pos(bounding_span.lo);
- prev_hi.col = CharPos::from_usize(0);
-
- let mut prev_line = fm.get_line(lines.lines[0].line_index);
- let mut buf = String::new();
-
- for (sp, substitute) in primary_spans.iter().zip(self.substitutes.iter()) {
- let cur_lo = cm.lookup_char_pos(sp.lo);
- if prev_hi.line == cur_lo.line {
- push_trailing(&mut buf, prev_line, &prev_hi, Some(&cur_lo));
- } else {
- push_trailing(&mut buf, prev_line, &prev_hi, None);
- // push lines between the previous and current span (if any)
- for idx in prev_hi.line..(cur_lo.line - 1) {
- if let Some(line) = fm.get_line(idx) {
- buf.push_str(line);
- buf.push('\n');
- }
- }
- if let Some(cur_line) = fm.get_line(cur_lo.line - 1) {
- buf.push_str(&cur_line[.. cur_lo.col.to_usize()]);
- }
- }
- buf.push_str(substitute);
- prev_hi = cm.lookup_char_pos(sp.hi);
- prev_line = fm.get_line(prev_hi.line - 1);
- }
- push_trailing(&mut buf, prev_line, &prev_hi, None);
- // remove trailing newline
- buf.pop();
- buf
- }
-}
-
-/// Used as a return value to signify a fatal error occurred. (It is also
-/// used as the argument to panic at the moment, but that will eventually
-/// not be true.)
-#[derive(Copy, Clone, Debug)]
-#[must_use]
-pub struct FatalError;
-
-impl fmt::Display for FatalError {
- fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
- write!(f, "parser fatal error")
- }
-}
-
-impl error::Error for FatalError {
- fn description(&self) -> &str {
- "The parser has encountered a fatal error"
- }
-}
-
-/// Signifies that the compiler died with an explicit call to `.bug`
-/// or `.span_bug` rather than a failed assertion, etc.
-#[derive(Copy, Clone, Debug)]
-pub struct ExplicitBug;
-
-impl fmt::Display for ExplicitBug {
- fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
- write!(f, "parser internal bug")
- }
-}
-
-impl error::Error for ExplicitBug {
- fn description(&self) -> &str {
- "The parser has encountered an internal bug"
- }
-}
-
-/// Used for emitting structured error messages and other diagnostic information.
-#[must_use]
-#[derive(Clone)]
-pub struct DiagnosticBuilder<'a> {
- handler: &'a Handler,
- level: Level,
- message: String,
- code: Option<String>,
- span: MultiSpan,
- children: Vec<SubDiagnostic>,
-}
-
-/// For example a note attached to an error.
-#[derive(Clone)]
-struct SubDiagnostic {
- level: Level,
- message: String,
- span: MultiSpan,
- render_span: Option<RenderSpan>,
-}
-
-impl<'a> DiagnosticBuilder<'a> {
- /// Emit the diagnostic.
- pub fn emit(&mut self) {
- if self.cancelled() {
- return;
- }
-
- self.handler.emit.borrow_mut().emit_struct(&self);
- self.cancel();
- self.handler.panic_if_treat_err_as_bug();
-
- // if self.is_fatal() {
- // panic!(FatalError);
- // }
- }
-
- /// Cancel the diagnostic (a structured diagnostic must either be emitted or
- /// cancelled or it will panic when dropped).
- /// BEWARE: if this DiagnosticBuilder is an error, then creating it will
- /// bump the error count on the Handler and cancelling it won't undo that.
- /// If you want to decrement the error count you should use `Handler::cancel`.
- pub fn cancel(&mut self) {
- self.level = Level::Cancelled;
- }
-
- pub fn cancelled(&self) -> bool {
- self.level == Level::Cancelled
- }
-
- pub fn is_fatal(&self) -> bool {
- self.level == Level::Fatal
- }
-
- /// Add a span/label to be included in the resulting snippet.
- /// This is pushed onto the `MultiSpan` that was created when the
- /// diagnostic was first built. If you don't call this function at
- /// all, and you just supplied a `Span` to create the diagnostic,
- /// then the snippet will just include that `Span`, which is
- /// called the primary span.
- pub fn span_label(&mut self, span: Span, label: &fmt::Display)
- -> &mut DiagnosticBuilder<'a> {
- self.span.push_span_label(span, format!("{}", label));
- self
- }
-
- pub fn note_expected_found(&mut self,
- label: &fmt::Display,
- expected: &fmt::Display,
- found: &fmt::Display)
- -> &mut DiagnosticBuilder<'a>
- {
- // For now, just attach these as notes
- self.note(&format!("expected {} `{}`", label, expected));
- self.note(&format!(" found {} `{}`", label, found));
- self
- }
-
- pub fn note(&mut self, msg: &str) -> &mut DiagnosticBuilder<'a> {
- self.sub(Level::Note, msg, MultiSpan::new(), None);
- self
- }
- pub fn span_note<S: Into<MultiSpan>>(&mut self,
- sp: S,
- msg: &str)
- -> &mut DiagnosticBuilder<'a> {
- self.sub(Level::Note, msg, sp.into(), None);
- self
- }
- pub fn warn(&mut self, msg: &str) -> &mut DiagnosticBuilder<'a> {
- self.sub(Level::Warning, msg, MultiSpan::new(), None);
- self
- }
- pub fn span_warn<S: Into<MultiSpan>>(&mut self,
- sp: S,
- msg: &str)
- -> &mut DiagnosticBuilder<'a> {
- self.sub(Level::Warning, msg, sp.into(), None);
- self
- }
- pub fn help(&mut self , msg: &str) -> &mut DiagnosticBuilder<'a> {
- self.sub(Level::Help, msg, MultiSpan::new(), None);
- self
- }
- pub fn span_help<S: Into<MultiSpan>>(&mut self,
- sp: S,
- msg: &str)
- -> &mut DiagnosticBuilder<'a> {
- self.sub(Level::Help, msg, sp.into(), None);
- self
- }
- /// Prints out a message with a suggested edit of the code.
- ///
- /// See `diagnostic::RenderSpan::Suggestion` for more information.
- pub fn span_suggestion<S: Into<MultiSpan>>(&mut self,
- sp: S,
- msg: &str,
- suggestion: String)
- -> &mut DiagnosticBuilder<'a> {
- self.sub(Level::Help, msg, MultiSpan::new(), Some(Suggestion(CodeSuggestion {
- msp: sp.into(),
- substitutes: vec![suggestion],
- })));
- self
- }
-
- pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self {
- self.span = sp.into();
- self
- }
-
- pub fn code(&mut self, s: String) -> &mut Self {
- self.code = Some(s);
- self
- }
-
- pub fn message(&self) -> &str {
- &self.message
- }
-
- pub fn level(&self) -> Level {
- self.level
- }
-
- /// Convenience function for internal use, clients should use one of the
- /// struct_* methods on Handler.
- fn new(handler: &'a Handler,
- level: Level,
- message: &str) -> DiagnosticBuilder<'a> {
- DiagnosticBuilder {
- handler: handler,
- level: level,
- message: message.to_owned(),
- code: None,
- span: MultiSpan::new(),
- children: vec![],
- }
- }
-
- /// Convenience function for internal use, clients should use one of the
- /// public methods above.
- fn sub(&mut self,
- level: Level,
- message: &str,
- span: MultiSpan,
- render_span: Option<RenderSpan>) {
- let sub = SubDiagnostic {
- level: level,
- message: message.to_owned(),
- span: span,
- render_span: render_span,
- };
- self.children.push(sub);
- }
-}
-
-impl<'a> fmt::Debug for DiagnosticBuilder<'a> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.message.fmt(f)
- }
-}
-
-/// Destructor bomb - a DiagnosticBuilder must be either emitted or cancelled or
-/// we emit a bug.
-impl<'a> Drop for DiagnosticBuilder<'a> {
- fn drop(&mut self) {
- if !panicking() && !self.cancelled() {
- self.handler.emit.borrow_mut().emit(&MultiSpan::new(),
- "Error constructed but not emitted",
- None,
- Bug);
- panic!();
- }
- }
-}
-
-/// A handler deals with errors; certain errors
-/// (fatal, bug, unimpl) may cause immediate exit,
-/// others log errors for later reporting.
-pub struct Handler {
- err_count: Cell<usize>,
- emit: RefCell<Box<Emitter>>,
- pub can_emit_warnings: bool,
- treat_err_as_bug: bool,
- continue_after_error: Cell<bool>,
- delayed_span_bug: RefCell<Option<(MultiSpan, String)>>,
-}
-
-impl Handler {
- pub fn with_tty_emitter(color_config: ColorConfig,
- registry: Option<diagnostics::registry::Registry>,
- can_emit_warnings: bool,
- treat_err_as_bug: bool,
- cm: Rc<codemap::CodeMap>)
- -> Handler {
- let emitter = Box::new(EmitterWriter::stderr(color_config, registry, cm));
- Handler::with_emitter(can_emit_warnings, treat_err_as_bug, emitter)
- }
-
- pub fn with_emitter(can_emit_warnings: bool,
- treat_err_as_bug: bool,
- e: Box<Emitter>) -> Handler {
- Handler {
- err_count: Cell::new(0),
- emit: RefCell::new(e),
- can_emit_warnings: can_emit_warnings,
- treat_err_as_bug: treat_err_as_bug,
- continue_after_error: Cell::new(true),
- delayed_span_bug: RefCell::new(None),
- }
- }
-
- pub fn set_continue_after_error(&self, continue_after_error: bool) {
- self.continue_after_error.set(continue_after_error);
- }
-
- pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> {
- DiagnosticBuilder::new(self, Level::Cancelled, "")
- }
-
- pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
- sp: S,
- msg: &str)
- -> DiagnosticBuilder<'a> {
- let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
- result.set_span(sp);
- if !self.can_emit_warnings {
- result.cancel();
- }
- result
- }
- pub fn struct_span_warn_with_code<'a, S: Into<MultiSpan>>(&'a self,
- sp: S,
- msg: &str,
- code: &str)
- -> DiagnosticBuilder<'a> {
- let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
- result.set_span(sp);
- result.code(code.to_owned());
- if !self.can_emit_warnings {
- result.cancel();
- }
- result
- }
- pub fn struct_warn<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
- let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
- if !self.can_emit_warnings {
- result.cancel();
- }
- result
- }
- pub fn struct_span_err<'a, S: Into<MultiSpan>>(&'a self,
- sp: S,
- msg: &str)
- -> DiagnosticBuilder<'a> {
- self.bump_err_count();
- let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
- result.set_span(sp);
- result
- }
- pub fn struct_span_err_with_code<'a, S: Into<MultiSpan>>(&'a self,
- sp: S,
- msg: &str,
- code: &str)
- -> DiagnosticBuilder<'a> {
- self.bump_err_count();
- let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
- result.set_span(sp);
- result.code(code.to_owned());
- result
- }
- pub fn struct_err<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
- self.bump_err_count();
- DiagnosticBuilder::new(self, Level::Error, msg)
- }
- pub fn struct_span_fatal<'a, S: Into<MultiSpan>>(&'a self,
- sp: S,
- msg: &str)
- -> DiagnosticBuilder<'a> {
- self.bump_err_count();
- let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
- result.set_span(sp);
- result
- }
- pub fn struct_span_fatal_with_code<'a, S: Into<MultiSpan>>(&'a self,
- sp: S,
- msg: &str,
- code: &str)
- -> DiagnosticBuilder<'a> {
- self.bump_err_count();
- let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
- result.set_span(sp);
- result.code(code.to_owned());
- result
- }
- pub fn struct_fatal<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
- self.bump_err_count();
- DiagnosticBuilder::new(self, Level::Fatal, msg)
- }
-
- pub fn cancel(&mut self, err: &mut DiagnosticBuilder) {
- if err.level == Level::Error || err.level == Level::Fatal {
- assert!(self.has_errors());
- self.err_count.set(self.err_count.get() + 1);
- }
- err.cancel();
- }
-
- fn panic_if_treat_err_as_bug(&self) {
- if self.treat_err_as_bug {
- panic!("encountered error with `-Z treat_err_as_bug");
- }
- }
-
- pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str)
- -> FatalError {
- self.emit(&sp.into(), msg, Fatal);
- self.bump_err_count();
- self.panic_if_treat_err_as_bug();
- return FatalError;
- }
- pub fn span_fatal_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: &str)
- -> FatalError {
- self.emit_with_code(&sp.into(), msg, code, Fatal);
- self.bump_err_count();
- self.panic_if_treat_err_as_bug();
- return FatalError;
- }
- pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
- self.emit(&sp.into(), msg, Error);
- self.bump_err_count();
- self.panic_if_treat_err_as_bug();
- }
- pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: &str) {
- self.emit_with_code(&sp.into(), msg, code, Error);
- self.bump_err_count();
- self.panic_if_treat_err_as_bug();
- }
- pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
- self.emit(&sp.into(), msg, Warning);
- }
- pub fn span_warn_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: &str) {
- self.emit_with_code(&sp.into(), msg, code, Warning);
- }
- pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
- self.emit(&sp.into(), msg, Bug);
- panic!(ExplicitBug);
- }
- pub fn delay_span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
- let mut delayed = self.delayed_span_bug.borrow_mut();
- *delayed = Some((sp.into(), msg.to_string()));
- }
- pub fn span_bug_no_panic<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
- self.emit(&sp.into(), msg, Bug);
- self.bump_err_count();
- }
- pub fn span_note_without_error<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
- self.emit.borrow_mut().emit(&sp.into(), msg, None, Note);
- }
- pub fn span_unimpl<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
- self.span_bug(sp, &format!("unimplemented {}", msg));
- }
- pub fn fatal(&self, msg: &str) -> FatalError {
- if self.treat_err_as_bug {
- self.bug(msg);
- }
- self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Fatal);
- self.bump_err_count();
- FatalError
- }
- pub fn err(&self, msg: &str) {
- if self.treat_err_as_bug {
- self.bug(msg);
- }
- self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Error);
- self.bump_err_count();
- }
- pub fn warn(&self, msg: &str) {
- self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Warning);
- }
- pub fn note_without_error(&self, msg: &str) {
- self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Note);
- }
- pub fn bug(&self, msg: &str) -> ! {
- self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Bug);
- panic!(ExplicitBug);
- }
- pub fn unimpl(&self, msg: &str) -> ! {
- self.bug(&format!("unimplemented {}", msg));
- }
-
- pub fn bump_err_count(&self) {
- self.err_count.set(self.err_count.get() + 1);
- }
-
- pub fn err_count(&self) -> usize {
- self.err_count.get()
- }
-
- pub fn has_errors(&self) -> bool {
- self.err_count.get() > 0
- }
- pub fn abort_if_errors(&self) {
- let s;
- match self.err_count.get() {
- 0 => {
- let delayed_bug = self.delayed_span_bug.borrow();
- match *delayed_bug {
- Some((ref span, ref errmsg)) => {
- self.span_bug(span.clone(), errmsg);
- },
- _ => {}
- }
-
- return;
- }
- 1 => s = "aborting due to previous error".to_string(),
- _ => {
- s = format!("aborting due to {} previous errors",
- self.err_count.get());
- }
- }
-
- panic!(self.fatal(&s));
- }
- pub fn emit(&self,
- msp: &MultiSpan,
- msg: &str,
- lvl: Level) {
- if lvl == Warning && !self.can_emit_warnings { return }
- self.emit.borrow_mut().emit(&msp, msg, None, lvl);
- if !self.continue_after_error.get() { self.abort_if_errors(); }
- }
- pub fn emit_with_code(&self,
- msp: &MultiSpan,
- msg: &str,
- code: &str,
- lvl: Level) {
- if lvl == Warning && !self.can_emit_warnings { return }
- self.emit.borrow_mut().emit(&msp, msg, Some(code), lvl);
- if !self.continue_after_error.get() { self.abort_if_errors(); }
- }
-}
-
-
-#[derive(Copy, PartialEq, Clone, Debug)]
-pub enum Level {
- Bug,
- Fatal,
- // An error which while not immediately fatal, should stop the compiler
- // progressing beyond the current phase.
- PhaseFatal,
- Error,
- Warning,
- Note,
- Help,
- Cancelled,
-}
-
-impl fmt::Display for Level {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.to_str().fmt(f)
- }
-}
-
-impl Level {
- fn color(self) -> term::color::Color {
- match self {
- Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED,
- Warning => term::color::YELLOW,
- Note => term::color::BRIGHT_GREEN,
- Help => term::color::BRIGHT_CYAN,
- Cancelled => unreachable!(),
- }
- }
-
- fn to_str(self) -> &'static str {
- match self {
- Bug => "error: internal compiler error",
- Fatal | PhaseFatal | Error => "error",
- Warning => "warning",
- Note => "note",
- Help => "help",
- Cancelled => panic!("Shouldn't call on cancelled error"),
- }
- }
-}
-
-pub fn expect<T, M>(diag: &Handler, opt: Option<T>, msg: M) -> T where
- M: FnOnce() -> String,
-{
- match opt {
- Some(t) => t,
- None => diag.bug(&msg()),
- }
-}
-
-/// True if we should use the old-skool error format style. This is
-/// the default setting until the new errors are deemed stable enough
-/// for general use.
-///
-/// FIXME(#33240)
-#[cfg(not(test))]
-pub fn check_old_skool() -> bool {
- use std::env;
- env::var("RUST_NEW_ERROR_FORMAT").is_err()
-}
-
-/// For unit tests, use the new format.
-#[cfg(test)]
-pub fn check_old_skool() -> bool {
- false
-}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Code for annotating snippets.
-
-use codemap::{CharPos, CodeMap, FileMap, LineInfo, Span};
-use errors::check_old_skool;
-use std::cmp;
-use std::rc::Rc;
-use std::mem;
-
-mod test;
-
-#[derive(Clone)]
-pub struct SnippetData {
- codemap: Rc<CodeMap>,
- files: Vec<FileInfo>,
-}
-
-#[derive(Clone)]
-pub struct FileInfo {
- file: Rc<FileMap>,
-
- /// The "primary file", if any, gets a `-->` marker instead of
- /// `>>>`, and has a line-number/column printed and not just a
- /// filename. It appears first in the listing. It is known to
- /// contain at least one primary span, though primary spans (which
- /// are designated with `^^^`) may also occur in other files.
- primary_span: Option<Span>,
-
- lines: Vec<Line>,
-}
-
-#[derive(Clone, Debug)]
-struct Line {
- line_index: usize,
- annotations: Vec<Annotation>,
-}
-
-#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)]
-struct Annotation {
- /// Start column, 0-based indexing -- counting *characters*, not
- /// utf-8 bytes. Note that it is important that this field goes
- /// first, so that when we sort, we sort orderings by start
- /// column.
- start_col: usize,
-
- /// End column within the line (exclusive)
- end_col: usize,
-
- /// Is this annotation derived from primary span
- is_primary: bool,
-
- /// Is this a large span minimized down to a smaller span
- is_minimized: bool,
-
- /// Optional label to display adjacent to the annotation.
- label: Option<String>,
-}
-
-#[derive(Debug)]
-pub struct RenderedLine {
- pub text: Vec<StyledString>,
- pub kind: RenderedLineKind,
-}
-
-#[derive(Debug)]
-pub struct StyledString {
- pub text: String,
- pub style: Style,
-}
-
-#[derive(Debug)]
-pub struct StyledBuffer {
- text: Vec<Vec<char>>,
- styles: Vec<Vec<Style>>
-}
-
-#[derive(Copy, Clone, Debug, PartialEq)]
-pub enum Style {
- FileNameStyle,
- LineAndColumn,
- LineNumber,
- Quotation,
- UnderlinePrimary,
- UnderlineSecondary,
- LabelPrimary,
- LabelSecondary,
- OldSkoolNoteText,
- OldSkoolNote,
- NoStyle,
-}
-
-#[derive(Debug, Clone)]
-pub enum RenderedLineKind {
- PrimaryFileName,
- OtherFileName,
- SourceText {
- file: Rc<FileMap>,
- line_index: usize,
- },
- Annotations,
- Elision,
-}
-
-impl SnippetData {
- pub fn new(codemap: Rc<CodeMap>,
- primary_span: Option<Span>) // (*)
- -> Self {
- // (*) The primary span indicates the file that must appear
- // first, and which will have a line number etc in its
- // name. Outside of tests, this is always `Some`, but for many
- // tests it's not relevant to test this portion of the logic,
- // and it's tedious to pick a primary span (read: tedious to
- // port older tests that predate the existence of a primary
- // span).
-
- debug!("SnippetData::new(primary_span={:?})", primary_span);
-
- let mut data = SnippetData {
- codemap: codemap.clone(),
- files: vec![]
- };
- if let Some(primary_span) = primary_span {
- let lo = codemap.lookup_char_pos(primary_span.lo);
- data.files.push(
- FileInfo {
- file: lo.file,
- primary_span: Some(primary_span),
- lines: vec![],
- });
- }
- data
- }
-
- pub fn push(&mut self, span: Span, is_primary: bool, label: Option<String>) {
- debug!("SnippetData::push(span={:?}, is_primary={}, label={:?})",
- span, is_primary, label);
-
- let file_lines = match self.codemap.span_to_lines(span) {
- Ok(file_lines) => file_lines,
- Err(_) => {
- // ignore unprintable spans completely.
- return;
- }
- };
-
- self.file(&file_lines.file)
- .push_lines(&file_lines.lines, is_primary, label);
- }
-
- fn file(&mut self, file_map: &Rc<FileMap>) -> &mut FileInfo {
- let index = self.files.iter().position(|f| f.file.name == file_map.name);
- if let Some(index) = index {
- return &mut self.files[index];
- }
-
- self.files.push(
- FileInfo {
- file: file_map.clone(),
- lines: vec![],
- primary_span: None,
- });
- self.files.last_mut().unwrap()
- }
-
- pub fn render_lines(&self) -> Vec<RenderedLine> {
- debug!("SnippetData::render_lines()");
-
- let mut rendered_lines: Vec<_> =
- self.files.iter()
- .flat_map(|f| f.render_file_lines(&self.codemap))
- .collect();
- prepend_prefixes(&mut rendered_lines);
- trim_lines(&mut rendered_lines);
- rendered_lines
- }
-}
-
-pub trait StringSource {
- fn make_string(self) -> String;
-}
-
-impl StringSource for String {
- fn make_string(self) -> String {
- self
- }
-}
-
-impl StringSource for Vec<char> {
- fn make_string(self) -> String {
- self.into_iter().collect()
- }
-}
-
-impl<S> From<(S, Style, RenderedLineKind)> for RenderedLine
- where S: StringSource
-{
- fn from((text, style, kind): (S, Style, RenderedLineKind)) -> Self {
- RenderedLine {
- text: vec![StyledString {
- text: text.make_string(),
- style: style,
- }],
- kind: kind,
- }
- }
-}
-
-impl<S1,S2> From<(S1, Style, S2, Style, RenderedLineKind)> for RenderedLine
- where S1: StringSource, S2: StringSource
-{
- fn from(tuple: (S1, Style, S2, Style, RenderedLineKind)) -> Self {
- let (text1, style1, text2, style2, kind) = tuple;
- RenderedLine {
- text: vec![
- StyledString {
- text: text1.make_string(),
- style: style1,
- },
- StyledString {
- text: text2.make_string(),
- style: style2,
- }
- ],
- kind: kind,
- }
- }
-}
-
-impl RenderedLine {
- fn trim_last(&mut self) {
- if let Some(last_text) = self.text.last_mut() {
- let len = last_text.text.trim_right().len();
- last_text.text.truncate(len);
- }
- }
-}
-
-impl RenderedLineKind {
- fn prefix(&self) -> StyledString {
- match *self {
- RenderedLineKind::SourceText { file: _, line_index } =>
- StyledString {
- text: format!("{}", line_index + 1),
- style: Style::LineNumber,
- },
- RenderedLineKind::Elision =>
- StyledString {
- text: String::from("..."),
- style: Style::LineNumber,
- },
- RenderedLineKind::PrimaryFileName |
- RenderedLineKind::OtherFileName |
- RenderedLineKind::Annotations =>
- StyledString {
- text: String::from(""),
- style: Style::LineNumber,
- },
- }
- }
-}
-
-impl StyledBuffer {
- fn new() -> StyledBuffer {
- StyledBuffer { text: vec![], styles: vec![] }
- }
-
- fn render(&self, source_kind: RenderedLineKind) -> Vec<RenderedLine> {
- let mut output: Vec<RenderedLine> = vec![];
- let mut styled_vec: Vec<StyledString> = vec![];
-
- for (row, row_style) in self.text.iter().zip(&self.styles) {
- let mut current_style = Style::NoStyle;
- let mut current_text = String::new();
-
- for (&c, &s) in row.iter().zip(row_style) {
- if s != current_style {
- if !current_text.is_empty() {
- styled_vec.push(StyledString { text: current_text, style: current_style });
- }
- current_style = s;
- current_text = String::new();
- }
- current_text.push(c);
- }
- if !current_text.is_empty() {
- styled_vec.push(StyledString { text: current_text, style: current_style });
- }
-
- if output.is_empty() {
- //We know our first output line is source and the rest are highlights and labels
- output.push(RenderedLine { text: styled_vec, kind: source_kind.clone() });
- } else {
- output.push(RenderedLine { text: styled_vec, kind: RenderedLineKind::Annotations });
- }
- styled_vec = vec![];
- }
-
- output
- }
-
- fn putc(&mut self, line: usize, col: usize, chr: char, style: Style) {
- while line >= self.text.len() {
- self.text.push(vec![]);
- self.styles.push(vec![]);
- }
-
- if col < self.text[line].len() {
- self.text[line][col] = chr;
- self.styles[line][col] = style;
- } else {
- let mut i = self.text[line].len();
- while i < col {
- let s = match self.text[0].get(i) {
- Some(&'\t') => '\t',
- _ => ' '
- };
- self.text[line].push(s);
- self.styles[line].push(Style::NoStyle);
- i += 1;
- }
- self.text[line].push(chr);
- self.styles[line].push(style);
- }
- }
-
- fn puts(&mut self, line: usize, col: usize, string: &str, style: Style) {
- let mut n = col;
- for c in string.chars() {
- self.putc(line, n, c, style);
- n += 1;
- }
- }
-
- fn set_style(&mut self, line: usize, col: usize, style: Style) {
- if self.styles.len() > line && self.styles[line].len() > col {
- self.styles[line][col] = style;
- }
- }
-
- fn append(&mut self, line: usize, string: &str, style: Style) {
- if line >= self.text.len() {
- self.puts(line, 0, string, style);
- } else {
- let col = self.text[line].len();
- self.puts(line, col, string, style);
- }
- }
-}
-
-impl FileInfo {
- fn push_lines(&mut self,
- lines: &[LineInfo],
- is_primary: bool,
- label: Option<String>) {
- assert!(lines.len() > 0);
-
- // If a span covers multiple lines, we reduce it to a single
- // point at the start of the span. This means that instead
- // of producing output like this:
- //
- // ```
- // --> foo.rs:2:1
- // 2 |> fn conflicting_items<'grammar>(state: &LR0State<'grammar>)
- // |> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- // 3 |> -> Set<LR0Item<'grammar>>
- // |> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- // (and so on)
- // ```
- //
- // we produce:
- //
- // ```
- // --> foo.rs:2:1
- // 2 |> fn conflicting_items<'grammar>(state: &LR0State<'grammar>)
- // ^
- // ```
- //
- // Basically, although this loses information, multi-line spans just
- // never look good.
-
- let (line, start_col, mut end_col, is_minimized) = if lines.len() == 1 {
- (lines[0].line_index, lines[0].start_col, lines[0].end_col, false)
- } else {
- (lines[0].line_index, lines[0].start_col, CharPos(lines[0].start_col.0 + 1), true)
- };
-
- // Watch out for "empty spans". If we get a span like 6..6, we
- // want to just display a `^` at 6, so convert that to
- // 6..7. This is degenerate input, but it's best to degrade
- // gracefully -- and the parser likes to suply a span like
- // that for EOF, in particular.
- if start_col == end_col {
- end_col.0 += 1;
- }
-
- let index = self.ensure_source_line(line);
- self.lines[index].push_annotation(start_col,
- end_col,
- is_primary,
- is_minimized,
- label);
- }
-
- /// Ensure that we have a `Line` struct corresponding to
- /// `line_index` in the file. If we already have some other lines,
- /// then this will add the intervening lines to ensure that we
- /// have a complete snippet. (Note that when we finally display,
- /// some of those lines may be elided.)
- fn ensure_source_line(&mut self, line_index: usize) -> usize {
- if self.lines.is_empty() {
- self.lines.push(Line::new(line_index));
- return 0;
- }
-
- // Find the range of lines we have thus far.
- let first_line_index = self.lines.first().unwrap().line_index;
- let last_line_index = self.lines.last().unwrap().line_index;
- assert!(first_line_index <= last_line_index);
-
- // If the new line is lower than all the lines we have thus
- // far, then insert the new line and any intervening lines at
- // the front. In a silly attempt at micro-optimization, we
- // don't just call `insert` repeatedly, but instead make a new
- // (empty) vector, pushing the new lines onto it, and then
- // appending the old vector.
- if line_index < first_line_index {
- let lines = mem::replace(&mut self.lines, vec![]);
- self.lines.extend(
- (line_index .. first_line_index)
- .map(|line| Line::new(line))
- .chain(lines));
- return 0;
- }
-
- // If the new line comes after the ones we have so far, insert
- // lines for it.
- if line_index > last_line_index {
- self.lines.extend(
- (last_line_index+1 .. line_index+1)
- .map(|line| Line::new(line)));
- return self.lines.len() - 1;
- }
-
- // Otherwise it should already exist.
- return line_index - first_line_index;
- }
-
- fn render_file_lines(&self, codemap: &Rc<CodeMap>) -> Vec<RenderedLine> {
- let old_school = check_old_skool();
-
- // As a first step, we elide any instance of more than one
- // continuous unannotated line.
-
- let mut lines_iter = self.lines.iter();
- let mut output = vec![];
-
- // First insert the name of the file.
- if !old_school {
- match self.primary_span {
- Some(span) => {
- let lo = codemap.lookup_char_pos(span.lo);
- output.push(RenderedLine {
- text: vec![StyledString {
- text: lo.file.name.clone(),
- style: Style::FileNameStyle,
- }, StyledString {
- text: format!(":{}:{}", lo.line, lo.col.0 + 1),
- style: Style::LineAndColumn,
- }],
- kind: RenderedLineKind::PrimaryFileName,
- });
- output.push(RenderedLine {
- text: vec![StyledString {
- text: "".to_string(),
- style: Style::FileNameStyle,
- }],
- kind: RenderedLineKind::Annotations,
- });
- }
- None => {
- output.push(RenderedLine {
- text: vec![StyledString {
- text: self.file.name.clone(),
- style: Style::FileNameStyle,
- }],
- kind: RenderedLineKind::OtherFileName,
- });
- output.push(RenderedLine {
- text: vec![StyledString {
- text: "".to_string(),
- style: Style::FileNameStyle,
- }],
- kind: RenderedLineKind::Annotations,
- });
- }
- }
- }
-
- let mut next_line = lines_iter.next();
- while next_line.is_some() {
- // Consume lines with annotations.
- while let Some(line) = next_line {
- if line.annotations.is_empty() { break; }
-
- let mut rendered_lines = self.render_line(line);
- assert!(!rendered_lines.is_empty());
- if old_school {
- match self.primary_span {
- Some(span) => {
- let lo = codemap.lookup_char_pos(span.lo);
- let hi = codemap.lookup_char_pos(span.hi);
- //Before each secondary line in old skool-mode, print the label
- //as an old-style note
- if !line.annotations[0].is_primary {
- if let Some(ann) = line.annotations[0].label.clone() {
- output.push(RenderedLine {
- text: vec![StyledString {
- text: lo.file.name.clone(),
- style: Style::FileNameStyle,
- }, StyledString {
- text: format!(":{}:{}: {}:{} ", lo.line, lo.col.0 + 1,
- hi.line, hi.col.0+1),
- style: Style::LineAndColumn,
- }, StyledString {
- text: format!("note: "),
- style: Style::OldSkoolNote,
- }, StyledString {
- text: format!("{}", ann),
- style: Style::OldSkoolNoteText,
- }],
- kind: RenderedLineKind::Annotations,
- });
- }
- }
- rendered_lines[0].text.insert(0, StyledString {
- text: format!(":{} ", lo.line),
- style: Style::LineAndColumn,
- });
- rendered_lines[0].text.insert(0, StyledString {
- text: lo.file.name.clone(),
- style: Style::FileNameStyle,
- });
- let gap_amount =
- rendered_lines[0].text[0].text.len() +
- rendered_lines[0].text[1].text.len();
- assert!(rendered_lines.len() >= 2,
- "no annotations resulted from: {:?}",
- line);
- for i in 1..rendered_lines.len() {
- rendered_lines[i].text.insert(0, StyledString {
- text: vec![" "; gap_amount].join(""),
- style: Style::NoStyle
- });
- }
- }
- _ =>()
- }
- }
- output.append(&mut rendered_lines);
- next_line = lines_iter.next();
- }
-
- // Emit lines without annotations, but only if they are
- // followed by a line with an annotation.
- let unannotated_line = next_line;
- let mut unannotated_lines = 0;
- while let Some(line) = next_line {
- if !line.annotations.is_empty() { break; }
- unannotated_lines += 1;
- next_line = lines_iter.next();
- }
- if unannotated_lines > 1 {
- output.push(RenderedLine::from((String::new(),
- Style::NoStyle,
- RenderedLineKind::Elision)));
- } else if let Some(line) = unannotated_line {
- output.append(&mut self.render_line(line));
- }
- }
-
- output
- }
-
- fn render_line(&self, line: &Line) -> Vec<RenderedLine> {
- let old_school = check_old_skool();
- let source_string = self.file.get_line(line.line_index)
- .unwrap_or("");
- let source_kind = RenderedLineKind::SourceText {
- file: self.file.clone(),
- line_index: line.line_index,
- };
-
- let mut styled_buffer = StyledBuffer::new();
-
- // First create the source line we will highlight.
- styled_buffer.append(0, &source_string, Style::Quotation);
-
- if line.annotations.is_empty() {
- return styled_buffer.render(source_kind);
- }
-
- // We want to display like this:
- //
- // vec.push(vec.pop().unwrap());
- // --- ^^^ _ previous borrow ends here
- // | |
- // | error occurs here
- // previous borrow of `vec` occurs here
- //
- // But there are some weird edge cases to be aware of:
- //
- // vec.push(vec.pop().unwrap());
- // -------- - previous borrow ends here
- // ||
- // |this makes no sense
- // previous borrow of `vec` occurs here
- //
- // For this reason, we group the lines into "highlight lines"
- // and "annotations lines", where the highlight lines have the `~`.
-
- //let mut highlight_line = Self::whitespace(&source_string);
-
- // Sort the annotations by (start, end col)
- let mut annotations = line.annotations.clone();
- annotations.sort();
-
- // Next, create the highlight line.
- for annotation in &annotations {
- if old_school {
- for p in annotation.start_col .. annotation.end_col {
- if p == annotation.start_col {
- styled_buffer.putc(1, p, '^',
- if annotation.is_primary {
- Style::UnderlinePrimary
- } else {
- Style::OldSkoolNote
- });
- }
- else {
- styled_buffer.putc(1, p, '~',
- if annotation.is_primary {
- Style::UnderlinePrimary
- } else {
- Style::OldSkoolNote
- });
- }
- }
- }
- else {
- for p in annotation.start_col .. annotation.end_col {
- if annotation.is_primary {
- styled_buffer.putc(1, p, '^', Style::UnderlinePrimary);
- if !annotation.is_minimized {
- styled_buffer.set_style(0, p, Style::UnderlinePrimary);
- }
- } else {
- styled_buffer.putc(1, p, '-', Style::UnderlineSecondary);
- if !annotation.is_minimized {
- styled_buffer.set_style(0, p, Style::UnderlineSecondary);
- }
- }
- }
- }
- }
-
- // Now we are going to write labels in. To start, we'll exclude
- // the annotations with no labels.
- let (labeled_annotations, unlabeled_annotations): (Vec<_>, _) =
- annotations.into_iter()
- .partition(|a| a.label.is_some());
-
- // If there are no annotations that need text, we're done.
- if labeled_annotations.is_empty() {
- return styled_buffer.render(source_kind);
- }
- if old_school {
- return styled_buffer.render(source_kind);
- }
-
- // Now add the text labels. We try, when possible, to stick the rightmost
- // annotation at the end of the highlight line:
- //
- // vec.push(vec.pop().unwrap());
- // --- --- - previous borrow ends here
- //
- // But sometimes that's not possible because one of the other
- // annotations overlaps it. For example, from the test
- // `span_overlap_label`, we have the following annotations
- // (written on distinct lines for clarity):
- //
- // fn foo(x: u32) {
- // --------------
- // -
- //
- // In this case, we can't stick the rightmost-most label on
- // the highlight line, or we would get:
- //
- // fn foo(x: u32) {
- // -------- x_span
- // |
- // fn_span
- //
- // which is totally weird. Instead we want:
- //
- // fn foo(x: u32) {
- // --------------
- // | |
- // | x_span
- // fn_span
- //
- // which is...less weird, at least. In fact, in general, if
- // the rightmost span overlaps with any other span, we should
- // use the "hang below" version, so we can at least make it
- // clear where the span *starts*.
- let mut labeled_annotations = &labeled_annotations[..];
- match labeled_annotations.split_last().unwrap() {
- (last, previous) => {
- if previous.iter()
- .chain(&unlabeled_annotations)
- .all(|a| !overlaps(a, last))
- {
- // append the label afterwards; we keep it in a separate
- // string
- let highlight_label: String = format!(" {}", last.label.as_ref().unwrap());
- if last.is_primary {
- styled_buffer.append(1, &highlight_label, Style::LabelPrimary);
- } else {
- styled_buffer.append(1, &highlight_label, Style::LabelSecondary);
- }
- labeled_annotations = previous;
- }
- }
- }
-
- // If that's the last annotation, we're done
- if labeled_annotations.is_empty() {
- return styled_buffer.render(source_kind);
- }
-
- for (index, annotation) in labeled_annotations.iter().enumerate() {
- // Leave:
- // - 1 extra line
- // - One line for each thing that comes after
- let comes_after = labeled_annotations.len() - index - 1;
- let blank_lines = 3 + comes_after;
-
- // For each blank line, draw a `|` at our column. The
- // text ought to be long enough for this.
- for index in 2..blank_lines {
- if annotation.is_primary {
- styled_buffer.putc(index, annotation.start_col, '|', Style::UnderlinePrimary);
- } else {
- styled_buffer.putc(index, annotation.start_col, '|', Style::UnderlineSecondary);
- }
- }
-
- if annotation.is_primary {
- styled_buffer.puts(blank_lines, annotation.start_col,
- annotation.label.as_ref().unwrap(), Style::LabelPrimary);
- } else {
- styled_buffer.puts(blank_lines, annotation.start_col,
- annotation.label.as_ref().unwrap(), Style::LabelSecondary);
- }
- }
-
- styled_buffer.render(source_kind)
- }
-}
-
-fn prepend_prefixes(rendered_lines: &mut [RenderedLine]) {
- let old_school = check_old_skool();
- if old_school {
- return;
- }
-
- let prefixes: Vec<_> =
- rendered_lines.iter()
- .map(|rl| rl.kind.prefix())
- .collect();
-
- // find the max amount of spacing we need; add 1 to
- // p.text.len() to leave space between the prefix and the
- // source text
- let padding_len =
- prefixes.iter()
- .map(|p| if p.text.len() == 0 { 0 } else { p.text.len() + 1 })
- .max()
- .unwrap_or(0);
-
- // Ensure we insert at least one character of padding, so that the
- // `-->` arrows can fit etc.
- let padding_len = cmp::max(padding_len, 1);
-
- for (mut prefix, line) in prefixes.into_iter().zip(rendered_lines) {
- let extra_spaces = (prefix.text.len() .. padding_len).map(|_| ' ');
- prefix.text.extend(extra_spaces);
- match line.kind {
- RenderedLineKind::Elision => {
- line.text.insert(0, prefix);
- }
- RenderedLineKind::PrimaryFileName => {
- // --> filename
- // 22 |>
- // ^
- // padding_len
- let dashes = (0..padding_len - 1).map(|_| ' ')
- .chain(Some('-'))
- .chain(Some('-'))
- .chain(Some('>'))
- .chain(Some(' '));
- line.text.insert(0, StyledString {text: dashes.collect(),
- style: Style::LineNumber})
- }
- RenderedLineKind::OtherFileName => {
- // ::: filename
- // 22 |>
- // ^
- // padding_len
- let dashes = (0..padding_len - 1).map(|_| ' ')
- .chain(Some(':'))
- .chain(Some(':'))
- .chain(Some(':'))
- .chain(Some(' '));
- line.text.insert(0, StyledString {text: dashes.collect(),
- style: Style::LineNumber})
- }
- _ => {
- line.text.insert(0, prefix);
- line.text.insert(1, StyledString {text: String::from("|> "),
- style: Style::LineNumber})
- }
- }
- }
-}
-
-fn trim_lines(rendered_lines: &mut [RenderedLine]) {
- for line in rendered_lines {
- while !line.text.is_empty() {
- line.trim_last();
- if line.text.last().unwrap().text.is_empty() {
- line.text.pop();
- } else {
- break;
- }
- }
- }
-}
-
-impl Line {
- fn new(line_index: usize) -> Line {
- Line {
- line_index: line_index,
- annotations: vec![]
- }
- }
-
- fn push_annotation(&mut self,
- start: CharPos,
- end: CharPos,
- is_primary: bool,
- is_minimized: bool,
- label: Option<String>) {
- self.annotations.push(Annotation {
- start_col: start.0,
- end_col: end.0,
- is_primary: is_primary,
- is_minimized: is_minimized,
- label: label,
- });
- }
-}
-
-fn overlaps(a1: &Annotation,
- a2: &Annotation)
- -> bool
-{
- (a2.start_col .. a2.end_col).contains(a1.start_col) ||
- (a1.start_col .. a1.end_col).contains(a2.start_col)
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Code for testing annotated snippets.
-
-#![cfg(test)]
-
-use codemap::{BytePos, CodeMap, FileMap, NO_EXPANSION, Span};
-use std::rc::Rc;
-use super::{RenderedLine, SnippetData};
-
-/// Returns the span corresponding to the `n`th occurrence of
-/// `substring` in `source_text`.
-trait CodeMapExtension {
- fn span_substr(&self,
- file: &Rc<FileMap>,
- source_text: &str,
- substring: &str,
- n: usize)
- -> Span;
-}
-
-impl CodeMapExtension for CodeMap {
- fn span_substr(&self,
- file: &Rc<FileMap>,
- source_text: &str,
- substring: &str,
- n: usize)
- -> Span
- {
- println!("span_substr(file={:?}/{:?}, substring={:?}, n={})",
- file.name, file.start_pos, substring, n);
- let mut i = 0;
- let mut hi = 0;
- loop {
- let offset = source_text[hi..].find(substring).unwrap_or_else(|| {
- panic!("source_text `{}` does not have {} occurrences of `{}`, only {}",
- source_text, n, substring, i);
- });
- let lo = hi + offset;
- hi = lo + substring.len();
- if i == n {
- let span = Span {
- lo: BytePos(lo as u32 + file.start_pos.0),
- hi: BytePos(hi as u32 + file.start_pos.0),
- expn_id: NO_EXPANSION,
- };
- assert_eq!(&self.span_to_snippet(span).unwrap()[..],
- substring);
- return span;
- }
- i += 1;
- }
- }
-}
-
-fn splice(start: Span, end: Span) -> Span {
- Span {
- lo: start.lo,
- hi: end.hi,
- expn_id: NO_EXPANSION,
- }
-}
-
-fn make_string(lines: &[RenderedLine]) -> String {
- lines.iter()
- .flat_map(|rl| {
- rl.text.iter()
- .map(|s| &s.text[..])
- .chain(Some("\n"))
- })
- .collect()
-}
-
-#[test]
-fn tab() {
- let file_text = "
-fn foo() {
-\tbar;
-}
-";
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
- let span_bar = cm.span_substr(&foo, file_text, "bar", 0);
-
- let mut snippet = SnippetData::new(cm, Some(span_bar));
- snippet.push(span_bar, true, None);
-
- let lines = snippet.render_lines();
- let text = make_string(&lines);
- assert_eq!(&text[..], &"
- --> foo.rs:3:2
- |>
-3 |> \tbar;
- |> \t^^^
-"[1..]);
-}
-
-#[test]
-fn one_line() {
- let file_text = r#"
-fn foo() {
- vec.push(vec.pop().unwrap());
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
- let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0);
- let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1);
- let span_semi = cm.span_substr(&foo, file_text, ";", 0);
-
- let mut snippet = SnippetData::new(cm, None);
- snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here")));
- snippet.push(span_vec1, false, Some(format!("error occurs here")));
- snippet.push(span_semi, false, Some(format!("previous borrow ends here")));
-
- let lines = snippet.render_lines();
- println!("{:#?}", lines);
-
- let text: String = make_string(&lines);
-
- println!("text=\n{}", text);
- assert_eq!(&text[..], &r#"
- ::: foo.rs
- |>
-3 |> vec.push(vec.pop().unwrap());
- |> --- --- - previous borrow ends here
- |> | |
- |> | error occurs here
- |> previous borrow of `vec` occurs here
-"#[1..]);
-}
-
-#[test]
-fn two_files() {
- let file_text_foo = r#"
-fn foo() {
- vec.push(vec.pop().unwrap());
-}
-"#;
-
- let file_text_bar = r#"
-fn bar() {
- // these blank links here
- // serve to ensure that the line numbers
- // from bar.rs
- // require more digits
-
-
-
-
-
-
-
-
-
-
- vec.push();
-
- // this line will get elided
-
- vec.pop().unwrap());
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo_map = cm.new_filemap_and_lines("foo.rs", None, file_text_foo);
- let span_foo_vec0 = cm.span_substr(&foo_map, file_text_foo, "vec", 0);
- let span_foo_vec1 = cm.span_substr(&foo_map, file_text_foo, "vec", 1);
- let span_foo_semi = cm.span_substr(&foo_map, file_text_foo, ";", 0);
-
- let bar_map = cm.new_filemap_and_lines("bar.rs", None, file_text_bar);
- let span_bar_vec0 = cm.span_substr(&bar_map, file_text_bar, "vec", 0);
- let span_bar_vec1 = cm.span_substr(&bar_map, file_text_bar, "vec", 1);
- let span_bar_semi = cm.span_substr(&bar_map, file_text_bar, ";", 0);
-
- let mut snippet = SnippetData::new(cm, Some(span_foo_vec1));
- snippet.push(span_foo_vec0, false, Some(format!("a")));
- snippet.push(span_foo_vec1, true, Some(format!("b")));
- snippet.push(span_foo_semi, false, Some(format!("c")));
- snippet.push(span_bar_vec0, false, Some(format!("d")));
- snippet.push(span_bar_vec1, false, Some(format!("e")));
- snippet.push(span_bar_semi, false, Some(format!("f")));
-
- let lines = snippet.render_lines();
- println!("{:#?}", lines);
-
- let text: String = make_string(&lines);
-
- println!("text=\n{}", text);
-
- // Note that the `|>` remain aligned across both files:
- assert_eq!(&text[..], &r#"
- --> foo.rs:3:14
- |>
-3 |> vec.push(vec.pop().unwrap());
- |> --- ^^^ - c
- |> | |
- |> | b
- |> a
- ::: bar.rs
- |>
-17 |> vec.push();
- |> --- - f
- |> |
- |> d
-...
-21 |> vec.pop().unwrap());
- |> --- e
-"#[1..]);
-}
-
-#[test]
-fn multi_line() {
- let file_text = r#"
-fn foo() {
- let name = find_id(&data, 22).unwrap();
-
- // Add one more item we forgot to the vector. Silly us.
- data.push(Data { name: format!("Hera"), id: 66 });
-
- // Print everything out.
- println!("Name: {:?}", name);
- println!("Data: {:?}", data);
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
- let span_data0 = cm.span_substr(&foo, file_text, "data", 0);
- let span_data1 = cm.span_substr(&foo, file_text, "data", 1);
- let span_rbrace = cm.span_substr(&foo, file_text, "}", 3);
-
- let mut snippet = SnippetData::new(cm, None);
- snippet.push(span_data0, false, Some(format!("immutable borrow begins here")));
- snippet.push(span_data1, false, Some(format!("mutable borrow occurs here")));
- snippet.push(span_rbrace, false, Some(format!("immutable borrow ends here")));
-
- let lines = snippet.render_lines();
- println!("{:#?}", lines);
-
- let text: String = make_string(&lines);
-
- println!("text=\n{}", text);
- assert_eq!(&text[..], &r#"
- ::: foo.rs
- |>
-3 |> let name = find_id(&data, 22).unwrap();
- |> ---- immutable borrow begins here
-...
-6 |> data.push(Data { name: format!("Hera"), id: 66 });
- |> ---- mutable borrow occurs here
-...
-11 |> }
- |> - immutable borrow ends here
-"#[1..]);
-}
-
-#[test]
-fn overlapping() {
- let file_text = r#"
-fn foo() {
- vec.push(vec.pop().unwrap());
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
- let span0 = cm.span_substr(&foo, file_text, "vec.push", 0);
- let span1 = cm.span_substr(&foo, file_text, "vec", 0);
- let span2 = cm.span_substr(&foo, file_text, "ec.push", 0);
- let span3 = cm.span_substr(&foo, file_text, "unwrap", 0);
-
- let mut snippet = SnippetData::new(cm, None);
- snippet.push(span0, false, Some(format!("A")));
- snippet.push(span1, false, Some(format!("B")));
- snippet.push(span2, false, Some(format!("C")));
- snippet.push(span3, false, Some(format!("D")));
-
- let lines = snippet.render_lines();
- println!("{:#?}", lines);
- let text: String = make_string(&lines);
-
- println!("text=r#\"\n{}\".trim_left()", text);
- assert_eq!(&text[..], &r#"
- ::: foo.rs
- |>
-3 |> vec.push(vec.pop().unwrap());
- |> -------- ------ D
- |> ||
- |> |C
- |> A
- |> B
-"#[1..]);
-}
-
-#[test]
-fn one_line_out_of_order() {
- let file_text = r#"
-fn foo() {
- vec.push(vec.pop().unwrap());
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
- let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0);
- let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1);
- let span_semi = cm.span_substr(&foo, file_text, ";", 0);
-
- // intentionally don't push the snippets left to right
- let mut snippet = SnippetData::new(cm, None);
- snippet.push(span_vec1, false, Some(format!("error occurs here")));
- snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here")));
- snippet.push(span_semi, false, Some(format!("previous borrow ends here")));
-
- let lines = snippet.render_lines();
- println!("{:#?}", lines);
- let text: String = make_string(&lines);
-
- println!("text=r#\"\n{}\".trim_left()", text);
- assert_eq!(&text[..], &r#"
- ::: foo.rs
- |>
-3 |> vec.push(vec.pop().unwrap());
- |> --- --- - previous borrow ends here
- |> | |
- |> | error occurs here
- |> previous borrow of `vec` occurs here
-"#[1..]);
-}
-
-#[test]
-fn elide_unnecessary_lines() {
- let file_text = r#"
-fn foo() {
- let mut vec = vec![0, 1, 2];
- let mut vec2 = vec;
- vec2.push(3);
- vec2.push(4);
- vec2.push(5);
- vec2.push(6);
- vec.push(7);
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
- let span_vec0 = cm.span_substr(&foo, file_text, "vec", 3);
- let span_vec1 = cm.span_substr(&foo, file_text, "vec", 8);
-
- let mut snippet = SnippetData::new(cm, None);
- snippet.push(span_vec0, false, Some(format!("`vec` moved here because it \
- has type `collections::vec::Vec<i32>`")));
- snippet.push(span_vec1, false, Some(format!("use of moved value: `vec`")));
-
- let lines = snippet.render_lines();
- println!("{:#?}", lines);
- let text: String = make_string(&lines);
- println!("text=r#\"\n{}\".trim_left()", text);
- assert_eq!(&text[..], &r#"
- ::: foo.rs
- |>
-4 |> let mut vec2 = vec;
- |> --- `vec` moved here because it has type `collections::vec::Vec<i32>`
-...
-9 |> vec.push(7);
- |> --- use of moved value: `vec`
-"#[1..]);
-}
-
-#[test]
-fn spans_without_labels() {
- let file_text = r#"
-fn foo() {
- let mut vec = vec![0, 1, 2];
- let mut vec2 = vec;
- vec2.push(3);
- vec2.push(4);
- vec2.push(5);
- vec2.push(6);
- vec.push(7);
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
-
- let mut snippet = SnippetData::new(cm.clone(), None);
- for i in 0..4 {
- let span_veci = cm.span_substr(&foo, file_text, "vec", i);
- snippet.push(span_veci, false, None);
- }
-
- let lines = snippet.render_lines();
- let text: String = make_string(&lines);
- println!("text=&r#\"\n{}\n\"#[1..]", text);
- assert_eq!(text, &r#"
- ::: foo.rs
- |>
-3 |> let mut vec = vec![0, 1, 2];
- |> --- ---
-4 |> let mut vec2 = vec;
- |> --- ---
-"#[1..]);
-}
-
-#[test]
-fn span_long_selection() {
- let file_text = r#"
-impl SomeTrait for () {
- fn foo(x: u32) {
- // impl 1
- // impl 2
- // impl 3
- }
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
-
- let mut snippet = SnippetData::new(cm.clone(), None);
- let fn_span = cm.span_substr(&foo, file_text, "fn", 0);
- let rbrace_span = cm.span_substr(&foo, file_text, "}", 0);
- snippet.push(splice(fn_span, rbrace_span), false, None);
- let lines = snippet.render_lines();
- let text: String = make_string(&lines);
- println!("r#\"\n{}\"", text);
- assert_eq!(text, &r#"
- ::: foo.rs
- |>
-3 |> fn foo(x: u32) {
- |> -
-"#[1..]);
-}
-
-#[test]
-fn span_overlap_label() {
- // Test that we don't put `x_span` to the right of its highlight,
- // since there is another highlight that overlaps it.
-
- let file_text = r#"
- fn foo(x: u32) {
- }
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
-
- let mut snippet = SnippetData::new(cm.clone(), None);
- let fn_span = cm.span_substr(&foo, file_text, "fn foo(x: u32)", 0);
- let x_span = cm.span_substr(&foo, file_text, "x", 0);
- snippet.push(fn_span, false, Some(format!("fn_span")));
- snippet.push(x_span, false, Some(format!("x_span")));
- let lines = snippet.render_lines();
- let text: String = make_string(&lines);
- println!("r#\"\n{}\"", text);
- assert_eq!(text, &r#"
- ::: foo.rs
- |>
-2 |> fn foo(x: u32) {
- |> --------------
- |> | |
- |> | x_span
- |> fn_span
-"#[1..]);
-}
-
-#[test]
-fn span_overlap_label2() {
- // Test that we don't put `x_span` to the right of its highlight,
- // since there is another highlight that overlaps it. In this
- // case, the overlap is only at the beginning, but it's still
- // better to show the beginning more clearly.
-
- let file_text = r#"
- fn foo(x: u32) {
- }
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
-
- let mut snippet = SnippetData::new(cm.clone(), None);
- let fn_span = cm.span_substr(&foo, file_text, "fn foo(x", 0);
- let x_span = cm.span_substr(&foo, file_text, "x: u32)", 0);
- snippet.push(fn_span, false, Some(format!("fn_span")));
- snippet.push(x_span, false, Some(format!("x_span")));
- let lines = snippet.render_lines();
- let text: String = make_string(&lines);
- println!("r#\"\n{}\"", text);
- assert_eq!(text, &r#"
- ::: foo.rs
- |>
-2 |> fn foo(x: u32) {
- |> --------------
- |> | |
- |> | x_span
- |> fn_span
-"#[1..]);
-}
-
-#[test]
-fn span_overlap_label3() {
- // Test that we don't put `x_span` to the right of its highlight,
- // since there is another highlight that overlaps it. In this
- // case, the overlap is only at the beginning, but it's still
- // better to show the beginning more clearly.
-
- let file_text = r#"
- fn foo() {
- let closure = || {
- inner
- };
- }
-}
-"#;
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
-
- let mut snippet = SnippetData::new(cm.clone(), None);
-
- let closure_span = {
- let closure_start_span = cm.span_substr(&foo, file_text, "||", 0);
- let closure_end_span = cm.span_substr(&foo, file_text, "}", 0);
- splice(closure_start_span, closure_end_span)
- };
-
- let inner_span = cm.span_substr(&foo, file_text, "inner", 0);
-
- snippet.push(closure_span, false, Some(format!("foo")));
- snippet.push(inner_span, false, Some(format!("bar")));
-
- let lines = snippet.render_lines();
- let text: String = make_string(&lines);
- println!("r#\"\n{}\"", text);
- assert_eq!(text, &r#"
- ::: foo.rs
- |>
-3 |> let closure = || {
- |> - foo
-4 |> inner
- |> ----- bar
-"#[1..]);
-}
-
-#[test]
-fn span_empty() {
- // In one of the unit tests, we found that the parser sometimes
- // gives empty spans, and in particular it supplied an EOF span
- // like this one, which points at the very end. We want to
- // fallback gracefully in this case.
-
- let file_text = r#"
-fn main() {
- struct Foo;
-
- impl !Sync for Foo {}
-
- unsafe impl Send for &'static Foo {
- // error: cross-crate traits with a default impl, like `core::marker::Send`,
- // can only be implemented for a struct/enum type, not
- // `&'static Foo`
-}"#;
-
-
- let cm = Rc::new(CodeMap::new());
- let foo = cm.new_filemap_and_lines("foo.rs", None, file_text);
-
- let mut rbrace_span = cm.span_substr(&foo, file_text, "}", 1);
- rbrace_span.lo = rbrace_span.hi;
-
- let mut snippet = SnippetData::new(cm.clone(), Some(rbrace_span));
- snippet.push(rbrace_span, false, None);
- let lines = snippet.render_lines();
- let text: String = make_string(&lines);
- println!("r#\"\n{}\"", text);
- assert_eq!(text, &r#"
- --> foo.rs:11:2
- |>
-11 |> }
- |> -
-"#[1..]);
-}
use ast;
use ast::{Name, PatKind};
use attr::HasAttrs;
-use codemap;
-use codemap::{CodeMap, Span, ExpnId, ExpnInfo, NO_EXPANSION};
+use codemap::{self, CodeMap, ExpnInfo};
+use syntax_pos::{Span, ExpnId, NO_EXPANSION};
use errors::DiagnosticBuilder;
use ext;
use ext::expand;
use std::collections::{HashMap, HashSet};
use std::rc::Rc;
use std::default::Default;
+use tokenstream;
#[derive(Debug,Clone)]
fn expand<'cx>(&self,
ecx: &'cx mut ExtCtxt,
span: Span,
- token_tree: &[ast::TokenTree])
+ token_tree: &[tokenstream::TokenTree])
-> Box<MacResult+'cx>;
}
pub type MacroExpanderFn =
- for<'cx> fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box<MacResult+'cx>;
+ for<'cx> fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
+ -> Box<MacResult+'cx>;
impl<F> TTMacroExpander for F
- where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box<MacResult+'cx>
+ where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
+ -> Box<MacResult+'cx>
{
fn expand<'cx>(&self,
ecx: &'cx mut ExtCtxt,
span: Span,
- token_tree: &[ast::TokenTree])
+ token_tree: &[tokenstream::TokenTree])
-> Box<MacResult+'cx> {
(*self)(ecx, span, token_tree)
}
cx: &'cx mut ExtCtxt,
sp: Span,
ident: ast::Ident,
- token_tree: Vec<ast::TokenTree> )
+ token_tree: Vec<tokenstream::TokenTree> )
-> Box<MacResult+'cx>;
}
pub type IdentMacroExpanderFn =
- for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<ast::TokenTree>) -> Box<MacResult+'cx>;
+ for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<tokenstream::TokenTree>)
+ -> Box<MacResult+'cx>;
impl<F> IdentMacroExpander for F
where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident,
- Vec<ast::TokenTree>) -> Box<MacResult+'cx>
+ Vec<tokenstream::TokenTree>) -> Box<MacResult+'cx>
{
fn expand<'cx>(&self,
cx: &'cx mut ExtCtxt,
sp: Span,
ident: ast::Ident,
- token_tree: Vec<ast::TokenTree> )
+ token_tree: Vec<tokenstream::TokenTree> )
-> Box<MacResult+'cx>
{
(*self)(cx, sp, ident, token_tree)
// Use a macro because forwarding to a simple function has type system issues
macro_rules! make_stmts_default {
($me:expr) => {
- $me.make_expr().map(|e| {
- SmallVector::one(codemap::respan(
- e.span, ast::StmtKind::Expr(e, ast::DUMMY_NODE_ID)))
- })
+ $me.make_expr().map(|e| SmallVector::one(ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: e.span,
+ node: ast::StmtKind::Expr(e),
+ }))
}
}
None
}
+ /// Create zero or more trait items.
+ fn make_trait_items(self: Box<Self>) -> Option<SmallVector<ast::TraitItem>> {
+ None
+ }
+
/// Create a pattern.
fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
None
pat: P<ast::Pat>,
items: SmallVector<P<ast::Item>>,
impl_items: SmallVector<ast::ImplItem>,
+ trait_items: SmallVector<ast::TraitItem>,
stmts: SmallVector<ast::Stmt>,
ty: P<ast::Ty>,
}
self.impl_items
}
+ fn make_trait_items(self: Box<Self>) -> Option<SmallVector<ast::TraitItem>> {
+ self.trait_items
+ }
+
fn make_stmts(self: Box<Self>) -> Option<SmallVector<ast::Stmt>> {
match self.stmts.as_ref().map_or(0, |s| s.len()) {
0 => make_stmts_default!(self),
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Lit(P(codemap::respan(sp, ast::LitKind::Bool(false)))),
span: sp,
- attrs: None,
+ attrs: ast::ThinVec::new(),
})
}
}
}
+ fn make_trait_items(self: Box<DummyResult>) -> Option<SmallVector<ast::TraitItem>> {
+ if self.expr_only {
+ None
+ } else {
+ Some(SmallVector::zero())
+ }
+ }
+
fn make_stmts(self: Box<DummyResult>) -> Option<SmallVector<ast::Stmt>> {
- Some(SmallVector::one(
- codemap::respan(self.span,
- ast::StmtKind::Expr(DummyResult::raw_expr(self.span),
- ast::DUMMY_NODE_ID))))
+ Some(SmallVector::one(ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: ast::StmtKind::Expr(DummyResult::raw_expr(self.span)),
+ span: self.span,
+ }))
}
}
expand::MacroExpander::new(self)
}
- pub fn new_parser_from_tts(&self, tts: &[ast::TokenTree])
+ pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-> parser::Parser<'a> {
parse::tts_to_parser(self.parse_sess, tts.to_vec(), self.cfg())
}
/// done as rarely as possible).
pub fn check_zero_tts(cx: &ExtCtxt,
sp: Span,
- tts: &[ast::TokenTree],
+ tts: &[tokenstream::TokenTree],
name: &str) {
if !tts.is_empty() {
cx.span_err(sp, &format!("{} takes no arguments", name));
/// is not a string literal, emit an error and return None.
pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
sp: Span,
- tts: &[ast::TokenTree],
+ tts: &[tokenstream::TokenTree],
name: &str)
-> Option<String> {
let mut p = cx.new_parser_from_tts(tts);
/// parsing error, emit a non-fatal error and return None.
pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
sp: Span,
- tts: &[ast::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
+ tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
let mut p = cx.new_parser_from_tts(tts);
let mut es = Vec::new();
while p.token != token::Eof {
use abi::Abi;
use ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind};
use attr;
-use codemap::{Span, respan, Spanned, DUMMY_SP, Pos};
+use syntax_pos::{Span, DUMMY_SP, Pos};
+use codemap::{respan, Spanned};
use ext::base::ExtCtxt;
use parse::token::{self, keywords, InternedString};
use ptr::P;
// statements
fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt;
+ fn stmt_semi(&self, expr: P<ast::Expr>) -> ast::Stmt;
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: P<ast::Expr>) -> ast::Stmt;
fn stmt_let_typed(&self,
sp: Span,
fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt;
// blocks
- fn block(&self, span: Span, stmts: Vec<ast::Stmt>,
- expr: Option<P<ast::Expr>>) -> P<ast::Block>;
+ fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block>;
fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block>;
- fn block_all(&self, span: Span,
- stmts: Vec<ast::Stmt>,
- expr: Option<P<ast::Expr>>) -> P<ast::Block>;
// expressions
fn expr(&self, span: Span, node: ast::ExprKind) -> P<ast::Expr>;
}
fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
- respan(expr.span, ast::StmtKind::Semi(expr, ast::DUMMY_NODE_ID))
+ ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: expr.span,
+ node: ast::StmtKind::Expr(expr),
+ }
+ }
+
+ fn stmt_semi(&self, expr: P<ast::Expr>) -> ast::Stmt {
+ ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: expr.span,
+ node: ast::StmtKind::Semi(expr),
+ }
}
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
span: sp,
- attrs: None,
+ attrs: ast::ThinVec::new(),
});
- let decl = respan(sp, ast::DeclKind::Local(local));
- respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID))
+ ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: ast::StmtKind::Local(local),
+ span: sp,
+ }
}
fn stmt_let_typed(&self,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
span: sp,
- attrs: None,
+ attrs: ast::ThinVec::new(),
});
- let decl = respan(sp, ast::DeclKind::Local(local));
- P(respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID)))
- }
-
- fn block(&self, span: Span, stmts: Vec<ast::Stmt>,
- expr: Option<P<Expr>>) -> P<ast::Block> {
- self.block_all(span, stmts, expr)
+ P(ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: ast::StmtKind::Local(local),
+ span: sp,
+ })
}
fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt {
- let decl = respan(sp, ast::DeclKind::Item(item));
- respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID))
+ ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: ast::StmtKind::Item(item),
+ span: sp,
+ }
}
fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
- self.block_all(expr.span, Vec::new(), Some(expr))
- }
- fn block_all(&self,
- span: Span,
- stmts: Vec<ast::Stmt>,
- expr: Option<P<ast::Expr>>) -> P<ast::Block> {
- P(ast::Block {
- stmts: stmts,
- expr: expr,
- id: ast::DUMMY_NODE_ID,
- rules: BlockCheckMode::Default,
- span: span,
- })
+ self.block(expr.span, vec![ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: expr.span,
+ node: ast::StmtKind::Expr(expr),
+ }])
+ }
+ fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block> {
+ P(ast::Block {
+ stmts: stmts,
+ id: ast::DUMMY_NODE_ID,
+ rules: BlockCheckMode::Default,
+ span: span,
+ })
}
fn expr(&self, span: Span, node: ast::ExprKind) -> P<ast::Expr> {
id: ast::DUMMY_NODE_ID,
node: node,
span: span,
- attrs: None,
+ attrs: ast::ThinVec::new(),
})
}
}
fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<P<ast::Pat>>) -> P<ast::Pat> {
let pat = if subpats.is_empty() {
- PatKind::Path(path)
+ PatKind::Path(None, path)
} else {
PatKind::TupleStruct(path, subpats, None)
};
ids: Vec<ast::Ident>,
stmts: Vec<ast::Stmt>)
-> P<ast::Expr> {
- self.lambda(span, ids, self.block(span, stmts, None))
+ self.lambda(span, ids, self.block(span, stmts))
}
fn lambda_stmts_0(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Expr> {
- self.lambda0(span, self.block(span, stmts, None))
+ self.lambda0(span, self.block(span, stmts))
}
fn lambda_stmts_1(&self, span: Span, stmts: Vec<ast::Stmt>,
ident: ast::Ident) -> P<ast::Expr> {
- self.lambda1(span, self.block(span, stmts, None), ident)
+ self.lambda1(span, self.block(span, stmts), ident)
}
fn arg(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Arg {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ast::{Block, Crate, DeclKind, PatKind};
+use ast::{Block, Crate, PatKind};
use ast::{Local, Ident, Mac_, Name, SpannedIdent};
use ast::{MacStmtStyle, Mrk, Stmt, StmtKind, ItemKind};
-use ast::TokenTree;
use ast;
use attr::HasAttrs;
use ext::mtwt;
-use ext::build::AstBuilder;
use attr;
-use attr::{AttrMetaMethods, WithAttrs, ThinAttributesExt};
-use codemap;
-use codemap::{Span, Spanned, ExpnInfo, ExpnId, NameAndSpan, MacroBang, MacroAttribute};
+use attr::AttrMetaMethods;
+use codemap::{Spanned, ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
+use syntax_pos::{self, Span, ExpnId};
use config::StripUnconfigured;
use ext::base::*;
use feature_gate::{self, Features};
use util::move_map::MoveMap;
use parse::token::{fresh_mark, fresh_name, intern, keywords};
use ptr::P;
+use tokenstream::TokenTree;
use util::small_vector::SmallVector;
use visit;
use visit::Visitor;
// Fold this node or list of nodes using the given folder.
fn fold_with<F: Folder>(self, folder: &mut F) -> Self;
- fn visit_with<'v, V: Visitor<'v>>(&'v self, visitor: &mut V);
+ fn visit_with<V: Visitor>(&self, visitor: &mut V);
// Return a placeholder expansion to allow compilation to continue after an erroring expansion.
fn dummy(span: Span) -> Self;
$( folder.$fold(self) )*
$( self.into_iter().flat_map(|item| folder. $fold_elt (item)).collect() )*
}
- fn visit_with<'v, V: Visitor<'v>>(&'v self, visitor: &mut V) {
+ fn visit_with<V: Visitor>(&self, visitor: &mut V) {
$( visitor.$visit(self) )*
$( for item in self.as_slice() { visitor. $visit_elt (item) } )*
}
"statement", .make_stmts, lift .fold_stmt, lift .visit_stmt, |_span| SmallVector::zero();
SmallVector<P<ast::Item>>:
"item", .make_items, lift .fold_item, lift .visit_item, |_span| SmallVector::zero();
+ SmallVector<ast::TraitItem>:
+ "trait item", .make_trait_items, lift .fold_trait_item, lift .visit_trait_item,
+ |_span| SmallVector::zero();
SmallVector<ast::ImplItem>:
- "impl item", .make_impl_items, lift .fold_impl_item, lift .visit_impl_item,
+ "impl item", .make_impl_items, lift .fold_impl_item, lift .visit_impl_item,
|_span| SmallVector::zero();
}
fn fold_with<F: Folder>(self, folder: &mut F) -> Self {
self.and_then(|expr| folder.fold_opt_expr(expr))
}
- fn visit_with<'v, V: Visitor<'v>>(&'v self, visitor: &mut V) {
+ fn visit_with<V: Visitor>(&self, visitor: &mut V) {
self.as_ref().map(|expr| visitor.visit_expr(expr));
}
}
-pub fn expand_expr(expr: ast::Expr, fld: &mut MacroExpander) -> P<ast::Expr> {
+pub fn expand_expr(mut expr: ast::Expr, fld: &mut MacroExpander) -> P<ast::Expr> {
match expr.node {
// expr_mac should really be expr_ext or something; it's the
// entry-point for all syntax extensions.
ast::ExprKind::Mac(mac) => {
- expand_mac_invoc(mac, None, expr.attrs.into_attr_vec(), expr.span, fld)
+ return expand_mac_invoc(mac, None, expr.attrs.into(), expr.span, fld);
}
ast::ExprKind::While(cond, body, opt_ident) => {
let cond = fld.fold_expr(cond);
let (body, opt_ident) = expand_loop_block(body, opt_ident, fld);
- fld.cx.expr(expr.span, ast::ExprKind::While(cond, body, opt_ident))
- .with_attrs(fold_thin_attrs(expr.attrs, fld))
+ expr.node = ast::ExprKind::While(cond, body, opt_ident);
}
ast::ExprKind::WhileLet(pat, cond, body, opt_ident) => {
});
assert!(rewritten_pats.len() == 1);
- let wl = ast::ExprKind::WhileLet(rewritten_pats.remove(0), cond, body, opt_ident);
- fld.cx.expr(expr.span, wl).with_attrs(fold_thin_attrs(expr.attrs, fld))
+ expr.node = ast::ExprKind::WhileLet(rewritten_pats.remove(0), cond, body, opt_ident);
}
ast::ExprKind::Loop(loop_block, opt_ident) => {
let (loop_block, opt_ident) = expand_loop_block(loop_block, opt_ident, fld);
- fld.cx.expr(expr.span, ast::ExprKind::Loop(loop_block, opt_ident))
- .with_attrs(fold_thin_attrs(expr.attrs, fld))
+ expr.node = ast::ExprKind::Loop(loop_block, opt_ident);
}
ast::ExprKind::ForLoop(pat, head, body, opt_ident) => {
assert!(rewritten_pats.len() == 1);
let head = fld.fold_expr(head);
- let fl = ast::ExprKind::ForLoop(rewritten_pats.remove(0), head, body, opt_ident);
- fld.cx.expr(expr.span, fl).with_attrs(fold_thin_attrs(expr.attrs, fld))
+ expr.node = ast::ExprKind::ForLoop(rewritten_pats.remove(0), head, body, opt_ident);
}
ast::ExprKind::IfLet(pat, sub_expr, body, else_opt) => {
let else_opt = else_opt.map(|else_opt| fld.fold_expr(else_opt));
let sub_expr = fld.fold_expr(sub_expr);
- let il = ast::ExprKind::IfLet(rewritten_pats.remove(0), sub_expr, body, else_opt);
- fld.cx.expr(expr.span, il).with_attrs(fold_thin_attrs(expr.attrs, fld))
+ expr.node = ast::ExprKind::IfLet(rewritten_pats.remove(0), sub_expr, body, else_opt);
}
ast::ExprKind::Closure(capture_clause, fn_decl, block, fn_decl_span) => {
let (rewritten_fn_decl, rewritten_block)
= expand_and_rename_fn_decl_and_block(fn_decl, block, fld);
- let new_node = ast::ExprKind::Closure(capture_clause,
- rewritten_fn_decl,
- rewritten_block,
- fn_decl_span);
- P(ast::Expr{ id: expr.id,
- node: new_node,
- span: expr.span,
- attrs: fold_thin_attrs(expr.attrs, fld) })
+ expr.node = ast::ExprKind::Closure(capture_clause,
+ rewritten_fn_decl,
+ rewritten_block,
+ fn_decl_span);
}
- _ => P(noop_fold_expr(expr, fld)),
- }
+ _ => expr = noop_fold_expr(expr, fld),
+ };
+ P(expr)
}
/// Expand a macro invocation. Returns the result of expansion.
},
});
- let marked_tts = mark_tts(&tts[..], mark);
+ let marked_tts = mark_tts(tts, mark);
Some(expandfun.expand(fld.cx, call_site, &marked_tts))
}
}
});
- let marked_tts = mark_tts(&tts, mark);
+ let marked_tts = mark_tts(tts, mark);
Some(expander.expand(fld.cx, call_site, ident, marked_tts))
}
};
let (mac, style, attrs) = match stmt.node {
- StmtKind::Mac(mac, style, attrs) => (mac, style, attrs),
+ StmtKind::Mac(mac) => mac.unwrap(),
_ => return expand_non_macro_stmt(stmt, fld)
};
let mut fully_expanded: SmallVector<ast::Stmt> =
- expand_mac_invoc(mac.unwrap(), None, attrs.into_attr_vec(), stmt.span, fld);
+ expand_mac_invoc(mac, None, attrs.into(), stmt.span, fld);
// If this is a macro invocation with a semicolon, then apply that
// semicolon to the final statement produced by expansion.
if style == MacStmtStyle::Semicolon {
if let Some(stmt) = fully_expanded.pop() {
- let new_stmt = Spanned {
+ fully_expanded.push(Stmt {
+ id: stmt.id,
node: match stmt.node {
- StmtKind::Expr(e, stmt_id) => StmtKind::Semi(e, stmt_id),
+ StmtKind::Expr(expr) => StmtKind::Semi(expr),
_ => stmt.node /* might already have a semi */
},
- span: stmt.span
- };
- fully_expanded.push(new_stmt);
+ span: stmt.span,
+ });
}
}
// expand a non-macro stmt. this is essentially the fallthrough for
// expand_stmt, above.
-fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroExpander)
+fn expand_non_macro_stmt(stmt: Stmt, fld: &mut MacroExpander)
-> SmallVector<Stmt> {
// is it a let?
- match node {
- StmtKind::Decl(decl, node_id) => decl.and_then(|Spanned {node: decl, span}| match decl {
- DeclKind::Local(local) => {
- // take it apart:
- let rewritten_local = local.map(|Local {id, pat, ty, init, span, attrs}| {
- // expand the ty since TyKind::FixedLengthVec contains an Expr
- // and thus may have a macro use
- let expanded_ty = ty.map(|t| fld.fold_ty(t));
- // expand the pat (it might contain macro uses):
- let expanded_pat = fld.fold_pat(pat);
- // find the PatIdents in the pattern:
- // oh dear heaven... this is going to include the enum
- // names, as well... but that should be okay, as long as
- // the new names are gensyms for the old ones.
- // generate fresh names, push them to a new pending list
- let idents = pattern_bindings(&expanded_pat);
- let mut new_pending_renames =
- idents.iter().map(|ident| (*ident, fresh_name(*ident))).collect();
- // rewrite the pattern using the new names (the old
- // ones have already been applied):
- let rewritten_pat = {
- // nested binding to allow borrow to expire:
- let mut rename_fld = IdentRenamer{renames: &mut new_pending_renames};
- rename_fld.fold_pat(expanded_pat)
- };
- // add them to the existing pending renames:
- fld.cx.syntax_env.info().pending_renames
- .extend(new_pending_renames);
- Local {
- id: id,
- ty: expanded_ty,
- pat: rewritten_pat,
- // also, don't forget to expand the init:
- init: init.map(|e| fld.fold_expr(e)),
- span: span,
- attrs: fold::fold_thin_attrs(attrs, fld),
- }
- });
- SmallVector::one(Spanned {
- node: StmtKind::Decl(P(Spanned {
- node: DeclKind::Local(rewritten_local),
- span: span
- }),
- node_id),
- span: stmt_span
- })
- }
- _ => {
- noop_fold_stmt(Spanned {
- node: StmtKind::Decl(P(Spanned {
- node: decl,
- span: span
- }),
- node_id),
- span: stmt_span
- }, fld)
- }
- }),
- _ => {
- noop_fold_stmt(Spanned {
- node: node,
- span: stmt_span
- }, fld)
+ match stmt.node {
+ StmtKind::Local(local) => {
+ // take it apart:
+ let rewritten_local = local.map(|Local {id, pat, ty, init, span, attrs}| {
+ // expand the ty since TyKind::FixedLengthVec contains an Expr
+ // and thus may have a macro use
+ let expanded_ty = ty.map(|t| fld.fold_ty(t));
+ // expand the pat (it might contain macro uses):
+ let expanded_pat = fld.fold_pat(pat);
+ // find the PatIdents in the pattern:
+ // oh dear heaven... this is going to include the enum
+ // names, as well... but that should be okay, as long as
+ // the new names are gensyms for the old ones.
+ // generate fresh names, push them to a new pending list
+ let idents = pattern_bindings(&expanded_pat);
+ let mut new_pending_renames =
+ idents.iter().map(|ident| (*ident, fresh_name(*ident))).collect();
+ // rewrite the pattern using the new names (the old
+ // ones have already been applied):
+ let rewritten_pat = {
+ // nested binding to allow borrow to expire:
+ let mut rename_fld = IdentRenamer{renames: &mut new_pending_renames};
+ rename_fld.fold_pat(expanded_pat)
+ };
+ // add them to the existing pending renames:
+ fld.cx.syntax_env.info().pending_renames
+ .extend(new_pending_renames);
+ Local {
+ id: id,
+ ty: expanded_ty,
+ pat: rewritten_pat,
+ // also, don't forget to expand the init:
+ init: init.map(|e| fld.fold_expr(e)),
+ span: span,
+ attrs: fold::fold_thin_attrs(attrs, fld),
+ }
+ });
+ SmallVector::one(Stmt {
+ id: stmt.id,
+ node: StmtKind::Local(rewritten_local),
+ span: stmt.span,
+ })
}
+ _ => noop_fold_stmt(stmt, fld),
}
}
ident_accumulator: Vec<ast::Ident>
}
-impl<'v> Visitor<'v> for PatIdentFinder {
+impl Visitor for PatIdentFinder {
fn visit_pat(&mut self, pattern: &ast::Pat) {
match *pattern {
ast::Pat { id: _, node: PatKind::Ident(_, ref path1, ref inner), span: _ } => {
// expand the elements of a block.
pub fn expand_block_elts(b: P<Block>, fld: &mut MacroExpander) -> P<Block> {
- b.map(|Block {id, stmts, expr, rules, span}| {
+ b.map(|Block {id, stmts, rules, span}| {
let new_stmts = stmts.into_iter().flat_map(|x| {
// perform pending renames and expand macros in the statement
fld.fold_stmt(x).into_iter()
}).collect();
- let new_expr = expr.map(|x| {
- let expr = {
- let pending_renames = &mut fld.cx.syntax_env.info().pending_renames;
- let mut rename_fld = IdentRenamer{renames:pending_renames};
- rename_fld.fold_expr(x)
- };
- fld.fold_expr(expr)
- });
Block {
id: fld.new_id(id),
stmts: new_stmts,
- expr: new_expr,
rules: rules,
span: span
}
_ => noop_fold_item(it, fld),
}.into_iter().map(|i| Annotatable::Item(i)).collect(),
- Annotatable::TraitItem(it) => match it.node {
- ast::TraitItemKind::Method(_, Some(_)) => {
- let ti = it.unwrap();
- SmallVector::one(ast::TraitItem {
- id: ti.id,
- ident: ti.ident,
- attrs: ti.attrs,
- node: match ti.node {
- ast::TraitItemKind::Method(sig, Some(body)) => {
- let (sig, body) = expand_and_rename_method(sig, body, fld);
- ast::TraitItemKind::Method(sig, Some(body))
- }
- _ => unreachable!()
- },
- span: ti.span,
- })
- }
- _ => fold::noop_fold_trait_item(it.unwrap(), fld)
- }.into_iter().map(|ti| Annotatable::TraitItem(P(ti))).collect(),
+ Annotatable::TraitItem(it) => {
+ expand_trait_item(it.unwrap(), fld).into_iter().
+ map(|it| Annotatable::TraitItem(P(it))).collect()
+ }
Annotatable::ImplItem(ii) => {
expand_impl_item(ii.unwrap(), fld).into_iter().
}
}
+fn expand_trait_item(ti: ast::TraitItem, fld: &mut MacroExpander)
+ -> SmallVector<ast::TraitItem> {
+ match ti.node {
+ ast::TraitItemKind::Method(_, Some(_)) => {
+ SmallVector::one(ast::TraitItem {
+ id: ti.id,
+ ident: ti.ident,
+ attrs: ti.attrs,
+ node: match ti.node {
+ ast::TraitItemKind::Method(sig, Some(body)) => {
+ let (sig, body) = expand_and_rename_method(sig, body, fld);
+ ast::TraitItemKind::Method(sig, Some(body))
+ }
+ _ => unreachable!()
+ },
+ span: ti.span,
+ })
+ }
+ ast::TraitItemKind::Macro(mac) => {
+ expand_mac_invoc(mac, None, ti.attrs, ti.span, fld)
+ }
+ _ => fold::noop_fold_trait_item(ti, fld)
+ }
+}
+
/// Given a fn_decl and a block and a MacroExpander, expand the fn_decl, then use the
/// PatIdents in its arguments to perform renaming in the FnDecl and
/// the block, returning both the new FnDecl and the new Block.
at_crate_root: bool,
}
- impl<'a, 'b, 'v> Visitor<'v> for MacroLoadingVisitor<'a, 'b> {
- fn visit_mac(&mut self, _: &'v ast::Mac) {}
- fn visit_item(&mut self, item: &'v ast::Item) {
+ impl<'a, 'b> Visitor for MacroLoadingVisitor<'a, 'b> {
+ fn visit_mac(&mut self, _: &ast::Mac) {}
+ fn visit_item(&mut self, item: &ast::Item) {
if let ast::ItemKind::ExternCrate(..) = item.node {
// We need to error on `#[macro_use] extern crate` when it isn't at the
// crate root, because `$crate` won't work properly.
self.at_crate_root = at_crate_root;
}
}
- fn visit_block(&mut self, block: &'v ast::Block) {
+ fn visit_block(&mut self, block: &ast::Block) {
let at_crate_root = ::std::mem::replace(&mut self.at_crate_root, false);
visit::walk_block(self, block);
self.at_crate_root = at_crate_root;
fn fold_opt_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
expr.and_then(|expr| match expr.node {
ast::ExprKind::Mac(mac) =>
- expand_mac_invoc(mac, None, expr.attrs.into_attr_vec(), expr.span, self),
+ expand_mac_invoc(mac, None, expr.attrs.into(), expr.span, self),
_ => Some(expand_expr(expr, self)),
})
}
result = expand_item(item, self);
self.pop_mod_path();
} else {
- let filename = if inner != codemap::DUMMY_SP {
+ let filename = if inner != syntax_pos::DUMMY_SP {
Some(self.cx.parse_sess.codemap().span_to_filename(inner))
} else { None };
let orig_filename = replace(&mut self.cx.filename, filename);
Spanned {
node: Mac_ {
path: self.fold_path(node.path),
- tts: self.fold_tts(&node.tts),
- ctxt: mtwt::apply_mark(self.mark, node.ctxt),
+ tts: self.fold_tts(node.tts),
},
span: self.new_span(span),
}
}
// apply a given mark to the given token trees. Used prior to expansion of a macro.
-fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec<TokenTree> {
+fn mark_tts(tts: Vec<TokenTree>, m: Mrk) -> Vec<TokenTree> {
noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
}
use super::{PatIdentFinder, IdentRenamer, PatIdentRenamer, ExpansionConfig};
use ast;
use ast::Name;
- use codemap;
+ use syntax_pos;
use ext::base::{ExtCtxt, DummyMacroLoader};
use ext::mtwt;
use fold::Folder;
path_accumulator: Vec<ast::Path> ,
}
- impl<'v> Visitor<'v> for PathExprFinderContext {
+ impl Visitor for PathExprFinderContext {
fn visit_expr(&mut self, expr: &ast::Expr) {
if let ast::ExprKind::Path(None, ref p) = expr.node {
self.path_accumulator.push(p.clone());
ident_accumulator: Vec<ast::Ident>
}
- impl<'v> Visitor<'v> for IdentFinder {
- fn visit_ident(&mut self, _: codemap::Span, id: ast::Ident){
+ impl Visitor for IdentFinder {
+ fn visit_ident(&mut self, _: syntax_pos::Span, id: ast::Ident){
self.ident_accumulator.push(id);
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, TokenTree, Ty};
-use codemap::Span;
+use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty};
+use syntax_pos::Span;
use ext::base::ExtCtxt;
use ext::base;
use ext::build::AstBuilder;
use parse::token::*;
use parse::token;
use ptr::P;
+use tokenstream::{self, TokenTree};
/// Quasiquoting works via token trees.
///
use ext::base::ExtCtxt;
use parse::{self, token, classify};
use ptr::P;
- use std::rc::Rc;
- use ast::TokenTree;
+ use tokenstream::{self, TokenTree};
pub use parse::new_parser_from_tts;
- pub use codemap::{BytePos, Span, dummy_spanned, DUMMY_SP};
+ pub use syntax_pos::{BytePos, Span, DUMMY_SP};
+ pub use codemap::{dummy_spanned};
pub trait ToTokens {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree>;
if self.node.style == ast::AttrStyle::Inner {
r.push(TokenTree::Token(self.span, token::Not));
}
- r.push(TokenTree::Delimited(self.span, Rc::new(ast::Delimited {
+ r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
delim: token::Bracket,
open_span: self.span,
tts: self.node.value.to_tokens(cx),
close_span: self.span,
- })));
+ }));
r
}
}
impl ToTokens for () {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Delimited(DUMMY_SP, Rc::new(ast::Delimited {
+ vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
delim: token::Paren,
open_span: DUMMY_SP,
tts: vec![],
close_span: DUMMY_SP,
- }))]
+ })]
}
}
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Lit(P(self.clone())),
span: DUMMY_SP,
- attrs: None,
+ attrs: ast::ThinVec::new(),
}).to_tokens(cx)
}
}
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Lit(P(dummy_spanned(lit))),
span: DUMMY_SP,
- attrs: None,
+ attrs: ast::ThinVec::new(),
});
if *self >= 0 {
return lit.to_tokens(cx);
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Unary(ast::UnOp::Neg, lit),
span: DUMMY_SP,
- attrs: None,
+ attrs: ast::ThinVec::new(),
}).to_tokens(cx)
}
}
let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
let mut vector = mk_stmts_let(cx, sp);
vector.extend(statements_mk_tts(cx, &tts[..], true));
- let block = cx.expr_block(
- cx.block_all(sp,
- vector,
- Some(cx.expr_ident(sp, id_ext("tt")))));
+ vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
+ let block = cx.expr_block(cx.block(sp, vector));
let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]);
base::MacEager::expr(expanded)
}
fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> {
- let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext("TokenTree"), id_ext(name));
+ let idents = vec!(id_ext("syntax"), id_ext("tokenstream"), id_ext("TokenTree"), id_ext(name));
cx.expr_path(cx.path_global(sp, idents))
}
let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
let mut tts_stmts = vec![stmt_let_tt];
tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher));
- let e_tts = cx.expr_block(cx.block(sp, tts_stmts,
- Some(cx.expr_ident(sp, id_ext("tt")))));
+ tts_stmts.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
+ let e_tts = cx.expr_block(cx.block(sp, tts_stmts));
+
let e_separator = match seq.separator {
Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)),
None => cx.expr_none(sp),
};
let e_op = match seq.op {
- ast::KleeneOp::ZeroOrMore => "ZeroOrMore",
- ast::KleeneOp::OneOrMore => "OneOrMore",
+ tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore",
+ tokenstream::KleeneOp::OneOrMore => "OneOrMore",
};
let e_op_idents = vec![
id_ext("syntax"),
- id_ext("ast"),
+ id_ext("tokenstream"),
id_ext("KleeneOp"),
id_ext(e_op),
];
cx.field_imm(sp, id_ext("op"), e_op),
cx.field_imm(sp, id_ext("num_captures"),
cx.expr_usize(sp, seq.num_captures))];
- let seq_path = vec![id_ext("syntax"), id_ext("ast"), id_ext("SequenceRepetition")];
+ let seq_path = vec![id_ext("syntax"),
+ id_ext("tokenstream"),
+ id_ext("SequenceRepetition")];
let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
- let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
- id_ext("rc"),
- id_ext("Rc"),
- id_ext("new")],
- vec![e_seq_struct]);
let e_tok = cx.expr_call(sp,
mk_tt_path(cx, sp, "Sequence"),
- vec!(e_sp, e_rc_new));
+ vec!(e_sp, e_seq_struct));
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
let mut vector = mk_stmts_let(cx, sp);
vector.extend(statements_mk_tts(cx, &tts[..], false));
- let block = cx.expr_block(
- cx.block_all(sp,
- vector,
- Some(cx.expr_ident(sp, id_ext("tt")))));
+ vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
+ let block = cx.expr_block(cx.block(sp, vector));
(cx_expr, block)
}
let cx_expr_borrow = cx.expr_addr_of(sp, cx.expr_deref(sp, cx_expr));
let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr_borrow);
- let stmts = imports.iter().map(|path| {
+ let mut stmts = imports.iter().map(|path| {
// make item: `use ...;`
let path = path.iter().map(|s| s.to_string()).collect();
cx.stmt_item(sp, cx.item_use_glob(sp, ast::Visibility::Inherited, ids_ext(path)))
- }).chain(Some(stmt_let_ext_cx)).collect();
+ }).chain(Some(stmt_let_ext_cx)).collect::<Vec<_>>();
+ stmts.push(cx.stmt_expr(expr));
- cx.expr_block(cx.block_all(sp, stmts, Some(expr)))
+ cx.expr_block(cx.block(sp, stmts))
}
fn expand_parse_call(cx: &ExtCtxt,
// except according to those terms.
use ast;
-use codemap::{Pos, Span};
-use codemap;
+use syntax_pos::{self, Pos, Span};
use ext::base::*;
use ext::base;
use ext::build::AstBuilder;
use parse;
use print::pprust;
use ptr::P;
+use tokenstream;
use util::small_vector::SmallVector;
use std::fs::File;
// a given file into the current one.
/// line!(): expands to the current line number
-pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "line!");
}
/* column!(): expands to the current column number */
-pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "column!");
/// file!(): expands to the current filename */
/// The filemap (`loc.file`) contains a bunch more information we could spit
/// out if we wanted.
-pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "file!");
base::MacEager::expr(cx.expr_str(topmost, filename))
}
-pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
let s = pprust::tts_to_string(tts);
base::MacEager::expr(cx.expr_str(sp,
token::intern_and_get_ident(&s[..])))
}
-pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "module_path!");
let string = cx.mod_path()
/// include! : parse the given file as an expr
/// This is generally a bad idea because it's going to behave
/// unhygienically.
-pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'cx> {
let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
Some(f) => f,
}
// include_str! : read the given file, insert it as a literal string expr
-pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
Some(f) => f,
}
}
-pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
Some(f) => f,
// resolve a file-system path to an absolute file-system path (if it
// isn't already)
-fn res_rel_file(cx: &mut ExtCtxt, sp: codemap::Span, arg: &Path) -> PathBuf {
+fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: &Path) -> PathBuf {
// NB: relative paths are resolved relative to the compilation unit
if !arg.is_absolute() {
let callsite = cx.codemap().source_callsite(sp);
use self::TokenTreeOrTokenTreeVec::*;
use ast;
-use ast::{TokenTree, Name, Ident};
-use codemap::{BytePos, mk_sp, Span, Spanned};
-use codemap;
+use ast::{Name, Ident};
+use syntax_pos::{self, BytePos, mk_sp, Span};
+use codemap::Spanned;
use errors::FatalError;
use parse::lexer::*; //resolve bug?
use parse::ParseSess;
use parse::token;
use print::pprust;
use ptr::P;
+use tokenstream::{self, TokenTree};
use std::mem;
use std::rc::Rc;
#[derive(Clone)]
enum TokenTreeOrTokenTreeVec {
- Tt(ast::TokenTree),
- TtSeq(Rc<Vec<ast::TokenTree>>),
+ Tt(tokenstream::TokenTree),
+ TtSeq(Rc<Vec<tokenstream::TokenTree>>),
}
impl TokenTreeOrTokenTreeVec {
/// token tree it was derived from.
pub enum NamedMatch {
- MatchedSeq(Vec<Rc<NamedMatch>>, codemap::Span),
+ MatchedSeq(Vec<Rc<NamedMatch>>, syntax_pos::Span),
MatchedNonterminal(Nonterminal)
}
-> ParseResult<HashMap<Name, Rc<NamedMatch>>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize)
- -> Result<(), (codemap::Span, String)> {
+ -> Result<(), (syntax_pos::Span, String)> {
match *m {
TokenTree::Sequence(_, ref seq) => {
for next_m in &seq.tts {
pub enum ParseResult<T> {
Success(T),
/// Arm failed to match
- Failure(codemap::Span, String),
+ Failure(syntax_pos::Span, String),
/// Fatal error (malformed macro?). Abort compilation.
- Error(codemap::Span, String)
+ Error(syntax_pos::Span, String)
}
pub type NamedParseResult = ParseResult<HashMap<Name, Rc<NamedMatch>>>;
match ei.top_elts.get_tt(idx) {
/* need to descend into sequence */
TokenTree::Sequence(sp, seq) => {
- if seq.op == ast::KleeneOp::ZeroOrMore {
+ if seq.op == tokenstream::KleeneOp::ZeroOrMore {
let mut new_ei = ei.clone();
new_ei.match_cur += seq.num_captures;
new_ei.idx += 1;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ast::{self, TokenTree};
-use codemap::{Span, DUMMY_SP};
+use ast;
+use syntax_pos::{Span, DUMMY_SP};
use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
use ext::base::{NormalTT, TTMacroExpander};
use ext::tt::macro_parser::{Success, Error, Failure};
use parse::token::Token::*;
use print;
use ptr::P;
+use tokenstream::{self, TokenTree};
use util::small_vector::SmallVector;
use std::cell::RefCell;
use std::collections::{HashMap};
use std::collections::hash_map::{Entry};
-use std::rc::Rc;
struct ParserAnyMacro<'a> {
parser: RefCell<Parser<'a>>,
Some(ret)
}
+ fn make_trait_items(self: Box<ParserAnyMacro<'a>>)
+ -> Option<SmallVector<ast::TraitItem>> {
+ let mut ret = SmallVector::zero();
+ loop {
+ let mut parser = self.parser.borrow_mut();
+ match parser.token {
+ token::Eof => break,
+ _ => ret.push(panictry!(parser.parse_trait_item()))
+ }
+ }
+ self.ensure_complete_parse(false, "item");
+ Some(ret)
+ }
+
+
fn make_stmts(self: Box<ParserAnyMacro<'a>>)
-> Option<SmallVector<ast::Stmt>> {
let mut ret = SmallVector::zero();
// These spans won't matter, anyways
let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt"));
let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt"));
- let argument_gram = vec!(
- TokenTree::Sequence(DUMMY_SP,
- Rc::new(ast::SequenceRepetition {
- tts: vec![
- TokenTree::Token(DUMMY_SP, match_lhs_tok),
- TokenTree::Token(DUMMY_SP, token::FatArrow),
- TokenTree::Token(DUMMY_SP, match_rhs_tok)],
- separator: Some(token::Semi),
- op: ast::KleeneOp::OneOrMore,
- num_captures: 2
- })),
- //to phase into semicolon-termination instead of
- //semicolon-separation
- TokenTree::Sequence(DUMMY_SP,
- Rc::new(ast::SequenceRepetition {
- tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
- separator: None,
- op: ast::KleeneOp::ZeroOrMore,
- num_captures: 0
- })));
-
+ let argument_gram = vec![
+ TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+ tts: vec![
+ TokenTree::Token(DUMMY_SP, match_lhs_tok),
+ TokenTree::Token(DUMMY_SP, token::FatArrow),
+ TokenTree::Token(DUMMY_SP, match_rhs_tok),
+ ],
+ separator: Some(token::Semi),
+ op: tokenstream::KleeneOp::OneOrMore,
+ num_captures: 2,
+ }),
+ // to phase into semicolon-termination instead of semicolon-separation
+ TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
+ tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
+ separator: None,
+ op: tokenstream::KleeneOp::ZeroOrMore,
+ num_captures: 0
+ }),
+ ];
// Parse the macro_rules! invocation (`none` is for no interpolations):
let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic,
}
// Reverse scan: Sequence comes before `first`.
- if subfirst.maybe_empty || seq_rep.op == ast::KleeneOp::ZeroOrMore {
+ if subfirst.maybe_empty || seq_rep.op == tokenstream::KleeneOp::ZeroOrMore {
// If sequence is potentially empty, then
// union them (preserving first emptiness).
first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
assert!(first.maybe_empty);
first.add_all(subfirst);
- if subfirst.maybe_empty || seq_rep.op == ast::KleeneOp::ZeroOrMore {
+ if subfirst.maybe_empty ||
+ seq_rep.op == tokenstream::KleeneOp::ZeroOrMore {
// continue scanning for more first
// tokens, but also make sure we
// restore empty-tracking state
// except according to those terms.
use self::LockstepIterSize::*;
-use ast;
-use ast::{TokenTree, Ident, Name};
-use codemap::{Span, DUMMY_SP};
+use ast::{Ident, Name};
+use syntax_pos::{Span, DUMMY_SP};
use errors::{Handler, DiagnosticBuilder};
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use parse::token::{DocComment, MatchNt, SubstNt};
use parse::token::{Token, NtIdent, SpecialMacroVar};
use parse::token;
use parse::lexer::TokenAndSpan;
+use tokenstream::{self, TokenTree};
use std::rc::Rc;
use std::ops::Add;
pub fn new_tt_reader(sp_diag: &Handler,
interp: Option<HashMap<Name, Rc<NamedMatch>>>,
imported_from: Option<Ident>,
- src: Vec<ast::TokenTree>)
+ src: Vec<tokenstream::TokenTree>)
-> TtReader {
new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
}
pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
interp: Option<HashMap<Name, Rc<NamedMatch>>>,
imported_from: Option<Ident>,
- src: Vec<ast::TokenTree>,
+ src: Vec<tokenstream::TokenTree>,
desugar_doc_comments: bool)
-> TtReader {
let mut r = TtReader {
sp_diag: sp_diag,
stack: vec!(TtFrame {
- forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
+ forest: TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition {
tts: src,
// doesn't matter. This merely holds the root unzipping.
- separator: None, op: ast::KleeneOp::ZeroOrMore, num_captures: 0
- })),
+ separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
+ }),
idx: 0,
dotdotdoted: false,
sep: None,
}
LisConstraint(len, _) => {
if len == 0 {
- if seq.op == ast::KleeneOp::OneOrMore {
+ if seq.op == tokenstream::KleeneOp::OneOrMore {
// FIXME #2887 blame invoker
panic!(r.sp_diag.span_fatal(sp.clone(),
"this must repeat at least once"));
use ast;
use attr;
use attr::AttrMetaMethods;
-use codemap::{CodeMap, Span};
+use codemap::CodeMap;
+use syntax_pos::Span;
use errors::Handler;
-use visit;
-use visit::{FnKind, Visitor};
+use visit::{self, FnKind, Visitor};
use parse::ParseSess;
use parse::token::InternedString;
}}
}
-impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {
+impl<'a> Visitor for PostExpansionVisitor<'a> {
fn visit_attribute(&mut self, attr: &ast::Attribute) {
if !self.context.cm.span_allows_unstable(attr.span) {
self.context.check_attribute(attr, false);
}
fn visit_fn(&mut self,
- fn_kind: FnKind<'v>,
- fn_decl: &'v ast::FnDecl,
- block: &'v ast::Block,
+ fn_kind: FnKind,
+ fn_decl: &ast::FnDecl,
+ block: &ast::Block,
span: Span,
_node_id: NodeId) {
// check for const fn declarations
visit::walk_fn(self, fn_kind, fn_decl, block, span);
}
- fn visit_trait_item(&mut self, ti: &'v ast::TraitItem) {
+ fn visit_trait_item(&mut self, ti: &ast::TraitItem) {
match ti.node {
ast::TraitItemKind::Const(..) => {
gate_feature_post!(&self, associated_consts,
visit::walk_trait_item(self, ti);
}
- fn visit_impl_item(&mut self, ii: &'v ast::ImplItem) {
+ fn visit_impl_item(&mut self, ii: &ast::ImplItem) {
if ii.defaultness == ast::Defaultness::Default {
gate_feature_post!(&self, specialization,
ii.span,
visit::walk_impl_item(self, ii);
}
- fn visit_vis(&mut self, vis: &'v ast::Visibility) {
+ fn visit_vis(&mut self, vis: &ast::Visibility) {
let span = match *vis {
ast::Visibility::Crate(span) => span,
ast::Visibility::Restricted { ref path, .. } => path.span,
use ast::*;
use ast;
-use attr::{ThinAttributes, ThinAttributesExt};
-use codemap::{respan, Span, Spanned};
+use syntax_pos::Span;
+use codemap::{Spanned, respan};
use parse::token::{self, keywords};
use ptr::P;
+use tokenstream::*;
use util::small_vector::SmallVector;
use util::move_map::MoveMap;
-use std::rc::Rc;
-
pub trait Folder : Sized {
// Any additions to this trait should happen in form
// of a call to a public `noop_*` function that only calls
noop_fold_pat(p, self)
}
- fn fold_decl(&mut self, d: P<Decl>) -> SmallVector<P<Decl>> {
- noop_fold_decl(d, self)
- }
-
fn fold_expr(&mut self, e: P<Expr>) -> P<Expr> {
e.map(|e| noop_fold_expr(e, self))
}
noop_fold_ty_params(tps, self)
}
- fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree {
+ fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
noop_fold_tt(tt, self)
}
- fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec<TokenTree> {
+ fn fold_tts(&mut self, tts: Vec<TokenTree>) -> Vec<TokenTree> {
noop_fold_tts(tts, self)
}
attrs.move_flat_map(|x| fld.fold_attribute(x))
}
-pub fn fold_thin_attrs<T: Folder>(attrs: ThinAttributes, fld: &mut T) -> ThinAttributes {
- attrs.map_thin_attrs(|v| fold_attrs(v, fld))
+pub fn fold_thin_attrs<T: Folder>(attrs: ThinVec<Attribute>, fld: &mut T) -> ThinVec<Attribute> {
+ fold_attrs(attrs.into(), fld).into()
}
pub fn noop_fold_arm<T: Folder>(Arm {attrs, pats, guard, body}: Arm, fld: &mut T) -> Arm {
}
}
-pub fn noop_fold_decl<T: Folder>(d: P<Decl>, fld: &mut T) -> SmallVector<P<Decl>> {
- d.and_then(|Spanned {node, span}| match node {
- DeclKind::Local(l) => SmallVector::one(P(Spanned {
- node: DeclKind::Local(fld.fold_local(l)),
- span: fld.new_span(span)
- })),
- DeclKind::Item(it) => fld.fold_item(it).into_iter().map(|i| P(Spanned {
- node: DeclKind::Item(i),
- span: fld.new_span(span)
- })).collect()
- })
-}
-
pub fn noop_fold_ty_binding<T: Folder>(b: TypeBinding, fld: &mut T) -> TypeBinding {
TypeBinding {
id: fld.new_id(b.id),
pat: fld.fold_pat(pat),
init: init.map(|e| fld.fold_expr(e)),
span: fld.new_span(span),
- attrs: attrs.map_thin_attrs(|v| fold_attrs(v, fld)),
+ attrs: fold_attrs(attrs.into(), fld).into(),
})
}
Spanned {
node: Mac_ {
path: fld.fold_path(node.path),
- tts: fld.fold_tts(&node.tts),
- ctxt: node.ctxt,
+ tts: fld.fold_tts(node.tts),
},
span: fld.new_span(span)
}
}
}
-pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
- match *tt {
+pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
+ match tt {
TokenTree::Token(span, ref tok) =>
TokenTree::Token(span, fld.fold_token(tok.clone())),
- TokenTree::Delimited(span, ref delimed) => {
- TokenTree::Delimited(span, Rc::new(
- Delimited {
- delim: delimed.delim,
- open_span: delimed.open_span,
- tts: fld.fold_tts(&delimed.tts),
- close_span: delimed.close_span,
- }
- ))
- },
- TokenTree::Sequence(span, ref seq) =>
- TokenTree::Sequence(span,
- Rc::new(SequenceRepetition {
- tts: fld.fold_tts(&seq.tts),
- separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
- ..**seq
- })),
+ TokenTree::Delimited(span, delimed) => TokenTree::Delimited(span, Delimited {
+ delim: delimed.delim,
+ open_span: delimed.open_span,
+ tts: fld.fold_tts(delimed.tts),
+ close_span: delimed.close_span,
+ }),
+ TokenTree::Sequence(span, seq) => TokenTree::Sequence(span, SequenceRepetition {
+ tts: fld.fold_tts(seq.tts),
+ separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
+ ..seq
+ }),
}
}
-pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
- // FIXME: Does this have to take a tts slice?
- // Could use move_map otherwise...
- tts.iter().map(|tt| fld.fold_tt(tt)).collect()
+pub fn noop_fold_tts<T: Folder>(tts: Vec<TokenTree>, fld: &mut T) -> Vec<TokenTree> {
+ tts.move_map(|tt| fld.fold_tt(tt))
}
// apply ident folder if it's an ident, apply other folds to interpolated nodes
token::NtIdent(Box::new(Spanned::<Ident>{node: fld.fold_ident(id.node), ..*id})),
token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
- token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
+ token::NtTT(tt) => token::NtTT(tt.map(|tt| fld.fold_tt(tt))),
token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
token::NtImplItem(arm) =>
token::NtImplItem(arm.map(|arm| fld.fold_impl_item(arm)
}
pub fn noop_fold_block<T: Folder>(b: P<Block>, folder: &mut T) -> P<Block> {
- b.map(|Block {id, stmts, expr, rules, span}| Block {
+ b.map(|Block {id, stmts, rules, span}| Block {
id: folder.new_id(id),
stmts: stmts.move_flat_map(|s| folder.fold_stmt(s).into_iter()),
- expr: expr.and_then(|x| folder.fold_opt_expr(x)),
rules: rules,
span: folder.new_span(span),
})
TraitItemKind::Type(folder.fold_bounds(bounds),
default.map(|x| folder.fold_ty(x)))
}
+ ast::TraitItemKind::Macro(mac) => {
+ TraitItemKind::Macro(folder.fold_mac(mac))
+ }
},
span: folder.new_span(i.span)
})
PatKind::TupleStruct(folder.fold_path(pth),
pats.move_map(|x| folder.fold_pat(x)), ddpos)
}
- PatKind::Path(pth) => {
- PatKind::Path(folder.fold_path(pth))
- }
- PatKind::QPath(qself, pth) => {
- let qself = QSelf {ty: folder.fold_ty(qself.ty), .. qself};
- PatKind::QPath(qself, folder.fold_path(pth))
+ PatKind::Path(opt_qself, pth) => {
+ let opt_qself = opt_qself.map(|qself| {
+ QSelf { ty: folder.fold_ty(qself.ty), position: qself.position }
+ });
+ PatKind::Path(opt_qself, folder.fold_path(pth))
}
PatKind::Struct(pth, fields, etc) => {
let pth = folder.fold_path(pth);
respan(folder.new_span(label.span),
folder.fold_ident(label.node)))
),
- ExprKind::Again(opt_ident) => ExprKind::Again(opt_ident.map(|label|
+ ExprKind::Continue(opt_ident) => ExprKind::Continue(opt_ident.map(|label|
respan(folder.new_span(label.span),
folder.fold_ident(label.node)))
),
ExprKind::Try(ex) => ExprKind::Try(folder.fold_expr(ex)),
},
span: folder.new_span(span),
- attrs: attrs.map_thin_attrs(|v| fold_attrs(v, folder)),
+ attrs: fold_attrs(attrs.into(), folder).into(),
}
}
es.move_flat_map(|e| folder.fold_opt_expr(e))
}
-pub fn noop_fold_stmt<T: Folder>(Spanned {node, span}: Stmt, folder: &mut T)
+pub fn noop_fold_stmt<T: Folder>(Stmt {node, span, id}: Stmt, folder: &mut T)
-> SmallVector<Stmt> {
+ let id = folder.new_id(id);
let span = folder.new_span(span);
+
match node {
- StmtKind::Decl(d, id) => {
- let id = folder.new_id(id);
- folder.fold_decl(d).into_iter().map(|d| Spanned {
- node: StmtKind::Decl(d, id),
- span: span
- }).collect()
- }
- StmtKind::Expr(e, id) => {
- let id = folder.new_id(id);
- if let Some(e) = folder.fold_opt_expr(e) {
- SmallVector::one(Spanned {
- node: StmtKind::Expr(e, id),
- span: span
+ StmtKind::Local(local) => SmallVector::one(Stmt {
+ id: id,
+ node: StmtKind::Local(folder.fold_local(local)),
+ span: span,
+ }),
+ StmtKind::Item(item) => folder.fold_item(item).into_iter().map(|item| Stmt {
+ id: id,
+ node: StmtKind::Item(item),
+ span: span,
+ }).collect(),
+ StmtKind::Expr(expr) => {
+ if let Some(expr) = folder.fold_opt_expr(expr) {
+ SmallVector::one(Stmt {
+ id: id,
+ node: StmtKind::Expr(expr),
+ span: span,
})
} else {
SmallVector::zero()
}
}
- StmtKind::Semi(e, id) => {
- let id = folder.new_id(id);
- if let Some(e) = folder.fold_opt_expr(e) {
- SmallVector::one(Spanned {
- node: StmtKind::Semi(e, id),
- span: span
+ StmtKind::Semi(expr) => {
+ if let Some(expr) = folder.fold_opt_expr(expr) {
+ SmallVector::one(Stmt {
+ id: id,
+ node: StmtKind::Semi(expr),
+ span: span,
})
} else {
SmallVector::zero()
}
}
- StmtKind::Mac(mac, semi, attrs) => SmallVector::one(Spanned {
- node: StmtKind::Mac(mac.map(|m| folder.fold_mac(m)),
- semi,
- attrs.map_thin_attrs(|v| fold_attrs(v, folder))),
- span: span
+ StmtKind::Mac(mac) => SmallVector::one(Stmt {
+ id: id,
+ node: StmtKind::Mac(mac.map(|(mac, semi, attrs)| {
+ (folder.fold_mac(mac), semi, fold_attrs(attrs.into(), folder).into())
+ })),
+ span: span,
})
}
}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A JSON emitter for errors.
+//!
+//! This works by converting errors to a simplified structural format (see the
+//! `Diagnostic*` structs below) and then serialising them. These should
+//! contain as much information about the error as possible.
+//!
+//! The format of the JSON output should be considered *unstable*. For now the
+//! structs at the end of this file (Diagnostic*) specify the error format.
+
+// FIXME spec the JSON output properly.
+
+use codemap::CodeMap;
+use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
+use errors::registry::Registry;
+use errors::{Level, DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion, CodeMapper};
+use errors::emitter::Emitter;
+
+use std::rc::Rc;
+use std::io::{self, Write};
+use std::vec;
+
+use rustc_serialize::json::as_json;
+
+pub struct JsonEmitter {
+ dst: Box<Write + Send>,
+ registry: Option<Registry>,
+ cm: Rc<CodeMapper + 'static>,
+}
+
+impl JsonEmitter {
+ pub fn basic() -> JsonEmitter {
+ JsonEmitter::stderr(None, Rc::new(CodeMap::new()))
+ }
+
+ pub fn stderr(registry: Option<Registry>,
+ code_map: Rc<CodeMap>) -> JsonEmitter {
+ JsonEmitter {
+ dst: Box::new(io::stderr()),
+ registry: registry,
+ cm: code_map,
+ }
+ }
+}
+
+impl Emitter for JsonEmitter {
+ fn emit(&mut self, span: &MultiSpan, msg: &str, code: Option<&str>, level: Level) {
+ let data = Diagnostic::new(span, msg, code, level, self);
+ if let Err(e) = writeln!(&mut self.dst, "{}", as_json(&data)) {
+ panic!("failed to print diagnostics: {:?}", e);
+ }
+ }
+
+ fn emit_struct(&mut self, db: &DiagnosticBuilder) {
+ let data = Diagnostic::from_diagnostic_builder(db, self);
+ if let Err(e) = writeln!(&mut self.dst, "{}", as_json(&data)) {
+ panic!("failed to print diagnostics: {:?}", e);
+ }
+ }
+}
+
+// The following data types are provided just for serialisation.
+
+#[derive(RustcEncodable)]
+struct Diagnostic<'a> {
+ /// The primary error message.
+ message: &'a str,
+ code: Option<DiagnosticCode>,
+ /// "error: internal compiler error", "error", "warning", "note", "help".
+ level: &'static str,
+ spans: Vec<DiagnosticSpan>,
+ /// Associated diagnostic messages.
+ children: Vec<Diagnostic<'a>>,
+ /// The message as rustc would render it. Currently this is only
+ /// `Some` for "suggestions", but eventually it will include all
+ /// snippets.
+ rendered: Option<String>,
+}
+
+#[derive(RustcEncodable)]
+struct DiagnosticSpan {
+ file_name: String,
+ byte_start: u32,
+ byte_end: u32,
+ /// 1-based.
+ line_start: usize,
+ line_end: usize,
+ /// 1-based, character offset.
+ column_start: usize,
+ column_end: usize,
+ /// Is this a "primary" span -- meaning the point, or one of the points,
+ /// where the error occurred?
+ is_primary: bool,
+ /// Source text from the start of line_start to the end of line_end.
+ text: Vec<DiagnosticSpanLine>,
+ /// Label that should be placed at this location (if any)
+ label: Option<String>,
+ /// If we are suggesting a replacement, this will contain text
+ /// that should be spliced in atop this span. You may prefer to
+ /// load the fully rendered version from the parent `Diagnostic`,
+ /// however.
+ suggested_replacement: Option<String>,
+ /// Macro invocations that created the code at this span, if any.
+ expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
+}
+
+#[derive(RustcEncodable)]
+struct DiagnosticSpanLine {
+ text: String,
+
+ /// 1-based, character offset in self.text.
+ highlight_start: usize,
+
+ highlight_end: usize,
+}
+
+#[derive(RustcEncodable)]
+struct DiagnosticSpanMacroExpansion {
+ /// span where macro was applied to generate this code; note that
+ /// this may itself derive from a macro (if
+ /// `span.expansion.is_some()`)
+ span: DiagnosticSpan,
+
+ /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
+ macro_decl_name: String,
+
+ /// span where macro was defined (if known)
+ def_site_span: Option<DiagnosticSpan>,
+}
+
+#[derive(RustcEncodable)]
+struct DiagnosticCode {
+ /// The code itself.
+ code: String,
+ /// An explanation for the code.
+ explanation: Option<&'static str>,
+}
+
+impl<'a> Diagnostic<'a> {
+ fn new(msp: &MultiSpan,
+ msg: &'a str,
+ code: Option<&str>,
+ level: Level,
+ je: &JsonEmitter)
+ -> Diagnostic<'a> {
+ Diagnostic {
+ message: msg,
+ code: DiagnosticCode::map_opt_string(code.map(|c| c.to_owned()), je),
+ level: level.to_str(),
+ spans: DiagnosticSpan::from_multispan(msp, je),
+ children: vec![],
+ rendered: None,
+ }
+ }
+
+ fn from_diagnostic_builder<'c>(db: &'c DiagnosticBuilder,
+ je: &JsonEmitter)
+ -> Diagnostic<'c> {
+ Diagnostic {
+ message: &db.message,
+ code: DiagnosticCode::map_opt_string(db.code.clone(), je),
+ level: db.level.to_str(),
+ spans: DiagnosticSpan::from_multispan(&db.span, je),
+ children: db.children.iter().map(|c| {
+ Diagnostic::from_sub_diagnostic(c, je)
+ }).collect(),
+ rendered: None,
+ }
+ }
+
+ fn from_sub_diagnostic<'c>(db: &'c SubDiagnostic, je: &JsonEmitter) -> Diagnostic<'c> {
+ Diagnostic {
+ message: &db.message,
+ code: None,
+ level: db.level.to_str(),
+ spans: db.render_span.as_ref()
+ .map(|sp| DiagnosticSpan::from_render_span(sp, je))
+ .unwrap_or_else(|| DiagnosticSpan::from_multispan(&db.span, je)),
+ children: vec![],
+ rendered: db.render_span.as_ref()
+ .and_then(|rsp| je.render(rsp)),
+ }
+ }
+}
+
+impl DiagnosticSpan {
+ fn from_span_label(span: SpanLabel,
+ suggestion: Option<&String>,
+ je: &JsonEmitter)
+ -> DiagnosticSpan {
+ Self::from_span_etc(span.span,
+ span.is_primary,
+ span.label,
+ suggestion,
+ je)
+ }
+
+ fn from_span_etc(span: Span,
+ is_primary: bool,
+ label: Option<String>,
+ suggestion: Option<&String>,
+ je: &JsonEmitter)
+ -> DiagnosticSpan {
+ // obtain the full backtrace from the `macro_backtrace`
+ // helper; in some ways, it'd be better to expand the
+ // backtrace ourselves, but the `macro_backtrace` helper makes
+ // some decisions, such as dropping some frames, and I don't
+ // want to duplicate that logic here.
+ let backtrace = je.cm.macro_backtrace(span).into_iter();
+ DiagnosticSpan::from_span_full(span,
+ is_primary,
+ label,
+ suggestion,
+ backtrace,
+ je)
+ }
+
+ fn from_span_full(span: Span,
+ is_primary: bool,
+ label: Option<String>,
+ suggestion: Option<&String>,
+ mut backtrace: vec::IntoIter<MacroBacktrace>,
+ je: &JsonEmitter)
+ -> DiagnosticSpan {
+ let start = je.cm.lookup_char_pos(span.lo);
+ let end = je.cm.lookup_char_pos(span.hi);
+ let backtrace_step = backtrace.next().map(|bt| {
+ let call_site =
+ Self::from_span_full(bt.call_site,
+ false,
+ None,
+ None,
+ backtrace,
+ je);
+ let def_site_span = bt.def_site_span.map(|sp| {
+ Self::from_span_full(sp,
+ false,
+ None,
+ None,
+ vec![].into_iter(),
+ je)
+ });
+ Box::new(DiagnosticSpanMacroExpansion {
+ span: call_site,
+ macro_decl_name: bt.macro_decl_name,
+ def_site_span: def_site_span,
+ })
+ });
+ DiagnosticSpan {
+ file_name: start.file.name.clone(),
+ byte_start: span.lo.0,
+ byte_end: span.hi.0,
+ line_start: start.line,
+ line_end: end.line,
+ column_start: start.col.0 + 1,
+ column_end: end.col.0 + 1,
+ is_primary: is_primary,
+ text: DiagnosticSpanLine::from_span(span, je),
+ suggested_replacement: suggestion.cloned(),
+ expansion: backtrace_step,
+ label: label,
+ }
+ }
+
+ fn from_multispan(msp: &MultiSpan, je: &JsonEmitter) -> Vec<DiagnosticSpan> {
+ msp.span_labels()
+ .into_iter()
+ .map(|span_str| Self::from_span_label(span_str, None, je))
+ .collect()
+ }
+
+ fn from_suggestion(suggestion: &CodeSuggestion, je: &JsonEmitter)
+ -> Vec<DiagnosticSpan> {
+ assert_eq!(suggestion.msp.span_labels().len(), suggestion.substitutes.len());
+ suggestion.msp.span_labels()
+ .into_iter()
+ .zip(&suggestion.substitutes)
+ .map(|(span_label, suggestion)| {
+ DiagnosticSpan::from_span_label(span_label,
+ Some(suggestion),
+ je)
+ })
+ .collect()
+ }
+
+ fn from_render_span(rsp: &RenderSpan, je: &JsonEmitter) -> Vec<DiagnosticSpan> {
+ match *rsp {
+ RenderSpan::FullSpan(ref msp) =>
+ DiagnosticSpan::from_multispan(msp, je),
+ RenderSpan::Suggestion(ref suggestion) =>
+ DiagnosticSpan::from_suggestion(suggestion, je),
+ }
+ }
+}
+
+impl DiagnosticSpanLine {
+ fn line_from_filemap(fm: &syntax_pos::FileMap,
+ index: usize,
+ h_start: usize,
+ h_end: usize)
+ -> DiagnosticSpanLine {
+ DiagnosticSpanLine {
+ text: fm.get_line(index).unwrap().to_owned(),
+ highlight_start: h_start,
+ highlight_end: h_end,
+ }
+ }
+
+ /// Create a list of DiagnosticSpanLines from span - each line with any part
+ /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
+ /// `span` within the line.
+ fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
+ je.cm.span_to_lines(span)
+ .map(|lines| {
+ let fm = &*lines.file;
+ lines.lines
+ .iter()
+ .map(|line| {
+ DiagnosticSpanLine::line_from_filemap(fm,
+ line.line_index,
+ line.start_col.0 + 1,
+ line.end_col.0 + 1)
+ })
+ .collect()
+ })
+ .unwrap_or(vec![])
+ }
+}
+
+impl DiagnosticCode {
+ fn map_opt_string(s: Option<String>, je: &JsonEmitter) -> Option<DiagnosticCode> {
+ s.map(|s| {
+
+ let explanation = je.registry
+ .as_ref()
+ .and_then(|registry| registry.find_description(&s));
+
+ DiagnosticCode {
+ code: s,
+ explanation: explanation,
+ }
+ })
+ }
+}
+
+impl JsonEmitter {
+ fn render(&self, render_span: &RenderSpan) -> Option<String> {
+ use std::borrow::Borrow;
+
+ match *render_span {
+ RenderSpan::FullSpan(_) => {
+ None
+ }
+ RenderSpan::Suggestion(ref suggestion) => {
+ Some(suggestion.splice_lines(self.cm.borrow()))
+ }
+ }
+ }
+}
+
#![feature(str_escape)]
#![feature(unicode)]
#![feature(question_mark)]
-#![feature(range_contains)]
extern crate serialize;
extern crate term;
#[macro_use] extern crate log;
#[macro_use] #[no_link] extern crate rustc_bitflags;
extern crate rustc_unicode;
+pub extern crate rustc_errors as errors;
+extern crate syntax_pos;
extern crate serialize as rustc_serialize; // used by deriving
+
// A variant of 'try!' that panics on an Err. This is used as a crutch on the
// way towards a non-panic!-prone parser. It should be used for fatal parsing
// errors; eventually we plan to convert all code using panictry to just use
macro_rules! panictry {
($e:expr) => ({
use std::result::Result::{Ok, Err};
- use $crate::errors::FatalError;
+ use errors::FatalError;
match $e {
Ok(e) => e,
Err(mut e) => {
pub mod parser_testing;
pub mod small_vector;
pub mod move_map;
+
+ mod thin_vec;
+ pub use self::thin_vec::ThinVec;
}
pub mod diagnostics {
pub mod macros;
pub mod plugin;
- pub mod registry;
pub mod metadata;
}
-pub mod errors;
+pub mod json;
pub mod syntax {
pub use ext;
pub mod std_inject;
pub mod str;
pub mod test;
+pub mod tokenstream;
pub mod visit;
pub mod print {
use attr;
use ast;
-use codemap::{spanned, Spanned, mk_sp, Span};
+use syntax_pos::{mk_sp, Span};
+use codemap::{spanned, Spanned};
use parse::common::SeqSep;
use parse::PResult;
use parse::token;
/// seen the semicolon, and thus don't need another.
pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
match *stmt {
- ast::StmtKind::Decl(ref d, _) => {
- match d.node {
- ast::DeclKind::Local(_) => true,
- ast::DeclKind::Item(_) => false,
- }
- }
- ast::StmtKind::Expr(ref e, _) => expr_requires_semi_to_be_stmt(e),
+ ast::StmtKind::Local(_) => true,
+ ast::StmtKind::Item(_) => false,
+ ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
ast::StmtKind::Semi(..) => false,
ast::StmtKind::Mac(..) => false,
}
pub use self::CommentStyle::*;
use ast;
-use codemap::{BytePos, CharPos, CodeMap, Pos};
+use codemap::CodeMap;
+use syntax_pos::{BytePos, CharPos, Pos};
use errors;
use parse::lexer::is_block_doc_comment;
use parse::lexer::{StringReader, TokenAndSpan};
// except according to those terms.
use ast;
-use codemap::{BytePos, CharPos, CodeMap, Pos, Span};
-use codemap;
+use syntax_pos::{self, BytePos, CharPos, Pos, Span};
+use codemap::CodeMap;
use errors::{FatalError, Handler, DiagnosticBuilder};
use ext::tt::transcribe::tt_next_token;
use parse::token::{self, keywords, str_to_ident};
pub col: CharPos,
/// The last character to be read
pub curr: Option<char>,
- pub filemap: Rc<codemap::FileMap>,
+ pub filemap: Rc<syntax_pos::FileMap>,
// cached:
pub peek_tok: token::Token,
pub peek_span: Span,
impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into pos and curr
pub fn new_raw<'b>(span_diagnostic: &'b Handler,
- filemap: Rc<codemap::FileMap>)
+ filemap: Rc<syntax_pos::FileMap>)
-> StringReader<'b> {
if filemap.src.is_none() {
span_diagnostic.bug(&format!("Cannot lex filemap \
filemap: filemap,
// dummy values; not read
peek_tok: token::Eof,
- peek_span: codemap::DUMMY_SP,
+ peek_span: syntax_pos::DUMMY_SP,
source_text: source_text,
fatal_errs: Vec::new(),
};
}
pub fn new<'b>(span_diagnostic: &'b Handler,
- filemap: Rc<codemap::FileMap>)
+ filemap: Rc<syntax_pos::FileMap>)
-> StringReader<'b> {
let mut sr = StringReader::new_raw(span_diagnostic, filemap);
if let Err(_) = sr.advance_token() {
/// Report a fatal error spanning [`from_pos`, `to_pos`).
fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError {
- self.fatal_span(codemap::mk_sp(from_pos, to_pos), m)
+ self.fatal_span(syntax_pos::mk_sp(from_pos, to_pos), m)
}
/// Report a lexical error spanning [`from_pos`, `to_pos`).
fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
- self.err_span(codemap::mk_sp(from_pos, to_pos), m)
+ self.err_span(syntax_pos::mk_sp(from_pos, to_pos), m)
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
for c in c.escape_default() {
m.push(c)
}
- self.span_diagnostic.struct_span_fatal(codemap::mk_sp(from_pos, to_pos), &m[..])
+ self.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
for c in c.escape_default() {
m.push(c)
}
- self.span_diagnostic.struct_span_err(codemap::mk_sp(from_pos, to_pos), &m[..])
+ self.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
None => {
if self.is_eof() {
self.peek_tok = token::Eof;
- self.peek_span = codemap::mk_sp(self.filemap.end_pos, self.filemap.end_pos);
+ self.peek_span = syntax_pos::mk_sp(self.filemap.end_pos, self.filemap.end_pos);
} else {
let start_bytepos = self.last_pos;
self.peek_tok = self.next_token_inner()?;
- self.peek_span = codemap::mk_sp(start_bytepos, self.last_pos);
+ self.peek_span = syntax_pos::mk_sp(start_bytepos, self.last_pos);
};
}
}
match self.curr {
Some(c) => {
if c.is_whitespace() {
- self.span_diagnostic.span_err(codemap::mk_sp(self.last_pos, self.last_pos),
+ self.span_diagnostic.span_err(syntax_pos::mk_sp(self.last_pos, self.last_pos),
"called consume_any_line_comment, but there \
was whitespace");
}
Some(TokenAndSpan {
tok: tok,
- sp: codemap::mk_sp(start_bpos, self.last_pos),
+ sp: syntax_pos::mk_sp(start_bpos, self.last_pos),
})
})
} else {
Some(TokenAndSpan {
tok: token::Comment,
- sp: codemap::mk_sp(start_bpos, self.last_pos),
+ sp: syntax_pos::mk_sp(start_bpos, self.last_pos),
})
};
}
}
return Some(TokenAndSpan {
tok: token::Shebang(self.name_from(start)),
- sp: codemap::mk_sp(start, self.last_pos),
+ sp: syntax_pos::mk_sp(start, self.last_pos),
});
}
}
}
let c = Some(TokenAndSpan {
tok: token::Whitespace,
- sp: codemap::mk_sp(start_bpos, self.last_pos),
+ sp: syntax_pos::mk_sp(start_bpos, self.last_pos),
});
debug!("scanning whitespace: {:?}", c);
c
Some(TokenAndSpan {
tok: tok,
- sp: codemap::mk_sp(start_bpos, self.last_pos),
+ sp: syntax_pos::mk_sp(start_bpos, self.last_pos),
})
})
}
let valid = if self.curr_is('{') {
self.scan_unicode_escape(delim) && !ascii_only
} else {
- let span = codemap::mk_sp(start, self.last_pos);
+ let span = syntax_pos::mk_sp(start, self.last_pos);
self.span_diagnostic
.struct_span_err(span, "incorrect unicode escape sequence")
.span_help(span,
},
c);
if e == '\r' {
- err.span_help(codemap::mk_sp(escaped_pos, last_pos),
+ err.span_help(syntax_pos::mk_sp(escaped_pos, last_pos),
"this is an isolated carriage return; consider \
checking your editor and version control \
settings");
}
if (e == '{' || e == '}') && !ascii_only {
- err.span_help(codemap::mk_sp(escaped_pos, last_pos),
+ err.span_help(syntax_pos::mk_sp(escaped_pos, last_pos),
"if used in a formatting string, curly braces \
are escaped with `{{` and `}}`");
}
mod tests {
use super::*;
- use codemap::{BytePos, CodeMap, Span, NO_EXPANSION};
+ use syntax_pos::{BytePos, Span, NO_EXPANSION};
+ use codemap::CodeMap;
use errors;
use parse::token;
use parse::token::str_to_ident;
fn mk_sh(cm: Rc<CodeMap>) -> errors::Handler {
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
- let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), None, cm);
+ let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()),
+ None,
+ cm,
+ errors::snippet::FormatMode::EnvironmentSelected);
errors::Handler::with_emitter(true, false, Box::new(emitter))
}
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();
assert_eq!(comment.tok, token::Comment);
- assert_eq!(comment.sp, ::codemap::mk_sp(BytePos(0), BytePos(7)));
+ assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7)));
assert_eq!(lexer.next_token().tok, token::Whitespace);
assert_eq!(lexer.next_token().tok,
token::DocComment(token::intern("/// test")));
// Characters and their corresponding confusables were collected from
// http://www.unicode.org/Public/security/revision-06/confusables.txt
-use codemap::mk_sp as make_span;
+use syntax_pos::mk_sp as make_span;
use errors::DiagnosticBuilder;
use super::StringReader;
//! The main parser interface
use ast;
-use codemap::{self, Span, CodeMap, FileMap};
+use codemap::CodeMap;
+use syntax_pos::{self, Span, FileMap};
use errors::{Handler, ColorConfig, DiagnosticBuilder};
use parse::parser::Parser;
use parse::token::InternedString;
use ptr::P;
use str::char_at;
+use tokenstream;
use std::cell::RefCell;
use std::iter;
source: String,
cfg: ast::CrateConfig,
sess: &'a ParseSess)
- -> PResult<'a, Vec<ast::TokenTree>> {
+ -> PResult<'a, Vec<tokenstream::TokenTree>> {
let mut p = new_parser_from_source_str(
sess,
cfg,
let end_pos = filemap.end_pos;
let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg);
- if parser.token == token::Eof && parser.span == codemap::DUMMY_SP {
- parser.span = codemap::mk_sp(end_pos, end_pos);
+ if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
+ parser.span = syntax_pos::mk_sp(end_pos, end_pos);
}
parser
// compiler expands into it
pub fn new_parser_from_tts<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
- tts: Vec<ast::TokenTree>) -> Parser<'a> {
+ tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
tts_to_parser(sess, tts, cfg)
}
/// Given a filemap, produce a sequence of token-trees
pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
- -> Vec<ast::TokenTree> {
+ -> Vec<tokenstream::TokenTree> {
// it appears to me that the cfg doesn't matter here... indeed,
// parsing tt's probably shouldn't require a parser at all.
let cfg = Vec::new();
/// Given tts and cfg, produce a parser
pub fn tts_to_parser<'a>(sess: &'a ParseSess,
- tts: Vec<ast::TokenTree>,
+ tts: Vec<tokenstream::TokenTree>,
cfg: ast::CrateConfig) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts);
let mut p = Parser::new(sess, cfg, Box::new(trdr));
#[cfg(test)]
mod tests {
use super::*;
- use std::rc::Rc;
- use codemap::{Span, BytePos, Pos, Spanned, NO_EXPANSION};
- use ast::{self, TokenTree, PatKind};
+ use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
+ use codemap::Spanned;
+ use ast::{self, PatKind};
use abi::Abi;
use attr::{first_attr_value_str_by_name, AttrMetaMethods};
use parse;
use parse::token::{str_to_ident};
use print::pprust::item_to_string;
use ptr::P;
+ use tokenstream::{self, TokenTree};
use util::parser_testing::{string_to_tts, string_to_parser};
use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt};
+ use util::ThinVec;
- // produce a codemap::span
+ // produce a syntax_pos::span
fn sp(a: u32, b: u32) -> Span {
Span {lo: BytePos(a), hi: BytePos(b), expn_id: NO_EXPANSION}
}
),
}),
span: sp(0, 1),
- attrs: None,
+ attrs: ThinVec::new(),
}))
}
)
}),
span: sp(0, 6),
- attrs: None,
+ attrs: ThinVec::new(),
}))
}
#[test]
fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
- let tts: &[ast::TokenTree] = &tts[..];
+ let tts: &[tokenstream::TokenTree] = &tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
)
if first_delimed.delim == token::Paren
&& ident.name.as_str() == "a" => {},
- _ => panic!("value 3: {:?}", **first_delimed),
+ _ => panic!("value 3: {:?}", *first_delimed),
}
let tts = &second_delimed.tts[..];
match (tts.len(), tts.get(0), tts.get(1)) {
)
if second_delimed.delim == token::Paren
&& ident.name.as_str() == "a" => {},
- _ => panic!("value 4: {:?}", **second_delimed),
+ _ => panic!("value 4: {:?}", *second_delimed),
}
},
- _ => panic!("value 2: {:?}", **macro_delimed),
+ _ => panic!("value 2: {:?}", *macro_delimed),
}
},
_ => panic!("value: {:?}",tts),
TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
TokenTree::Delimited(
sp(5, 14),
- Rc::new(ast::Delimited {
+ tokenstream::Delimited {
delim: token::DelimToken::Paren,
open_span: sp(5, 6),
tts: vec![
TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
],
close_span: sp(13, 14),
- })),
+ }),
TokenTree::Delimited(
sp(15, 21),
- Rc::new(ast::Delimited {
+ tokenstream::Delimited {
delim: token::DelimToken::Brace,
open_span: sp(15, 16),
tts: vec![
TokenTree::Token(sp(18, 19), token::Semi),
],
close_span: sp(20, 21),
- }))
+ })
];
assert_eq!(tts, expected);
),
}),
span:sp(7,8),
- attrs: None,
+ attrs: ThinVec::new(),
}))),
span:sp(0,8),
- attrs: None,
+ attrs: ThinVec::new(),
}))
}
#[test] fn parse_stmt_1 () {
assert!(string_to_stmt("b;".to_string()) ==
- Some(Spanned{
+ Some(ast::Stmt {
node: ast::StmtKind::Expr(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
),
}),
span: sp(0,1),
- attrs: None}),
- ast::DUMMY_NODE_ID),
+ attrs: ThinVec::new()})),
+ id: ast::DUMMY_NODE_ID,
span: sp(0,1)}))
}
}
},
P(ast::Block {
- stmts: vec!(Spanned{
+ stmts: vec!(ast::Stmt {
node: ast::StmtKind::Semi(P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None,
),
}),
span: sp(17,18),
- attrs: None,}),
- ast::DUMMY_NODE_ID),
+ attrs: ThinVec::new()})),
+ id: ast::DUMMY_NODE_ID,
span: sp(17,19)}),
- expr: None,
id: ast::DUMMY_NODE_ID,
rules: ast::BlockCheckMode::Default, // no idea
span: sp(15,21),
struct PatIdentVisitor {
spans: Vec<Span>
}
- impl<'v> ::visit::Visitor<'v> for PatIdentVisitor {
- fn visit_pat(&mut self, p: &'v ast::Pat) {
+ impl ::visit::Visitor for PatIdentVisitor {
+ fn visit_pat(&mut self, p: &ast::Pat) {
match p.node {
PatKind::Ident(_ , ref spannedident, _) => {
self.spans.push(spannedident.span.clone());
//!
//! Obsolete syntax that becomes too hard to parse can be removed.
-use codemap::Span;
+use syntax_pos::Span;
use parse::parser;
/// The specific types of unsupported syntax
use ast::Block;
use ast::{BlockCheckMode, CaptureBy};
use ast::{Constness, Crate, CrateConfig};
-use ast::{Decl, DeclKind, Defaultness};
-use ast::{EMPTY_CTXT, EnumDef};
+use ast::Defaultness;
+use ast::EnumDef;
use ast::{Expr, ExprKind, RangeLimits};
use ast::{Field, FnDecl};
use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
use ast::{VariantData, StructField};
use ast::StrStyle;
use ast::SelfKind;
-use ast::{Delimited, SequenceRepetition, TokenTree, TraitItem, TraitRef};
+use ast::{TraitItem, TraitRef};
use ast::{Ty, TyKind, TypeBinding, TyParam, TyParamBounds};
use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple};
use ast::{Visibility, WhereClause};
-use attr::{ThinAttributes, ThinAttributesExt, AttributesExt};
use ast::{BinOpKind, UnOp};
use ast;
-use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp, CodeMap};
+use codemap::{self, CodeMap, Spanned, spanned};
+use syntax_pos::{self, Span, BytePos, mk_sp};
use errors::{self, DiagnosticBuilder};
use ext::tt::macro_parser;
use parse;
use print::pprust;
use ptr::P;
use parse::PResult;
+use tokenstream::{self, Delimited, SequenceRepetition, TokenTree};
+use util::ThinVec;
use std::collections::HashSet;
use std::mem;
_ => unreachable!()
};
let span = $p.span;
- Some($p.mk_expr(span.lo, span.hi, ExprKind::Path(None, pt), None))
+ Some($p.mk_expr(span.lo, span.hi, ExprKind::Path(None, pt), ThinVec::new()))
}
token::Interpolated(token::NtBlock(_)) => {
// FIXME: The following avoids an issue with lexical borrowck scopes,
_ => unreachable!()
};
let span = $p.span;
- Some($p.mk_expr(span.lo, span.hi, ExprKind::Block(b), None))
+ Some($p.mk_expr(span.lo, span.hi, ExprKind::Block(b), ThinVec::new()))
}
_ => None
};
pub enum LhsExpr {
NotYetParsed,
- AttributesParsed(ThinAttributes),
+ AttributesParsed(ThinVec<Attribute>),
AlreadyParsed(P<Expr>),
}
-impl From<Option<ThinAttributes>> for LhsExpr {
- fn from(o: Option<ThinAttributes>) -> Self {
+impl From<Option<ThinVec<Attribute>>> for LhsExpr {
+ fn from(o: Option<ThinVec<Attribute>>) -> Self {
if let Some(attrs) = o {
LhsExpr::AttributesParsed(attrs)
} else {
{
let tok0 = rdr.real_token();
let span = tok0.sp;
- let filename = if span != codemap::DUMMY_SP {
+ let filename = if span != syntax_pos::DUMMY_SP {
Some(sess.codemap().span_to_filename(span))
} else { None };
let placeholder = TokenAndSpan {
}
/// Parse the items in a trait declaration
- pub fn parse_trait_items(&mut self) -> PResult<'a, Vec<TraitItem>> {
- self.parse_unspanned_seq(
- &token::OpenDelim(token::Brace),
- &token::CloseDelim(token::Brace),
- SeqSep::none(),
- |p| -> PResult<'a, TraitItem> {
- maybe_whole!(no_clone_from_p p, NtTraitItem);
- let mut attrs = p.parse_outer_attributes()?;
- let lo = p.span.lo;
-
- let (name, node) = if p.eat_keyword(keywords::Type) {
- let TyParam {ident, bounds, default, ..} = p.parse_ty_param()?;
- p.expect(&token::Semi)?;
- (ident, TraitItemKind::Type(bounds, default))
- } else if p.is_const_item() {
- p.expect_keyword(keywords::Const)?;
- let ident = p.parse_ident()?;
- p.expect(&token::Colon)?;
- let ty = p.parse_ty_sum()?;
- let default = if p.check(&token::Eq) {
- p.bump();
- let expr = p.parse_expr()?;
- p.commit_expr_expecting(&expr, token::Semi)?;
- Some(expr)
- } else {
- p.expect(&token::Semi)?;
- None
- };
- (ident, TraitItemKind::Const(ty, default))
+ pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> {
+ maybe_whole!(no_clone_from_p self, NtTraitItem);
+ let mut attrs = self.parse_outer_attributes()?;
+ let lo = self.span.lo;
+
+ let (name, node) = if self.eat_keyword(keywords::Type) {
+ let TyParam {ident, bounds, default, ..} = self.parse_ty_param()?;
+ self.expect(&token::Semi)?;
+ (ident, TraitItemKind::Type(bounds, default))
+ } else if self.is_const_item() {
+ self.expect_keyword(keywords::Const)?;
+ let ident = self.parse_ident()?;
+ self.expect(&token::Colon)?;
+ let ty = self.parse_ty_sum()?;
+ let default = if self.check(&token::Eq) {
+ self.bump();
+ let expr = self.parse_expr()?;
+ self.commit_expr_expecting(&expr, token::Semi)?;
+ Some(expr)
+ } else {
+ self.expect(&token::Semi)?;
+ None
+ };
+ (ident, TraitItemKind::Const(ty, default))
+ } else if !self.token.is_any_keyword()
+ && self.look_ahead(1, |t| *t == token::Not)
+ && (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
+ || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
+ // trait item macro.
+ // code copied from parse_macro_use_or_failure... abstraction!
+ let lo = self.span.lo;
+ let pth = self.parse_ident_into_path()?;
+ self.expect(&token::Not)?;
+
+ // eat a matched-delimiter token tree:
+ let delim = self.expect_open_delim()?;
+ let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
+ SeqSep::none(),
+ |pp| pp.parse_token_tree())?;
+ let m_ = Mac_ { path: pth, tts: tts };
+ let m: ast::Mac = codemap::Spanned { node: m_,
+ span: mk_sp(lo,
+ self.last_span.hi) };
+ if delim != token::Brace {
+ self.expect(&token::Semi)?
+ }
+ (keywords::Invalid.ident(), ast::TraitItemKind::Macro(m))
} else {
- let (constness, unsafety, abi) = match p.parse_fn_front_matter() {
+ let (constness, unsafety, abi) = match self.parse_fn_front_matter() {
Ok(cua) => cua,
Err(e) => {
loop {
- match p.token {
+ match self.token {
token::Eof => break,
-
token::CloseDelim(token::Brace) |
token::Semi => {
- p.bump();
+ self.bump();
break;
}
-
token::OpenDelim(token::Brace) => {
- p.parse_token_tree()?;
+ self.parse_token_tree()?;
break;
}
-
- _ => p.bump()
+ _ => self.bump()
}
}
}
};
- let ident = p.parse_ident()?;
- let mut generics = p.parse_generics()?;
+ let ident = self.parse_ident()?;
+ let mut generics = self.parse_generics()?;
- let d = p.parse_fn_decl_with_self(|p: &mut Parser<'a>|{
+ let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>|{
// This is somewhat dubious; We don't want to allow
// argument names to be left off if there is a
// definition...
p.parse_arg_general(false)
})?;
- generics.where_clause = p.parse_where_clause()?;
+ generics.where_clause = self.parse_where_clause()?;
let sig = ast::MethodSig {
unsafety: unsafety,
constness: constness,
abi: abi,
};
- let body = match p.token {
- token::Semi => {
- p.bump();
- debug!("parse_trait_methods(): parsing required method");
- None
- }
- token::OpenDelim(token::Brace) => {
- debug!("parse_trait_methods(): parsing provided method");
- let (inner_attrs, body) =
- p.parse_inner_attrs_and_block()?;
- attrs.extend(inner_attrs.iter().cloned());
- Some(body)
- }
+ let body = match self.token {
+ token::Semi => {
+ self.bump();
+ debug!("parse_trait_methods(): parsing required method");
+ None
+ }
+ token::OpenDelim(token::Brace) => {
+ debug!("parse_trait_methods(): parsing provided method");
+ let (inner_attrs, body) =
+ self.parse_inner_attrs_and_block()?;
+ attrs.extend(inner_attrs.iter().cloned());
+ Some(body)
+ }
- _ => {
- let token_str = p.this_token_to_string();
- return Err(p.fatal(&format!("expected `;` or `{{`, found `{}`",
- token_str)[..]))
- }
+ _ => {
+ let token_str = self.this_token_to_string();
+ return Err(self.fatal(&format!("expected `;` or `{{`, found `{}`",
+ token_str)[..]))
+ }
};
(ident, ast::TraitItemKind::Method(sig, body))
};
+ Ok(TraitItem {
+ id: ast::DUMMY_NODE_ID,
+ ident: name,
+ attrs: attrs,
+ node: node,
+ span: mk_sp(lo, self.last_span.hi),
+ })
+ }
- Ok(TraitItem {
- id: ast::DUMMY_NODE_ID,
- ident: name,
- attrs: attrs,
- node: node,
- span: mk_sp(lo, p.last_span.hi),
+
+ /// Parse the items in a trait declaration
+ pub fn parse_trait_items(&mut self) -> PResult<'a, Vec<TraitItem>> {
+ self.parse_unspanned_seq(
+ &token::OpenDelim(token::Brace),
+ &token::CloseDelim(token::Brace),
+ SeqSep::none(),
+ |p| -> PResult<'a, TraitItem> {
+ p.parse_trait_item()
})
- })
}
/// Parse a possibly mutable type
SeqSep::none(),
|p| p.parse_token_tree())?;
let hi = self.span.hi;
- TyKind::Mac(spanned(lo, hi, Mac_ { path: path, tts: tts, ctxt: EMPTY_CTXT }))
+ TyKind::Mac(spanned(lo, hi, Mac_ { path: path, tts: tts }))
} else {
// NAMED TYPE
TyKind::Path(None, path)
let lo = self.span.lo;
let literal = P(self.parse_lit()?);
let hi = self.last_span.hi;
- let expr = self.mk_expr(lo, hi, ExprKind::Lit(literal), None);
+ let expr = self.mk_expr(lo, hi, ExprKind::Lit(literal), ThinVec::new());
if minus_present {
let minus_hi = self.last_span.hi;
let unary = self.mk_unary(UnOp::Neg, expr);
- Ok(self.mk_expr(minus_lo, minus_hi, unary, None))
+ Ok(self.mk_expr(minus_lo, minus_hi, unary, ThinVec::new()))
} else {
Ok(expr)
}
})
}
- pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos,
- node: ExprKind, attrs: ThinAttributes) -> P<Expr> {
+ pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: ExprKind, attrs: ThinVec<Attribute>)
+ -> P<Expr> {
P(Expr {
id: ast::DUMMY_NODE_ID,
node: node,
span: mk_sp(lo, hi),
- attrs: attrs,
+ attrs: attrs.into(),
})
}
}
pub fn mk_mac_expr(&mut self, lo: BytePos, hi: BytePos,
- m: Mac_, attrs: ThinAttributes) -> P<Expr> {
+ m: Mac_, attrs: ThinVec<Attribute>) -> P<Expr> {
P(Expr {
id: ast::DUMMY_NODE_ID,
node: ExprKind::Mac(codemap::Spanned {node: m, span: mk_sp(lo, hi)}),
})
}
- pub fn mk_lit_u32(&mut self, i: u32, attrs: ThinAttributes) -> P<Expr> {
+ pub fn mk_lit_u32(&mut self, i: u32, attrs: ThinVec<Attribute>) -> P<Expr> {
let span = &self.span;
let lv_lit = P(codemap::Spanned {
node: LitKind::Int(i as u64, ast::LitIntType::Unsigned(UintTy::U32)),
//
// Therefore, prevent sub-parser from parsing
// attributes by giving them a empty "already parsed" list.
- let mut attrs = None;
+ let mut attrs = ThinVec::new();
let lo = self.span.lo;
let mut hi = self.span.hi;
token::OpenDelim(token::Paren) => {
self.bump();
- let attrs = self.parse_inner_attributes()?
- .into_thin_attrs()
- .prepend(attrs);
+ attrs.extend(self.parse_inner_attributes()?);
// (e) is parenthesized e
// (e,) is a tuple with only one field, e
token::OpenDelim(token::Bracket) => {
self.bump();
- let inner_attrs = self.parse_inner_attributes()?
- .into_thin_attrs();
- attrs.update(|attrs| attrs.append(inner_attrs));
+ attrs.extend(self.parse_inner_attributes()?);
if self.check(&token::CloseDelim(token::Bracket)) {
// Empty vector.
}
if self.eat_keyword(keywords::Continue) {
let ex = if self.token.is_lifetime() {
- let ex = ExprKind::Again(Some(Spanned{
+ let ex = ExprKind::Continue(Some(Spanned{
node: self.get_lifetime(),
span: self.span
}));
self.bump();
ex
} else {
- ExprKind::Again(None)
+ ExprKind::Continue(None)
};
let hi = self.last_span.hi;
return Ok(self.mk_expr(lo, hi, ex, attrs));
return Ok(self.mk_mac_expr(lo,
hi,
- Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT },
+ Mac_ { path: pth, tts: tts },
attrs));
}
if self.check(&token::OpenDelim(token::Brace)) {
let mut fields = Vec::new();
let mut base = None;
- let attrs = attrs.append(
- self.parse_inner_attributes()?
- .into_thin_attrs());
+ attrs.extend(self.parse_inner_attributes()?);
while self.token != token::CloseDelim(token::Brace) {
if self.eat(&token::DotDot) {
}
fn parse_or_use_outer_attributes(&mut self,
- already_parsed_attrs: Option<ThinAttributes>)
- -> PResult<'a, ThinAttributes> {
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
+ -> PResult<'a, ThinVec<Attribute>> {
if let Some(attrs) = already_parsed_attrs {
Ok(attrs)
} else {
- self.parse_outer_attributes().map(|a| a.into_thin_attrs())
+ self.parse_outer_attributes().map(|a| a.into())
}
}
/// Parse a block or unsafe block
pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode,
- attrs: ThinAttributes)
+ outer_attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
- let outer_attrs = attrs;
self.expect(&token::OpenDelim(token::Brace))?;
- let inner_attrs = self.parse_inner_attributes()?.into_thin_attrs();
- let attrs = outer_attrs.append(inner_attrs);
+ let mut attrs = outer_attrs;
+ attrs.extend(self.parse_inner_attributes()?);
let blk = self.parse_block_tail(lo, blk_mode)?;
return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), attrs));
/// parse a.b or a(13) or a[4] or just a
pub fn parse_dot_or_call_expr(&mut self,
- already_parsed_attrs: Option<ThinAttributes>)
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
pub fn parse_dot_or_call_expr_with(&mut self,
e0: P<Expr>,
lo: BytePos,
- attrs: ThinAttributes)
+ mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
// Stitch the list of outer attributes onto the return value.
// A little bit ugly, but the best way given the current code
self.parse_dot_or_call_expr_with_(e0, lo)
.map(|expr|
expr.map(|mut expr| {
- expr.attrs.update(|a| a.prepend(attrs));
+ attrs.extend::<Vec<_>>(expr.attrs.into());
+ expr.attrs = attrs;
match expr.node {
ExprKind::If(..) | ExprKind::IfLet(..) => {
- if !expr.attrs.as_attr_slice().is_empty() {
+ if !expr.attrs.is_empty() {
// Just point to the first attribute in there...
- let span = expr.attrs.as_attr_slice()[0].span;
+ let span = expr.attrs[0].span;
self.span_err(span,
"attributes are not yet allowed on `if` \
es.insert(0, self_value);
let id = spanned(ident_span.lo, ident_span.hi, ident);
let nd = self.mk_method_call(id, tys, es);
- self.mk_expr(lo, hi, nd, None)
+ self.mk_expr(lo, hi, nd, ThinVec::new())
}
// Field access.
_ => {
let id = spanned(ident_span.lo, ident_span.hi, ident);
let field = self.mk_field(self_value, id);
- self.mk_expr(lo, ident_span.hi, field, None)
+ self.mk_expr(lo, ident_span.hi, field, ThinVec::new())
}
})
}
// expr?
while self.eat(&token::Question) {
let hi = self.last_span.hi;
- e = self.mk_expr(lo, hi, ExprKind::Try(e), None);
+ e = self.mk_expr(lo, hi, ExprKind::Try(e), ThinVec::new());
}
// expr.f
Some(n) => {
let id = spanned(dot, hi, n);
let field = self.mk_tup_field(e, id);
- e = self.mk_expr(lo, hi, field, None);
+ e = self.mk_expr(lo, hi, field, ThinVec::new());
}
None => {
let last_span = self.last_span;
hi = self.last_span.hi;
let nd = self.mk_call(e, es);
- e = self.mk_expr(lo, hi, nd, None);
+ e = self.mk_expr(lo, hi, nd, ThinVec::new());
}
// expr[...]
hi = self.span.hi;
self.commit_expr_expecting(&ix, token::CloseDelim(token::Bracket))?;
let index = self.mk_index(e, ix);
- e = self.mk_expr(lo, hi, index, None)
+ e = self.mk_expr(lo, hi, index, ThinVec::new())
}
_ => return Ok(e)
}
)?;
let (sep, repeat) = self.parse_sep_and_kleene_op()?;
let name_num = macro_parser::count_names(&seq);
- return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
- Rc::new(SequenceRepetition {
- tts: seq,
- separator: sep,
- op: repeat,
- num_captures: name_num
- })));
+ return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), SequenceRepetition {
+ tts: seq,
+ separator: sep,
+ op: repeat,
+ num_captures: name_num
+ }));
} else if self.token.is_keyword(keywords::Crate) {
self.bump();
return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
/// Parse an optional separator followed by a Kleene-style
/// repetition token (+ or *).
pub fn parse_sep_and_kleene_op(&mut self)
- -> PResult<'a, (Option<token::Token>, ast::KleeneOp)> {
- fn parse_kleene_op<'a>(parser: &mut Parser<'a>) -> PResult<'a, Option<ast::KleeneOp>> {
+ -> PResult<'a, (Option<token::Token>, tokenstream::KleeneOp)> {
+ fn parse_kleene_op<'a>(parser: &mut Parser<'a>) ->
+ PResult<'a, Option<tokenstream::KleeneOp>> {
match parser.token {
token::BinOp(token::Star) => {
parser.bump();
- Ok(Some(ast::KleeneOp::ZeroOrMore))
+ Ok(Some(tokenstream::KleeneOp::ZeroOrMore))
},
token::BinOp(token::Plus) => {
parser.bump();
- Ok(Some(ast::KleeneOp::OneOrMore))
+ Ok(Some(tokenstream::KleeneOp::OneOrMore))
},
_ => Ok(None)
}
_ => {}
}
- Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+ Ok(TokenTree::Delimited(span, Delimited {
delim: delim,
open_span: open_span,
tts: tts,
close_span: close_span,
- })))
+ }))
},
_ => {
// invariants: the current token is not a left-delimiter,
/// Parse a prefix-unary-operator expr
pub fn parse_prefix_expr(&mut self,
- already_parsed_attrs: Option<ThinAttributes>)
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?;
let lo = self.span.lo;
let blk = self.parse_block()?;
let span = blk.span;
hi = span.hi;
- let blk_expr = self.mk_expr(span.lo, span.hi, ExprKind::Block(blk),
- None);
+ let blk_expr = self.mk_expr(span.lo, hi, ExprKind::Block(blk), ThinVec::new());
ExprKind::InPlace(place, blk_expr)
}
token::Ident(..) if self.token.is_keyword(keywords::Box) => {
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
pub fn parse_assoc_expr(&mut self,
- already_parsed_attrs: Option<ThinAttributes>)
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
self.parse_assoc_expr_with(0, already_parsed_attrs.into())
}
// Special cases:
if op == AssocOp::As {
let rhs = self.parse_ty()?;
- lhs = self.mk_expr(lhs_span.lo, rhs.span.hi,
- ExprKind::Cast(lhs, rhs), None);
+ let (lo, hi) = (lhs_span.lo, rhs.span.hi);
+ lhs = self.mk_expr(lo, hi, ExprKind::Cast(lhs, rhs), ThinVec::new());
continue
} else if op == AssocOp::Colon {
let rhs = self.parse_ty()?;
- lhs = self.mk_expr(lhs_span.lo, rhs.span.hi,
- ExprKind::Type(lhs, rhs), None);
+ let (lo, hi) = (lhs_span.lo, rhs.span.hi);
+ lhs = self.mk_expr(lo, hi, ExprKind::Type(lhs, rhs), ThinVec::new());
continue
} else if op == AssocOp::DotDot || op == AssocOp::DotDotDot {
// If we didn’t have to handle `x..`/`x...`, it would be pretty easy to
};
let r = try!(self.mk_range(Some(lhs), rhs, limits));
- lhs = self.mk_expr(lhs_span.lo, rhs_span.hi, r, None);
+ lhs = self.mk_expr(lhs_span.lo, rhs_span.hi, r, ThinVec::new());
break
}
}),
}?;
+ let (lo, hi) = (lhs_span.lo, rhs.span.hi);
lhs = match op {
AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide |
AssocOp::Modulus | AssocOp::LAnd | AssocOp::LOr | AssocOp::BitXor |
AssocOp::Equal | AssocOp::Less | AssocOp::LessEqual | AssocOp::NotEqual |
AssocOp::Greater | AssocOp::GreaterEqual => {
let ast_op = op.to_ast_binop().unwrap();
- let (lhs_span, rhs_span) = (lhs_span, rhs.span);
let binary = self.mk_binary(codemap::respan(cur_op_span, ast_op), lhs, rhs);
- self.mk_expr(lhs_span.lo, rhs_span.hi, binary, None)
+ self.mk_expr(lo, hi, binary, ThinVec::new())
}
AssocOp::Assign =>
- self.mk_expr(lhs_span.lo, rhs.span.hi, ExprKind::Assign(lhs, rhs), None),
+ self.mk_expr(lo, hi, ExprKind::Assign(lhs, rhs), ThinVec::new()),
AssocOp::Inplace =>
- self.mk_expr(lhs_span.lo, rhs.span.hi, ExprKind::InPlace(lhs, rhs), None),
+ self.mk_expr(lo, hi, ExprKind::InPlace(lhs, rhs), ThinVec::new()),
AssocOp::AssignOp(k) => {
let aop = match k {
token::Plus => BinOpKind::Add,
token::Shl => BinOpKind::Shl,
token::Shr => BinOpKind::Shr,
};
- let (lhs_span, rhs_span) = (lhs_span, rhs.span);
let aopexpr = self.mk_assign_op(codemap::respan(cur_op_span, aop), lhs, rhs);
- self.mk_expr(lhs_span.lo, rhs_span.hi, aopexpr, None)
+ self.mk_expr(lo, hi, aopexpr, ThinVec::new())
}
AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotDot => {
self.bug("As, Colon, DotDot or DotDotDot branch reached")
/// Parse prefix-forms of range notation: `..expr`, `..`, `...expr`
fn parse_prefix_range_expr(&mut self,
- already_parsed_attrs: Option<ThinAttributes>)
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
debug_assert!(self.token == token::DotDot || self.token == token::DotDotDot);
let tok = self.token.clone();
}
/// Parse an 'if' or 'if let' expression ('if' token already eaten)
- pub fn parse_if_expr(&mut self, attrs: ThinAttributes) -> PResult<'a, P<Expr>> {
+ pub fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
if self.check_keyword(keywords::Let) {
return self.parse_if_let_expr(attrs);
}
}
/// Parse an 'if let' expression ('if' token already eaten)
- pub fn parse_if_let_expr(&mut self, attrs: ThinAttributes)
+ pub fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
let lo = self.last_span.lo;
self.expect_keyword(keywords::Let)?;
pub fn parse_lambda_expr(&mut self,
lo: BytePos,
capture_clause: CaptureBy,
- attrs: ThinAttributes)
+ attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
let decl = self.parse_fn_block_decl()?;
let body_expr = self.parse_expr()?;
P(ast::Block {
id: ast::DUMMY_NODE_ID,
- stmts: vec![],
span: body_expr.span,
- expr: Some(body_expr),
+ stmts: vec![Stmt {
+ span: body_expr.span,
+ node: StmtKind::Expr(body_expr),
+ id: ast::DUMMY_NODE_ID,
+ }],
rules: BlockCheckMode::Default,
})
}
// `else` token already eaten
pub fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
if self.eat_keyword(keywords::If) {
- return self.parse_if_expr(None);
+ return self.parse_if_expr(ThinVec::new());
} else {
let blk = self.parse_block()?;
- return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), None));
+ return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprKind::Block(blk), ThinVec::new()));
}
}
/// Parse a 'for' .. 'in' expression ('for' token already eaten)
pub fn parse_for_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
span_lo: BytePos,
- attrs: ThinAttributes) -> PResult<'a, P<Expr>> {
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
let pat = self.parse_pat()?;
self.expect_keyword(keywords::In)?;
let expr = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?;
let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?;
- let attrs = attrs.append(iattrs.into_thin_attrs());
+ attrs.extend(iattrs);
let hi = self.last_span.hi;
/// Parse a 'while' or 'while let' expression ('while' token already eaten)
pub fn parse_while_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
span_lo: BytePos,
- attrs: ThinAttributes) -> PResult<'a, P<Expr>> {
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
if self.token.is_keyword(keywords::Let) {
return self.parse_while_let_expr(opt_ident, span_lo, attrs);
}
let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
- let attrs = attrs.append(iattrs.into_thin_attrs());
+ attrs.extend(iattrs);
let hi = body.span.hi;
return Ok(self.mk_expr(span_lo, hi, ExprKind::While(cond, body, opt_ident),
attrs));
/// Parse a 'while let' expression ('while' token already eaten)
pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
span_lo: BytePos,
- attrs: ThinAttributes) -> PResult<'a, P<Expr>> {
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
self.expect_keyword(keywords::Let)?;
let pat = self.parse_pat()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?;
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
- let attrs = attrs.append(iattrs.into_thin_attrs());
+ attrs.extend(iattrs);
let hi = body.span.hi;
return Ok(self.mk_expr(span_lo, hi, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs));
}
// parse `loop {...}`, `loop` token already eaten
pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
span_lo: BytePos,
- attrs: ThinAttributes) -> PResult<'a, P<Expr>> {
+ mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
- let attrs = attrs.append(iattrs.into_thin_attrs());
+ attrs.extend(iattrs);
let hi = body.span.hi;
Ok(self.mk_expr(span_lo, hi, ExprKind::Loop(body, opt_ident), attrs))
}
// `match` token already eaten
- fn parse_match_expr(&mut self, attrs: ThinAttributes) -> PResult<'a, P<Expr>> {
+ fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let match_span = self.last_span;
let lo = self.last_span.lo;
let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL,
}
return Err(e)
}
- let attrs = attrs.append(
- self.parse_inner_attributes()?.into_thin_attrs());
+ attrs.extend(self.parse_inner_attributes()?);
+
let mut arms: Vec<Arm> = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
match self.parse_arm() {
/// Parse an expression, subject to the given restrictions
pub fn parse_expr_res(&mut self, r: Restrictions,
- already_parsed_attrs: Option<ThinAttributes>)
+ already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
}
}
/// Parse the fields of a struct-like pattern
- fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<codemap::Spanned<ast::FieldPat>> , bool)> {
+ fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<codemap::Spanned<ast::FieldPat>>, bool)> {
let mut fields = Vec::new();
let mut etc = false;
let mut first = true;
};
fields.push(codemap::Spanned { span: mk_sp(lo, hi),
- node: ast::FieldPat { ident: fieldname,
- pat: subpat,
- is_shorthand: is_shorthand }});
+ node: ast::FieldPat { ident: fieldname,
+ pat: subpat,
+ is_shorthand: is_shorthand }});
}
return Ok((fields, etc));
}
(None, self.parse_path(PathStyle::Expr)?)
};
let hi = self.last_span.hi;
- Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), None))
+ Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new()))
} else {
self.parse_pat_literal_maybe_minus()
}
let tts = self.parse_seq_to_end(
&token::CloseDelim(delim),
SeqSep::none(), |p| p.parse_token_tree())?;
- let mac = Mac_ { path: path, tts: tts, ctxt: EMPTY_CTXT };
+ let mac = Mac_ { path: path, tts: tts };
pat = PatKind::Mac(codemap::Spanned {node: mac,
- span: mk_sp(lo, self.last_span.hi)});
+ span: mk_sp(lo, self.last_span.hi)});
} else {
// Parse ident @ pat
// This can give false positives and parse nullary enums,
token::DotDotDot => {
// Parse range
let hi = self.last_span.hi;
- let begin = self.mk_expr(lo, hi, ExprKind::Path(qself, path), None);
+ let begin =
+ self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new());
self.bump();
let end = self.parse_pat_range_end()?;
pat = PatKind::Range(begin, end);
pat = PatKind::TupleStruct(path, fields, ddpos)
}
_ => {
- pat = match qself {
- // Parse qualified path
- Some(qself) => PatKind::QPath(qself, path),
- // Parse nullary enum
- None => PatKind::Path(path)
- };
+ pat = PatKind::Path(qself, path);
}
}
}
}
/// Parse a local variable declaration
- fn parse_local(&mut self, attrs: ThinAttributes) -> PResult<'a, P<Local>> {
+ fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
let lo = self.span.lo;
let pat = self.parse_pat()?;
}))
}
- /// Parse a "let" stmt
- fn parse_let(&mut self, attrs: ThinAttributes) -> PResult<'a, P<Decl>> {
- let lo = self.span.lo;
- let local = self.parse_local(attrs)?;
- Ok(P(spanned(lo, self.last_span.hi, DeclKind::Local(local))))
- }
-
/// Parse a structure field
fn parse_name_and_ty(&mut self, pr: Visibility,
attrs: Vec<Attribute> ) -> PResult<'a, StructField> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
- Ok(Some(if self.check_keyword(keywords::Let) {
- self.expect_keyword(keywords::Let)?;
- let decl = self.parse_let(attrs.into_thin_attrs())?;
- let hi = decl.span.hi;
- let stmt = StmtKind::Decl(decl, ast::DUMMY_NODE_ID);
- spanned(lo, hi, stmt)
+ Ok(Some(if self.eat_keyword(keywords::Let) {
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Local(self.parse_local(attrs.into())?),
+ span: mk_sp(lo, self.last_span.hi),
+ }
} else if self.token.is_ident()
&& !self.token.is_any_keyword()
&& self.look_ahead(1, |t| *t == token::Not) {
};
if id.name == keywords::Invalid.name() {
- let mac = P(spanned(lo, hi, Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT }));
- let stmt = StmtKind::Mac(mac, style, attrs.into_thin_attrs());
- spanned(lo, hi, stmt)
+ let mac = spanned(lo, hi, Mac_ { path: pth, tts: tts });
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Mac(P((mac, style, attrs.into()))),
+ span: mk_sp(lo, hi),
+ }
} else {
// if it has a special ident, it's definitely an item
//
followed by a semicolon");
}
}
- spanned(lo, hi, StmtKind::Decl(
- P(spanned(lo, hi, DeclKind::Item(
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: mk_sp(lo, hi),
+ node: StmtKind::Item({
self.mk_item(
lo, hi, id /*id is good here*/,
- ItemKind::Mac(spanned(lo, hi,
- Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT })),
- Visibility::Inherited, attrs)))),
- ast::DUMMY_NODE_ID))
+ ItemKind::Mac(spanned(lo, hi, Mac_ { path: pth, tts: tts })),
+ Visibility::Inherited,
+ attrs)
+ }),
+ }
}
} else {
// FIXME: Bad copy of attrs
let restrictions = self.restrictions | Restrictions::NO_NONINLINE_MOD;
match self.with_res(restrictions,
|this| this.parse_item_(attrs.clone(), false, true))? {
- Some(i) => {
- let hi = i.span.hi;
- let decl = P(spanned(lo, hi, DeclKind::Item(i)));
- spanned(lo, hi, StmtKind::Decl(decl, ast::DUMMY_NODE_ID))
- }
+ Some(i) => Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: mk_sp(lo, i.span.hi),
+ node: StmtKind::Item(i),
+ },
None => {
let unused_attrs = |attrs: &[_], s: &mut Self| {
if attrs.len() > 0 {
// Remainder are line-expr stmts.
let e = self.parse_expr_res(
- Restrictions::RESTRICTION_STMT_EXPR, Some(attrs.into_thin_attrs()))?;
- let hi = e.span.hi;
- let stmt = StmtKind::Expr(e, ast::DUMMY_NODE_ID);
- spanned(lo, hi, stmt)
+ Restrictions::RESTRICTION_STMT_EXPR, Some(attrs.into()))?;
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ span: mk_sp(lo, e.span.hi),
+ node: StmtKind::Expr(e),
+ }
}
}
}))
/// Precondition: already parsed the '{'.
fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<'a, P<Block>> {
let mut stmts = vec![];
- let mut expr = None;
while !self.eat(&token::CloseDelim(token::Brace)) {
- let Spanned {node, span} = if let Some(s) = self.parse_stmt_() {
+ let Stmt {node, span, ..} = if let Some(s) = self.parse_stmt_() {
s
} else if self.token == token::Eof {
break;
// Found only `;` or `}`.
continue;
};
+
match node {
- StmtKind::Expr(e, _) => {
- self.handle_expression_like_statement(e, span, &mut stmts, &mut expr)?;
+ StmtKind::Expr(e) => {
+ self.handle_expression_like_statement(e, span, &mut stmts)?;
}
- StmtKind::Mac(mac, MacStmtStyle::NoBraces, attrs) => {
- // statement macro without braces; might be an
- // expr depending on whether a semicolon follows
- match self.token {
- token::Semi => {
- stmts.push(Spanned {
- node: StmtKind::Mac(mac, MacStmtStyle::Semicolon, attrs),
- span: mk_sp(span.lo, self.span.hi),
- });
- self.bump();
- }
- _ => {
- let e = self.mk_mac_expr(span.lo, span.hi,
- mac.and_then(|m| m.node),
- None);
- let lo = e.span.lo;
- let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
- let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
- self.handle_expression_like_statement(
- e,
- span,
- &mut stmts,
- &mut expr)?;
- }
- }
- }
- StmtKind::Mac(m, style, attrs) => {
- // statement macro; might be an expr
- match self.token {
- token::Semi => {
- stmts.push(Spanned {
- node: StmtKind::Mac(m, MacStmtStyle::Semicolon, attrs),
- span: mk_sp(span.lo, self.span.hi),
- });
- self.bump();
- }
- token::CloseDelim(token::Brace) => {
- // if a block ends in `m!(arg)` without
- // a `;`, it must be an expr
- expr = Some(self.mk_mac_expr(span.lo, span.hi,
- m.and_then(|x| x.node),
- attrs));
- }
- _ => {
- stmts.push(Spanned {
- node: StmtKind::Mac(m, style, attrs),
- span: span
- });
- }
- }
+ StmtKind::Mac(mac) => {
+ self.handle_macro_in_block(mac.unwrap(), span, &mut stmts)?;
}
_ => { // all other kinds of statements:
let mut hi = span.hi;
hi = self.last_span.hi;
}
- stmts.push(Spanned {
+ stmts.push(Stmt {
+ id: ast::DUMMY_NODE_ID,
node: node,
span: mk_sp(span.lo, hi)
});
Ok(P(ast::Block {
stmts: stmts,
- expr: expr,
id: ast::DUMMY_NODE_ID,
rules: s,
span: mk_sp(lo, self.last_span.hi),
}))
}
+ /// Record a macro invocation that was parsed in statement position inside
+ /// a block.
+ ///
+ /// * `NoBraces` style: a following `;` makes it a macro statement (style
+ ///   upgraded to `Semicolon`); otherwise the macro is re-wrapped as an
+ ///   expression so trailing `.method()` calls or binary operators can
+ ///   attach, and handed to `handle_expression_like_statement`.
+ /// * Any other style: pushed as a macro statement, upgrading the style to
+ ///   `Semicolon` when a `;` follows.
+ fn handle_macro_in_block(&mut self,
+ (mac, style, attrs): (ast::Mac, MacStmtStyle, ThinVec<Attribute>),
+ span: Span,
+ stmts: &mut Vec<Stmt>)
+ -> PResult<'a, ()> {
+ if style == MacStmtStyle::NoBraces {
+ // statement macro without braces; might be an
+ // expr depending on whether a semicolon follows
+ match self.token {
+ token::Semi => {
+ stmts.push(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Mac(P((mac, MacStmtStyle::Semicolon, attrs))),
+ span: mk_sp(span.lo, self.span.hi),
+ });
+ self.bump();
+ }
+ _ => {
+ // No `;`: reinterpret the macro as the head of an expression
+ // and continue parsing any postfix/binary continuation.
+ let e = self.mk_mac_expr(span.lo, span.hi, mac.node, ThinVec::new());
+ let lo = e.span.lo;
+ let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
+ let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
+ self.handle_expression_like_statement(e, span, stmts)?;
+ }
+ }
+ } else {
+ // statement macro; might be an expr
+ match self.token {
+ token::Semi => {
+ stmts.push(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Mac(P((mac, MacStmtStyle::Semicolon, attrs))),
+ span: mk_sp(span.lo, self.span.hi),
+ });
+ self.bump();
+ }
+ _ => {
+ stmts.push(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Mac(P((mac, style, attrs))),
+ span: span
+ });
+ }
+ }
+ }
+ Ok(())
+ }
+
fn handle_expression_like_statement(&mut self,
e: P<Expr>,
span: Span,
- stmts: &mut Vec<Stmt>,
- last_block_expr: &mut Option<P<Expr>>)
+ stmts: &mut Vec<Stmt>)
-> PResult<'a, ()> {
// expression without semicolon
if classify::expr_requires_semi_to_be_stmt(&e) {
hi: self.last_span.hi,
expn_id: span.expn_id,
};
- stmts.push(Spanned {
- node: StmtKind::Semi(e, ast::DUMMY_NODE_ID),
+ stmts.push(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Semi(e),
span: span_with_semi,
});
}
- token::CloseDelim(token::Brace) => *last_block_expr = Some(e),
_ => {
- stmts.push(Spanned {
- node: StmtKind::Expr(e, ast::DUMMY_NODE_ID),
+ stmts.push(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Expr(e),
span: span
});
}
let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
SeqSep::none(),
|p| p.parse_token_tree())?;
- let m_ = Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT };
+ let m_ = Mac_ { path: pth, tts: tts };
let m: ast::Mac = codemap::Spanned { node: m_,
- span: mk_sp(lo,
- self.last_span.hi) };
+ span: mk_sp(lo,
+ self.last_span.hi) };
if delim != token::Brace {
self.expect(&token::Semi)?
}
/// Parse trait Foo { ... }
fn parse_item_trait(&mut self, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
-
let ident = self.parse_ident()?;
let mut tps = self.parse_generics()?;
return Err(self.fatal(&format!("expected item, found `{}`", token_str)));
}
- let hi = if self.span == codemap::DUMMY_SP {
+ let hi = if self.span == syntax_pos::DUMMY_SP {
inner_lo
} else {
self.last_span.hi
SeqSep::none(),
|p| p.parse_token_tree())?;
// single-variant-enum... :
- let m = Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT };
+ let m = Mac_ { path: pth, tts: tts };
let m: ast::Mac = codemap::Spanned { node: m,
- span: mk_sp(mac_lo,
- self.last_span.hi) };
+ span: mk_sp(mac_lo,
+ self.last_span.hi) };
if delim != token::Brace {
if !self.eat(&token::Semi) {
use ptr::P;
use util::interner::{RcStr, StrInterner};
use util::interner;
+use tokenstream;
use serialize::{Decodable, Decoder, Encodable, Encoder};
use std::fmt;
/// Stuff inside brackets for attributes
NtMeta(P<ast::MetaItem>),
NtPath(Box<ast::Path>),
- NtTT(P<ast::TokenTree>), // needs P'ed to break a circularity
+ NtTT(P<tokenstream::TokenTree>), // needs P'ed to break a circularity
// These are not exposed to macros, but are used by quasiquote.
NtArm(ast::Arm),
NtImplItem(P<ast::ImplItem>),
pub use self::AnnNode::*;
use abi::{self, Abi};
-use ast::{self, TokenTree, BlockCheckMode, PatKind};
+use ast::{self, BlockCheckMode, PatKind};
use ast::{SelfKind, RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
use ast::Attribute;
-use attr::ThinAttributesExt;
use util::parser::AssocOp;
use attr;
use attr::{AttrMetaMethods, AttributeMethods};
-use codemap::{self, CodeMap, BytePos};
+use codemap::{self, CodeMap};
+use syntax_pos::{self, BytePos};
use errors;
use parse::token::{self, keywords, BinOpToken, Token, InternedString};
use parse::lexer::comments;
use print::pp::Breaks::{Consistent, Inconsistent};
use ptr::P;
use std_inject;
+use tokenstream::{self, TokenTree};
use std::ascii;
use std::io::{self, Write, Read};
to_string(|s| s.print_lifetime(e))
}
-pub fn tt_to_string(tt: &ast::TokenTree) -> String {
+pub fn tt_to_string(tt: &tokenstream::TokenTree) -> String {
to_string(|s| s.print_tt(tt))
}
-pub fn tts_to_string(tts: &[ast::TokenTree]) -> String {
+pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String {
to_string(|s| s.print_tts(tts))
}
self.end() // close the head-box
}
- pub fn bclose_(&mut self, span: codemap::Span,
+ pub fn bclose_(&mut self, span: syntax_pos::Span,
indented: usize) -> io::Result<()> {
self.bclose_maybe_open(span, indented, true)
}
- pub fn bclose_maybe_open(&mut self, span: codemap::Span,
+ pub fn bclose_maybe_open(&mut self, span: syntax_pos::Span,
indented: usize, close_box: bool) -> io::Result<()> {
try!(self.maybe_print_comment(span.hi));
try!(self.break_offset_if_not_bol(1, -(indented as isize)));
}
Ok(())
}
- pub fn bclose(&mut self, span: codemap::Span) -> io::Result<()> {
+ pub fn bclose(&mut self, span: syntax_pos::Span) -> io::Result<()> {
self.bclose_(span, INDENT_UNIT)
}
mut op: F,
mut get_span: G) -> io::Result<()> where
F: FnMut(&mut State, &T) -> io::Result<()>,
- G: FnMut(&T) -> codemap::Span,
+ G: FnMut(&T) -> syntax_pos::Span,
{
try!(self.rbox(0, b));
let len = elts.len();
pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef,
generics: &ast::Generics, ident: ast::Ident,
- span: codemap::Span,
+ span: syntax_pos::Span,
visibility: &ast::Visibility) -> io::Result<()> {
try!(self.head(&visibility_qualified(visibility, "enum")));
try!(self.print_ident(ident));
pub fn print_variants(&mut self,
variants: &[ast::Variant],
- span: codemap::Span) -> io::Result<()> {
+ span: syntax_pos::Span) -> io::Result<()> {
try!(self.bopen());
for v in variants {
try!(self.space_if_not_bol());
struct_def: &ast::VariantData,
generics: &ast::Generics,
ident: ast::Ident,
- span: codemap::Span,
+ span: syntax_pos::Span,
print_finalizer: bool) -> io::Result<()> {
try!(self.print_ident(ident));
try!(self.print_generics(generics));
/// appropriate macro, transcribe back into the grammar we just parsed from,
/// and then pretty-print the resulting AST nodes (so, e.g., we print
/// expression arguments as expressions). It can be done! I think.
- pub fn print_tt(&mut self, tt: &ast::TokenTree) -> io::Result<()> {
+ pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> {
match *tt {
TokenTree::Token(_, ref tk) => {
try!(word(&mut self.s, &token_to_string(tk)));
None => {},
}
match seq.op {
- ast::KleeneOp::ZeroOrMore => word(&mut self.s, "*"),
- ast::KleeneOp::OneOrMore => word(&mut self.s, "+"),
+ tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"),
+ tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"),
}
}
}
}
- pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> {
+ pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> {
try!(self.ibox(0));
for (i, tt) in tts.iter().enumerate() {
if i != 0 {
try!(self.print_associated_type(ti.ident, Some(bounds),
default.as_ref().map(|ty| &**ty)));
}
+ ast::TraitItemKind::Macro(codemap::Spanned { ref node, .. }) => {
+ // code copied from ItemKind::Mac:
+ self.print_path(&node.path, false, 0)?;
+ word(&mut self.s, "! ")?;
+ self.cbox(INDENT_UNIT)?;
+ self.popen()?;
+ self.print_tts(&node.tts[..])?;
+ self.pclose()?;
+ word(&mut self.s, ";")?;
+ self.end()?
+ }
}
self.ann.post(self, NodeSubItem(ti.id))
}
pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> {
try!(self.maybe_print_comment(st.span.lo));
match st.node {
- ast::StmtKind::Decl(ref decl, _) => {
- try!(self.print_decl(&decl));
+ ast::StmtKind::Local(ref loc) => {
+ try!(self.print_outer_attributes(&loc.attrs));
+ try!(self.space_if_not_bol());
+ try!(self.ibox(INDENT_UNIT));
+ try!(self.word_nbsp("let"));
+
+ try!(self.ibox(INDENT_UNIT));
+ try!(self.print_local_decl(&loc));
+ try!(self.end());
+ if let Some(ref init) = loc.init {
+ try!(self.nbsp());
+ try!(self.word_space("="));
+ try!(self.print_expr(&init));
+ }
+ try!(word(&mut self.s, ";"));
+ self.end()?;
}
- ast::StmtKind::Expr(ref expr, _) => {
+ ast::StmtKind::Item(ref item) => self.print_item(&item)?,
+ ast::StmtKind::Expr(ref expr) => {
try!(self.space_if_not_bol());
try!(self.print_expr_outer_attr_style(&expr, false));
+ if parse::classify::expr_requires_semi_to_be_stmt(expr) {
+ try!(word(&mut self.s, ";"));
+ }
}
- ast::StmtKind::Semi(ref expr, _) => {
+ ast::StmtKind::Semi(ref expr) => {
try!(self.space_if_not_bol());
try!(self.print_expr_outer_attr_style(&expr, false));
try!(word(&mut self.s, ";"));
}
- ast::StmtKind::Mac(ref mac, style, ref attrs) => {
+ ast::StmtKind::Mac(ref mac) => {
+ let (ref mac, style, ref attrs) = **mac;
try!(self.space_if_not_bol());
- try!(self.print_outer_attributes(attrs.as_attr_slice()));
+ try!(self.print_outer_attributes(&attrs));
let delim = match style {
ast::MacStmtStyle::Braces => token::Brace,
_ => token::Paren
}
}
}
- if parse::classify::stmt_ends_with_semi(&st.node) {
- try!(word(&mut self.s, ";"));
- }
self.maybe_print_trailing_comment(st.span, None)
}
try!(self.print_inner_attributes(attrs));
- for st in &blk.stmts {
- try!(self.print_stmt(st));
- }
- match blk.expr {
- Some(ref expr) => {
- try!(self.space_if_not_bol());
- try!(self.print_expr_outer_attr_style(&expr, false));
- try!(self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi)));
+ for (i, st) in blk.stmts.iter().enumerate() {
+ match st.node {
+ ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
+ try!(self.space_if_not_bol());
+ try!(self.print_expr_outer_attr_style(&expr, false));
+ try!(self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi)));
+ }
+ _ => try!(self.print_stmt(st)),
}
- _ => ()
}
+
try!(self.bclose_maybe_open(blk.span, indented, close_box));
self.ann.post(self, NodeBlock(blk))
}
is_inline: bool) -> io::Result<()> {
try!(self.maybe_print_comment(expr.span.lo));
- let attrs = expr.attrs.as_attr_slice();
+ let attrs = &expr.attrs;
if is_inline {
try!(self.print_outer_attributes_inline(attrs));
} else {
_ => false
};
- if !default_return || !body.stmts.is_empty() || body.expr.is_none() {
- try!(self.print_block_unclosed(&body));
- } else {
- // we extract the block, so as not to create another set of boxes
- let i_expr = body.expr.as_ref().unwrap();
- match i_expr.node {
- ast::ExprKind::Block(ref blk) => {
- try!(self.print_block_unclosed_with_attrs(
- &blk,
- i_expr.attrs.as_attr_slice()));
- }
- _ => {
+ match body.stmts.last().map(|stmt| &stmt.node) {
+ Some(&ast::StmtKind::Expr(ref i_expr)) if default_return &&
+ body.stmts.len() == 1 => {
+ // we extract the block, so as not to create another set of boxes
+ if let ast::ExprKind::Block(ref blk) = i_expr.node {
+ try!(self.print_block_unclosed_with_attrs(&blk, &i_expr.attrs));
+ } else {
// this is a bare expression
try!(self.print_expr(&i_expr));
try!(self.end()); // need to close a box
}
}
+ _ => try!(self.print_block_unclosed(&body)),
}
+
// a box will be closed by print_expr, but we didn't want an overall
// wrapper so we closed the corresponding opening. so create an
// empty box to satisfy the close.
try!(space(&mut self.s));
}
}
- ast::ExprKind::Again(opt_ident) => {
+ ast::ExprKind::Continue(opt_ident) => {
try!(word(&mut self.s, "continue"));
try!(space(&mut self.s));
if let Some(ident) = opt_ident {
Ok(())
}
- pub fn print_decl(&mut self, decl: &ast::Decl) -> io::Result<()> {
- try!(self.maybe_print_comment(decl.span.lo));
- match decl.node {
- ast::DeclKind::Local(ref loc) => {
- try!(self.print_outer_attributes(loc.attrs.as_attr_slice()));
- try!(self.space_if_not_bol());
- try!(self.ibox(INDENT_UNIT));
- try!(self.word_nbsp("let"));
-
- try!(self.ibox(INDENT_UNIT));
- try!(self.print_local_decl(&loc));
- try!(self.end());
- if let Some(ref init) = loc.init {
- try!(self.nbsp());
- try!(self.word_space("="));
- try!(self.print_expr(&init));
- }
- self.end()
- }
- ast::DeclKind::Item(ref item) => self.print_item(&item)
- }
- }
-
pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> {
try!(word(&mut self.s, &ident.name.as_str()));
self.ann.post(self, NodeIdent(&ident))
}
try!(self.pclose());
}
- PatKind::Path(ref path) => {
+ PatKind::Path(None, ref path) => {
try!(self.print_path(path, true, 0));
}
- PatKind::QPath(ref qself, ref path) => {
+ PatKind::Path(Some(ref qself), ref path) => {
try!(self.print_qpath(path, qself, false));
}
PatKind::Struct(ref path, ref fields, etc) => {
self.end()
}
- pub fn maybe_print_trailing_comment(&mut self, span: codemap::Span,
+ pub fn maybe_print_trailing_comment(&mut self, span: syntax_pos::Span,
next_pos: Option<BytePos>)
-> io::Result<()> {
let cm = match self.cm {
use ast;
use codemap;
use parse::token;
+ use syntax_pos;
#[test]
fn test_fun_to_string() {
let decl = ast::FnDecl {
inputs: Vec::new(),
- output: ast::FunctionRetTy::Default(codemap::DUMMY_SP),
+ output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP),
variadic: false
};
let generics = ast::Generics::default();
fn test_variant_to_string() {
let ident = token::str_to_ident("principal_skinner");
- let var = codemap::respan(codemap::DUMMY_SP, ast::Variant_ {
+ let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
name: ident,
attrs: Vec::new(),
// making this up as I go.... ?
mode: Mode,
}
-impl<'a, 'v> Visitor<'v> for ShowSpanVisitor<'a> {
+impl<'a> Visitor for ShowSpanVisitor<'a> {
fn visit_expr(&mut self, e: &ast::Expr) {
if let Mode::Expression = self.mode {
self.span_diagnostic.span_warn(e.span, "expression");
use ast;
use attr;
-use codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute};
-use codemap;
+use syntax_pos::{DUMMY_SP, Span};
+use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute};
use parse::token::{intern, InternedString, keywords};
use parse::{token, ParseSess};
use ptr::P;
#![allow(dead_code)]
#![allow(unused_imports)]
+
use self::HasTestSignature::*;
use std::iter;
use std::vec;
use attr::AttrMetaMethods;
use attr;
-use codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute};
-use codemap;
+use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, FileMap, BytePos};
+use std::rc::Rc;
+
+use codemap::{self, CodeMap, ExpnInfo, NameAndSpan, MacroAttribute};
use errors;
+use errors::snippet::{RenderedLine, SnippetData};
use config;
use entry::{self, EntryPointType};
use ext::base::{ExtCtxt, DummyMacroLoader};
let main_attr = ecx.attribute(sp, main_meta);
// pub fn main() { ... }
let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![]));
- let main_body = ecx.block_all(sp, vec![call_test_main], None);
+ let main_body = ecx.block(sp, vec![call_test_main]);
let main = ast::ItemKind::Fn(ecx.fn_decl(vec![], main_ret_ty),
ast::Unsafety::Normal,
ast::Constness::NotConst,
mk_test_desc_and_fn_rec(cx, test)
}).collect()),
span: DUMMY_SP,
- attrs: None,
+ attrs: ast::ThinVec::new(),
})),
span: DUMMY_SP,
- attrs: None,
+ attrs: ast::ThinVec::new(),
})
}
--- /dev/null
+// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! # Token Trees
+//! TokenTrees are syntactic forms for dealing with tokens. The description below is
+//! more complete; in short a TokenTree is a single token, a delimited sequence of token
+//! trees, or a sequence with repetition for list splicing as part of macro expansion.
+
+use ast::{AttrStyle};
+use codemap::{Span};
+use ext::base;
+use ext::tt::macro_parser;
+use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use parse::lexer;
+use parse::token;
+
+/// A delimited sequence of token trees
+/// (the tokens between a matched pair of `(`/`)`, `[`/`]`, or `{`/`}`,
+/// together with the spans of both delimiters).
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub struct Delimited {
+ /// The type of delimiter
+ pub delim: token::DelimToken,
+ /// The span covering the opening delimiter
+ pub open_span: Span,
+ /// The delimited sequence of token trees
+ pub tts: Vec<TokenTree>,
+ /// The span covering the closing delimiter
+ pub close_span: Span,
+}
+
+impl Delimited {
+ // Convenience accessors; `TokenTree::get_tt` uses `open_tt`/`close_tt` to
+ // traverse the delimiters as if they were ordinary member tokens.
+ /// Returns the opening delimiter as a token.
+ pub fn open_token(&self) -> token::Token {
+ token::OpenDelim(self.delim)
+ }
+
+ /// Returns the closing delimiter as a token.
+ pub fn close_token(&self) -> token::Token {
+ token::CloseDelim(self.delim)
+ }
+
+ /// Returns the opening delimiter as a token tree.
+ pub fn open_tt(&self) -> TokenTree {
+ TokenTree::Token(self.open_span, self.open_token())
+ }
+
+ /// Returns the closing delimiter as a token tree.
+ pub fn close_tt(&self) -> TokenTree {
+ TokenTree::Token(self.close_span, self.close_token())
+ }
+}
+
+/// A sequence of token trees
+/// repeated according to a Kleene operator (`*` or `+`), with an optional
+/// separator token between repetitions.
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub struct SequenceRepetition {
+ /// The sequence of token trees
+ pub tts: Vec<TokenTree>,
+ /// The optional separator
+ pub separator: Option<token::Token>,
+ /// Whether the sequence can be repeated zero (*), or one or more times (+)
+ pub op: KleeneOp,
+ /// The number of `MatchNt`s that appear in the sequence (and subsequences)
+ pub num_captures: usize,
+}
+
+/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
+/// for token sequences.
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+pub enum KleeneOp {
+ /// `*`: repeat zero or more times
+ ZeroOrMore,
+ /// `+`: repeat one or more times
+ OneOrMore,
+}
+
+/// When the main rust parser encounters a syntax-extension invocation, it
+/// parses the arguments to the invocation as a token-tree. This is a very
+/// loose structure, such that all sorts of different AST-fragments can
+/// be passed to syntax extensions using a uniform type.
+///
+/// If the syntax extension is an MBE macro, it will attempt to match its
+/// LHS token tree against the provided token tree, and if it finds a
+/// match, will transcribe the RHS token tree, splicing in any captured
+/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
+///
+/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
+/// Nothing special happens to misnamed or misplaced `SubstNt`s.
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub enum TokenTree {
+ /// A single token
+ Token(Span, token::Token),
+ /// A delimited sequence of token trees
+ Delimited(Span, Delimited),
+
+ // This only makes sense in MBE macros.
+ // (MBE = macro-by-example, i.e. `macro_rules!`.)
+
+ /// A kleene-style repetition sequence with a span
+ // FIXME(eddyb) #12938 Use DST.
+ Sequence(Span, SequenceRepetition),
+}
+
+impl TokenTree {
+ /// Number of sub-token-trees this tree yields when indexed with `get_tt`
+ /// (e.g. a doc comment expands to the tokens of a `#[doc = ...]`
+ /// attribute; a delimited group to open delimiter + contents + close
+ /// delimiter). Plain tokens have no expansion and return 0.
+ pub fn len(&self) -> usize {
+ match *self {
+ TokenTree::Token(_, token::DocComment(name)) => {
+ match doc_comment_style(&name.as_str()) {
+ AttrStyle::Outer => 2,
+ AttrStyle::Inner => 3
+ }
+ }
+ TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
+ TokenTree::Token(_, token::MatchNt(..)) => 3,
+ TokenTree::Delimited(_, ref delimed) => {
+ delimed.tts.len() + 2
+ }
+ TokenTree::Sequence(_, ref seq) => {
+ seq.tts.len()
+ }
+ TokenTree::Token(..) => 0
+ }
+ }
+
+ /// Returns the `index`-th sub-token-tree of this tree's expansion (see
+ /// `len`). Panics when called on a tree that cannot be expanded, or when
+ /// `index` is out of range for the expansion.
+ pub fn get_tt(&self, index: usize) -> TokenTree {
+ match (self, index) {
+ (&TokenTree::Token(sp, token::DocComment(_)), 0) => {
+ TokenTree::Token(sp, token::Pound)
+ }
+ (&TokenTree::Token(sp, token::DocComment(name)), 1)
+ if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
+ TokenTree::Token(sp, token::Not)
+ }
+ (&TokenTree::Token(sp, token::DocComment(name)), _) => {
+ let stripped = strip_doc_comment_decoration(&name.as_str());
+
+ // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
+ // required to wrap the text.
+ let num_of_hashes = stripped.chars().scan(0, |cnt, x| {
+ *cnt = if x == '"' {
+ 1
+ } else if *cnt != 0 && x == '#' {
+ *cnt + 1
+ } else {
+ 0
+ };
+ Some(*cnt)
+ }).max().unwrap_or(0);
+
+ // The doc text itself becomes a raw string literal inside a
+ // bracketed `[doc = r#"..."#]` group.
+ TokenTree::Delimited(sp, Delimited {
+ delim: token::Bracket,
+ open_span: sp,
+ tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
+ TokenTree::Token(sp, token::Eq),
+ TokenTree::Token(sp, token::Literal(
+ token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
+ close_span: sp,
+ })
+ }
+ (&TokenTree::Delimited(_, ref delimed), _) => {
+ if index == 0 {
+ return delimed.open_tt();
+ }
+ if index == delimed.tts.len() + 1 {
+ return delimed.close_tt();
+ }
+ delimed.tts[index - 1].clone()
+ }
+ (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
+ let v = [TokenTree::Token(sp, token::Dollar),
+ TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
+ v[index].clone()
+ }
+ (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
+ let v = [TokenTree::Token(sp, token::SubstNt(name)),
+ TokenTree::Token(sp, token::Colon),
+ TokenTree::Token(sp, token::Ident(kind))];
+ v[index].clone()
+ }
+ (&TokenTree::Sequence(_, ref seq), _) => {
+ seq.tts[index].clone()
+ }
+ // Plain tokens (and anything else) have no sub-trees.
+ _ => panic!("Cannot expand a token tree")
+ }
+ }
+
+ /// Returns the `Span` corresponding to this token tree.
+ pub fn get_span(&self) -> Span {
+ match *self {
+ TokenTree::Token(span, _) => span,
+ TokenTree::Delimited(span, _) => span,
+ TokenTree::Sequence(span, _) => span,
+ }
+ }
+
+ /// Use this token tree as a matcher to parse given tts.
+ pub fn parse(cx: &base::ExtCtxt, mtch: &[TokenTree], tts: &[TokenTree])
+ -> macro_parser::NamedParseResult {
+ // `None` is because we're not interpolating
+ let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
+ None,
+ None,
+ tts.iter().cloned().collect(),
+ true);
+ macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch)
+ }
+}
+
use visit::*;
use ast::*;
-use codemap::Span;
+use syntax_pos::Span;
pub struct NodeCounter {
pub count: usize,
}
}
-impl<'v> Visitor<'v> for NodeCounter {
+impl Visitor for NodeCounter {
fn visit_ident(&mut self, span: Span, ident: Ident) {
self.count += 1;
walk_ident(self, span, ident);
}
- fn visit_mod(&mut self, m: &'v Mod, _s: Span, _n: NodeId) {
+ fn visit_mod(&mut self, m: &Mod, _s: Span, _n: NodeId) {
self.count += 1;
walk_mod(self, m)
}
- fn visit_foreign_item(&mut self, i: &'v ForeignItem) {
+ fn visit_foreign_item(&mut self, i: &ForeignItem) {
self.count += 1;
walk_foreign_item(self, i)
}
- fn visit_item(&mut self, i: &'v Item) {
+ fn visit_item(&mut self, i: &Item) {
self.count += 1;
walk_item(self, i)
}
- fn visit_local(&mut self, l: &'v Local) {
+ fn visit_local(&mut self, l: &Local) {
self.count += 1;
walk_local(self, l)
}
- fn visit_block(&mut self, b: &'v Block) {
+ fn visit_block(&mut self, b: &Block) {
self.count += 1;
walk_block(self, b)
}
- fn visit_stmt(&mut self, s: &'v Stmt) {
+ fn visit_stmt(&mut self, s: &Stmt) {
self.count += 1;
walk_stmt(self, s)
}
- fn visit_arm(&mut self, a: &'v Arm) {
+ fn visit_arm(&mut self, a: &Arm) {
self.count += 1;
walk_arm(self, a)
}
- fn visit_pat(&mut self, p: &'v Pat) {
+ fn visit_pat(&mut self, p: &Pat) {
self.count += 1;
walk_pat(self, p)
}
- fn visit_decl(&mut self, d: &'v Decl) {
- self.count += 1;
- walk_decl(self, d)
- }
- fn visit_expr(&mut self, ex: &'v Expr) {
+ fn visit_expr(&mut self, ex: &Expr) {
self.count += 1;
walk_expr(self, ex)
}
- fn visit_ty(&mut self, t: &'v Ty) {
+ fn visit_ty(&mut self, t: &Ty) {
self.count += 1;
walk_ty(self, t)
}
- fn visit_generics(&mut self, g: &'v Generics) {
+ fn visit_generics(&mut self, g: &Generics) {
self.count += 1;
walk_generics(self, g)
}
- fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Block, s: Span, _: NodeId) {
+ fn visit_fn(&mut self, fk: FnKind, fd: &FnDecl, b: &Block, s: Span, _: NodeId) {
self.count += 1;
walk_fn(self, fk, fd, b, s)
}
- fn visit_trait_item(&mut self, ti: &'v TraitItem) {
+ fn visit_trait_item(&mut self, ti: &TraitItem) {
self.count += 1;
walk_trait_item(self, ti)
}
- fn visit_impl_item(&mut self, ii: &'v ImplItem) {
+ fn visit_impl_item(&mut self, ii: &ImplItem) {
self.count += 1;
walk_impl_item(self, ii)
}
- fn visit_trait_ref(&mut self, t: &'v TraitRef) {
+ fn visit_trait_ref(&mut self, t: &TraitRef) {
self.count += 1;
walk_trait_ref(self, t)
}
- fn visit_ty_param_bound(&mut self, bounds: &'v TyParamBound) {
+ fn visit_ty_param_bound(&mut self, bounds: &TyParamBound) {
self.count += 1;
walk_ty_param_bound(self, bounds)
}
- fn visit_poly_trait_ref(&mut self, t: &'v PolyTraitRef, m: &'v TraitBoundModifier) {
+ fn visit_poly_trait_ref(&mut self, t: &PolyTraitRef, m: &TraitBoundModifier) {
self.count += 1;
walk_poly_trait_ref(self, t, m)
}
- fn visit_variant_data(&mut self, s: &'v VariantData, _: Ident,
- _: &'v Generics, _: NodeId, _: Span) {
+ fn visit_variant_data(&mut self, s: &VariantData, _: Ident,
+ _: &Generics, _: NodeId, _: Span) {
self.count += 1;
walk_struct_def(self, s)
}
- fn visit_struct_field(&mut self, s: &'v StructField) {
+ fn visit_struct_field(&mut self, s: &StructField) {
self.count += 1;
walk_struct_field(self, s)
}
- fn visit_enum_def(&mut self, enum_definition: &'v EnumDef,
- generics: &'v Generics, item_id: NodeId, _: Span) {
+ fn visit_enum_def(&mut self, enum_definition: &EnumDef,
+ generics: &Generics, item_id: NodeId, _: Span) {
self.count += 1;
walk_enum_def(self, enum_definition, generics, item_id)
}
- fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) {
+ fn visit_variant(&mut self, v: &Variant, g: &Generics, item_id: NodeId) {
self.count += 1;
walk_variant(self, v, g, item_id)
}
- fn visit_lifetime(&mut self, lifetime: &'v Lifetime) {
+ fn visit_lifetime(&mut self, lifetime: &Lifetime) {
self.count += 1;
walk_lifetime(self, lifetime)
}
- fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) {
+ fn visit_lifetime_def(&mut self, lifetime: &LifetimeDef) {
self.count += 1;
walk_lifetime_def(self, lifetime)
}
- fn visit_mac(&mut self, _mac: &'v Mac) {
+ fn visit_mac(&mut self, _mac: &Mac) {
self.count += 1;
walk_mac(self, _mac)
}
- fn visit_path(&mut self, path: &'v Path, _id: NodeId) {
+ fn visit_path(&mut self, path: &Path, _id: NodeId) {
self.count += 1;
walk_path(self, path)
}
- fn visit_path_list_item(&mut self, prefix: &'v Path, item: &'v PathListItem) {
+ fn visit_path_list_item(&mut self, prefix: &Path, item: &PathListItem) {
self.count += 1;
walk_path_list_item(self, prefix, item)
}
- fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &'v PathParameters) {
+ fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &PathParameters) {
self.count += 1;
walk_path_parameters(self, path_span, path_parameters)
}
- fn visit_assoc_type_binding(&mut self, type_binding: &'v TypeBinding) {
+ fn visit_assoc_type_binding(&mut self, type_binding: &TypeBinding) {
self.count += 1;
walk_assoc_type_binding(self, type_binding)
}
- fn visit_attribute(&mut self, _attr: &'v Attribute) {
+ fn visit_attribute(&mut self, _attr: &Attribute) {
self.count += 1;
}
- fn visit_macro_def(&mut self, macro_def: &'v MacroDef) {
+ fn visit_macro_def(&mut self, macro_def: &MacroDef) {
self.count += 1;
walk_macro_def(self, macro_def)
}
use parse::parser::Parser;
use parse::token;
use ptr::P;
+use tokenstream;
use std::iter::Peekable;
/// Map a string to tts, using a made-up filename:
-pub fn string_to_tts(source_str: String) -> Vec<ast::TokenTree> {
+pub fn string_to_tts(source_str: String) -> Vec<tokenstream::TokenTree> {
let ps = ParseSess::new();
filemap_to_tts(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// A vector type optimized for cases where this size is usually 0 (c.f. `SmallVector`).
+/// The `Option<Box<..>>` wrapping allows us to represent a zero sized vector with `None`,
+/// which uses only a single (null) pointer.
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub struct ThinVec<T>(Option<Box<Vec<T>>>);
+
+impl<T> ThinVec<T> {
+ pub fn new() -> Self {
+ ThinVec(None)
+ }
+}
+
+impl<T> From<Vec<T>> for ThinVec<T> {
+ fn from(vec: Vec<T>) -> Self {
+ if vec.is_empty() {
+ ThinVec(None)
+ } else {
+ ThinVec(Some(Box::new(vec)))
+ }
+ }
+}
+
+impl<T> Into<Vec<T>> for ThinVec<T> {
+ fn into(self) -> Vec<T> {
+ match self {
+ ThinVec(None) => Vec::new(),
+ ThinVec(Some(vec)) => *vec,
+ }
+ }
+}
+
+impl<T> ::std::ops::Deref for ThinVec<T> {
+ type Target = [T];
+ fn deref(&self) -> &[T] {
+ match *self {
+ ThinVec(None) => &[],
+ ThinVec(Some(ref vec)) => vec,
+ }
+ }
+}
+
+impl<T> Extend<T> for ThinVec<T> {
+ fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ match *self {
+ ThinVec(Some(ref mut vec)) => vec.extend(iter),
+ ThinVec(None) => *self = iter.into_iter().collect::<Vec<_>>().into(),
+ }
+ }
+}
use abi::Abi;
use ast::*;
-use attr::ThinAttributesExt;
-use codemap::{Span, Spanned};
+use syntax_pos::Span;
+use codemap::Spanned;
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum FnKind<'a> {
/// explicitly, you need to override each method. (And you also need
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
-pub trait Visitor<'v> : Sized {
+pub trait Visitor: Sized {
fn visit_name(&mut self, _span: Span, _name: Name) {
// Nothing to do.
}
fn visit_ident(&mut self, span: Span, ident: Ident) {
walk_ident(self, span, ident);
}
- fn visit_mod(&mut self, m: &'v Mod, _s: Span, _n: NodeId) { walk_mod(self, m) }
- fn visit_foreign_item(&mut self, i: &'v ForeignItem) { walk_foreign_item(self, i) }
- fn visit_item(&mut self, i: &'v Item) { walk_item(self, i) }
- fn visit_local(&mut self, l: &'v Local) { walk_local(self, l) }
- fn visit_block(&mut self, b: &'v Block) { walk_block(self, b) }
- fn visit_stmt(&mut self, s: &'v Stmt) { walk_stmt(self, s) }
- fn visit_arm(&mut self, a: &'v Arm) { walk_arm(self, a) }
- fn visit_pat(&mut self, p: &'v Pat) { walk_pat(self, p) }
- fn visit_decl(&mut self, d: &'v Decl) { walk_decl(self, d) }
- fn visit_expr(&mut self, ex: &'v Expr) { walk_expr(self, ex) }
- fn visit_expr_post(&mut self, _ex: &'v Expr) { }
- fn visit_ty(&mut self, t: &'v Ty) { walk_ty(self, t) }
- fn visit_generics(&mut self, g: &'v Generics) { walk_generics(self, g) }
- fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Block, s: Span, _: NodeId) {
+ fn visit_mod(&mut self, m: &Mod, _s: Span, _n: NodeId) { walk_mod(self, m) }
+ fn visit_foreign_item(&mut self, i: &ForeignItem) { walk_foreign_item(self, i) }
+ fn visit_item(&mut self, i: &Item) { walk_item(self, i) }
+ fn visit_local(&mut self, l: &Local) { walk_local(self, l) }
+ fn visit_block(&mut self, b: &Block) { walk_block(self, b) }
+ fn visit_stmt(&mut self, s: &Stmt) { walk_stmt(self, s) }
+ fn visit_arm(&mut self, a: &Arm) { walk_arm(self, a) }
+ fn visit_pat(&mut self, p: &Pat) { walk_pat(self, p) }
+ fn visit_expr(&mut self, ex: &Expr) { walk_expr(self, ex) }
+ fn visit_expr_post(&mut self, _ex: &Expr) { }
+ fn visit_ty(&mut self, t: &Ty) { walk_ty(self, t) }
+ fn visit_generics(&mut self, g: &Generics) { walk_generics(self, g) }
+ fn visit_fn(&mut self, fk: FnKind, fd: &FnDecl, b: &Block, s: Span, _: NodeId) {
walk_fn(self, fk, fd, b, s)
}
- fn visit_trait_item(&mut self, ti: &'v TraitItem) { walk_trait_item(self, ti) }
- fn visit_impl_item(&mut self, ii: &'v ImplItem) { walk_impl_item(self, ii) }
- fn visit_trait_ref(&mut self, t: &'v TraitRef) { walk_trait_ref(self, t) }
- fn visit_ty_param_bound(&mut self, bounds: &'v TyParamBound) {
+ fn visit_trait_item(&mut self, ti: &TraitItem) { walk_trait_item(self, ti) }
+ fn visit_impl_item(&mut self, ii: &ImplItem) { walk_impl_item(self, ii) }
+ fn visit_trait_ref(&mut self, t: &TraitRef) { walk_trait_ref(self, t) }
+ fn visit_ty_param_bound(&mut self, bounds: &TyParamBound) {
walk_ty_param_bound(self, bounds)
}
- fn visit_poly_trait_ref(&mut self, t: &'v PolyTraitRef, m: &'v TraitBoundModifier) {
+ fn visit_poly_trait_ref(&mut self, t: &PolyTraitRef, m: &TraitBoundModifier) {
walk_poly_trait_ref(self, t, m)
}
- fn visit_variant_data(&mut self, s: &'v VariantData, _: Ident,
- _: &'v Generics, _: NodeId, _: Span) {
+ fn visit_variant_data(&mut self, s: &VariantData, _: Ident,
+ _: &Generics, _: NodeId, _: Span) {
walk_struct_def(self, s)
}
- fn visit_struct_field(&mut self, s: &'v StructField) { walk_struct_field(self, s) }
- fn visit_enum_def(&mut self, enum_definition: &'v EnumDef,
- generics: &'v Generics, item_id: NodeId, _: Span) {
+ fn visit_struct_field(&mut self, s: &StructField) { walk_struct_field(self, s) }
+ fn visit_enum_def(&mut self, enum_definition: &EnumDef,
+ generics: &Generics, item_id: NodeId, _: Span) {
walk_enum_def(self, enum_definition, generics, item_id)
}
- fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) {
+ fn visit_variant(&mut self, v: &Variant, g: &Generics, item_id: NodeId) {
walk_variant(self, v, g, item_id)
}
- fn visit_lifetime(&mut self, lifetime: &'v Lifetime) {
+ fn visit_lifetime(&mut self, lifetime: &Lifetime) {
walk_lifetime(self, lifetime)
}
- fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) {
+ fn visit_lifetime_def(&mut self, lifetime: &LifetimeDef) {
walk_lifetime_def(self, lifetime)
}
- fn visit_mac(&mut self, _mac: &'v Mac) {
+ fn visit_mac(&mut self, _mac: &Mac) {
panic!("visit_mac disabled by default");
// NB: see note about macros above.
// if you really want a visitor that
// definition in your trait impl:
// visit::walk_mac(self, _mac)
}
- fn visit_path(&mut self, path: &'v Path, _id: NodeId) {
+ fn visit_path(&mut self, path: &Path, _id: NodeId) {
walk_path(self, path)
}
- fn visit_path_list_item(&mut self, prefix: &'v Path, item: &'v PathListItem) {
+ fn visit_path_list_item(&mut self, prefix: &Path, item: &PathListItem) {
walk_path_list_item(self, prefix, item)
}
- fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) {
+ fn visit_path_segment(&mut self, path_span: Span, path_segment: &PathSegment) {
walk_path_segment(self, path_span, path_segment)
}
- fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &'v PathParameters) {
+ fn visit_path_parameters(&mut self, path_span: Span, path_parameters: &PathParameters) {
walk_path_parameters(self, path_span, path_parameters)
}
- fn visit_assoc_type_binding(&mut self, type_binding: &'v TypeBinding) {
+ fn visit_assoc_type_binding(&mut self, type_binding: &TypeBinding) {
walk_assoc_type_binding(self, type_binding)
}
- fn visit_attribute(&mut self, _attr: &'v Attribute) {}
- fn visit_macro_def(&mut self, macro_def: &'v MacroDef) {
+ fn visit_attribute(&mut self, _attr: &Attribute) {}
+ fn visit_macro_def(&mut self, macro_def: &MacroDef) {
walk_macro_def(self, macro_def)
}
- fn visit_vis(&mut self, vis: &'v Visibility) {
+ fn visit_vis(&mut self, vis: &Visibility) {
walk_vis(self, vis)
}
}
}
}
-pub fn walk_opt_name<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_name: Option<Name>) {
+pub fn walk_opt_name<V: Visitor>(visitor: &mut V, span: Span, opt_name: Option<Name>) {
if let Some(name) = opt_name {
visitor.visit_name(span, name);
}
}
-pub fn walk_opt_ident<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_ident: Option<Ident>) {
+pub fn walk_opt_ident<V: Visitor>(visitor: &mut V, span: Span, opt_ident: Option<Ident>) {
if let Some(ident) = opt_ident {
visitor.visit_ident(span, ident);
}
}
-pub fn walk_opt_sp_ident<'v, V: Visitor<'v>>(visitor: &mut V,
- opt_sp_ident: &Option<Spanned<Ident>>) {
+pub fn walk_opt_sp_ident<V: Visitor>(visitor: &mut V, opt_sp_ident: &Option<Spanned<Ident>>) {
if let Some(ref sp_ident) = *opt_sp_ident {
visitor.visit_ident(sp_ident.span, sp_ident.node);
}
}
-pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, ident: Ident) {
+pub fn walk_ident<V: Visitor>(visitor: &mut V, span: Span, ident: Ident) {
visitor.visit_name(span, ident.name);
}
-pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) {
+pub fn walk_crate<V: Visitor>(visitor: &mut V, krate: &Crate) {
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
walk_list!(visitor, visit_attribute, &krate.attrs);
walk_list!(visitor, visit_macro_def, &krate.exported_macros);
}
-pub fn walk_macro_def<'v, V: Visitor<'v>>(visitor: &mut V, macro_def: &'v MacroDef) {
+pub fn walk_macro_def<V: Visitor>(visitor: &mut V, macro_def: &MacroDef) {
visitor.visit_ident(macro_def.span, macro_def.ident);
walk_opt_ident(visitor, macro_def.span, macro_def.imported_from);
walk_list!(visitor, visit_attribute, &macro_def.attrs);
}
-pub fn walk_mod<'v, V: Visitor<'v>>(visitor: &mut V, module: &'v Mod) {
+pub fn walk_mod<V: Visitor>(visitor: &mut V, module: &Mod) {
walk_list!(visitor, visit_item, &module.items);
}
-pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) {
- for attr in local.attrs.as_attr_slice() {
+pub fn walk_local<V: Visitor>(visitor: &mut V, local: &Local) {
+ for attr in local.attrs.iter() {
visitor.visit_attribute(attr);
}
visitor.visit_pat(&local.pat);
walk_list!(visitor, visit_expr, &local.init);
}
-pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
+pub fn walk_lifetime<V: Visitor>(visitor: &mut V, lifetime: &Lifetime) {
visitor.visit_name(lifetime.span, lifetime.name);
}
-pub fn walk_lifetime_def<'v, V: Visitor<'v>>(visitor: &mut V,
- lifetime_def: &'v LifetimeDef) {
+pub fn walk_lifetime_def<V: Visitor>(visitor: &mut V, lifetime_def: &LifetimeDef) {
visitor.visit_lifetime(&lifetime_def.lifetime);
walk_list!(visitor, visit_lifetime, &lifetime_def.bounds);
}
-pub fn walk_poly_trait_ref<'v, V>(visitor: &mut V,
- trait_ref: &'v PolyTraitRef,
- _modifier: &'v TraitBoundModifier)
- where V: Visitor<'v>
+pub fn walk_poly_trait_ref<V>(visitor: &mut V, trait_ref: &PolyTraitRef, _: &TraitBoundModifier)
+ where V: Visitor,
{
walk_list!(visitor, visit_lifetime_def, &trait_ref.bound_lifetimes);
visitor.visit_trait_ref(&trait_ref.trait_ref);
}
-pub fn walk_trait_ref<'v,V>(visitor: &mut V,
- trait_ref: &'v TraitRef)
- where V: Visitor<'v>
-{
+pub fn walk_trait_ref<V: Visitor>(visitor: &mut V, trait_ref: &TraitRef) {
visitor.visit_path(&trait_ref.path, trait_ref.ref_id)
}
-pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
+pub fn walk_item<V: Visitor>(visitor: &mut V, item: &Item) {
visitor.visit_vis(&item.vis);
visitor.visit_ident(item.span, item.ident);
match item.node {
walk_list!(visitor, visit_attribute, &item.attrs);
}
-pub fn walk_enum_def<'v, V: Visitor<'v>>(visitor: &mut V,
- enum_definition: &'v EnumDef,
- generics: &'v Generics,
- item_id: NodeId) {
+pub fn walk_enum_def<V: Visitor>(visitor: &mut V,
+ enum_definition: &EnumDef,
+ generics: &Generics,
+ item_id: NodeId) {
walk_list!(visitor, visit_variant, &enum_definition.variants, generics, item_id);
}
-pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V,
- variant: &'v Variant,
- generics: &'v Generics,
- item_id: NodeId) {
+pub fn walk_variant<V>(visitor: &mut V, variant: &Variant, generics: &Generics, item_id: NodeId)
+ where V: Visitor,
+{
visitor.visit_ident(variant.span, variant.node.name);
visitor.visit_variant_data(&variant.node.data, variant.node.name,
generics, item_id, variant.span);
walk_list!(visitor, visit_attribute, &variant.node.attrs);
}
-pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) {
+pub fn walk_ty<V: Visitor>(visitor: &mut V, typ: &Ty) {
match typ.node {
TyKind::Vec(ref ty) | TyKind::Paren(ref ty) => {
visitor.visit_ty(ty)
}
}
-pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
+pub fn walk_path<V: Visitor>(visitor: &mut V, path: &Path) {
for segment in &path.segments {
visitor.visit_path_segment(path.span, segment);
}
}
-pub fn walk_path_list_item<'v, V: Visitor<'v>>(visitor: &mut V, _prefix: &'v Path,
- item: &'v PathListItem) {
+pub fn walk_path_list_item<V: Visitor>(visitor: &mut V, _prefix: &Path, item: &PathListItem) {
walk_opt_ident(visitor, item.span, item.node.name());
walk_opt_ident(visitor, item.span, item.node.rename());
}
-pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
- path_span: Span,
- segment: &'v PathSegment) {
+pub fn walk_path_segment<V: Visitor>(visitor: &mut V, path_span: Span, segment: &PathSegment) {
visitor.visit_ident(path_span, segment.identifier);
visitor.visit_path_parameters(path_span, &segment.parameters);
}
-pub fn walk_path_parameters<'v, V: Visitor<'v>>(visitor: &mut V,
- _path_span: Span,
- path_parameters: &'v PathParameters) {
+pub fn walk_path_parameters<V>(visitor: &mut V, _path_span: Span, path_parameters: &PathParameters)
+ where V: Visitor,
+{
match *path_parameters {
PathParameters::AngleBracketed(ref data) => {
walk_list!(visitor, visit_ty, &data.types);
}
}
-pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V,
- type_binding: &'v TypeBinding) {
+pub fn walk_assoc_type_binding<V: Visitor>(visitor: &mut V, type_binding: &TypeBinding) {
visitor.visit_ident(type_binding.span, type_binding.ident);
visitor.visit_ty(&type_binding.ty);
}
-pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
+pub fn walk_pat<V: Visitor>(visitor: &mut V, pattern: &Pat) {
match pattern.node {
PatKind::TupleStruct(ref path, ref children, _) => {
visitor.visit_path(path, pattern.id);
walk_list!(visitor, visit_pat, children);
}
- PatKind::Path(ref path) => {
- visitor.visit_path(path, pattern.id);
- }
- PatKind::QPath(ref qself, ref path) => {
- visitor.visit_ty(&qself.ty);
+ PatKind::Path(ref opt_qself, ref path) => {
+ if let Some(ref qself) = *opt_qself {
+ visitor.visit_ty(&qself.ty);
+ }
visitor.visit_path(path, pattern.id)
}
PatKind::Struct(ref path, ref fields, _) => {
}
}
-pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V,
- foreign_item: &'v ForeignItem) {
+pub fn walk_foreign_item<V: Visitor>(visitor: &mut V, foreign_item: &ForeignItem) {
visitor.visit_vis(&foreign_item.vis);
visitor.visit_ident(foreign_item.span, foreign_item.ident);
walk_list!(visitor, visit_attribute, &foreign_item.attrs);
}
-pub fn walk_ty_param_bound<'v, V: Visitor<'v>>(visitor: &mut V,
- bound: &'v TyParamBound) {
+pub fn walk_ty_param_bound<V: Visitor>(visitor: &mut V, bound: &TyParamBound) {
match *bound {
TraitTyParamBound(ref typ, ref modifier) => {
visitor.visit_poly_trait_ref(typ, modifier);
}
}
-pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics) {
+pub fn walk_generics<V: Visitor>(visitor: &mut V, generics: &Generics) {
for param in &generics.ty_params {
visitor.visit_ident(param.span, param.ident);
walk_list!(visitor, visit_ty_param_bound, &param.bounds);
}
}
-pub fn walk_fn_ret_ty<'v, V: Visitor<'v>>(visitor: &mut V, ret_ty: &'v FunctionRetTy) {
+pub fn walk_fn_ret_ty<V: Visitor>(visitor: &mut V, ret_ty: &FunctionRetTy) {
if let FunctionRetTy::Ty(ref output_ty) = *ret_ty {
visitor.visit_ty(output_ty)
}
}
-pub fn walk_fn_decl<'v, V: Visitor<'v>>(visitor: &mut V, function_declaration: &'v FnDecl) {
+pub fn walk_fn_decl<V: Visitor>(visitor: &mut V, function_declaration: &FnDecl) {
for argument in &function_declaration.inputs {
visitor.visit_pat(&argument.pat);
visitor.visit_ty(&argument.ty)
walk_fn_ret_ty(visitor, &function_declaration.output)
}
-pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V,
- function_kind: FnKind<'v>) {
+pub fn walk_fn_kind<V: Visitor>(visitor: &mut V, function_kind: FnKind) {
match function_kind {
FnKind::ItemFn(_, generics, _, _, _, _) => {
visitor.visit_generics(generics);
}
}
-pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V,
- function_kind: FnKind<'v>,
- function_declaration: &'v FnDecl,
- function_body: &'v Block,
- _span: Span) {
- walk_fn_decl(visitor, function_declaration);
- walk_fn_kind(visitor, function_kind);
- visitor.visit_block(function_body)
+pub fn walk_fn<V>(visitor: &mut V, kind: FnKind, declaration: &FnDecl, body: &Block, _span: Span)
+ where V: Visitor,
+{
+ walk_fn_decl(visitor, declaration);
+ walk_fn_kind(visitor, kind);
+ visitor.visit_block(body)
}
-pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
+pub fn walk_trait_item<V: Visitor>(visitor: &mut V, trait_item: &TraitItem) {
visitor.visit_ident(trait_item.span, trait_item.ident);
walk_list!(visitor, visit_attribute, &trait_item.attrs);
match trait_item.node {
walk_list!(visitor, visit_ty_param_bound, bounds);
walk_list!(visitor, visit_ty, default);
}
+ TraitItemKind::Macro(ref mac) => {
+ visitor.visit_mac(mac);
+ }
}
}
-pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplItem) {
+pub fn walk_impl_item<V: Visitor>(visitor: &mut V, impl_item: &ImplItem) {
visitor.visit_vis(&impl_item.vis);
visitor.visit_ident(impl_item.span, impl_item.ident);
walk_list!(visitor, visit_attribute, &impl_item.attrs);
}
}
-pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V,
- struct_definition: &'v VariantData) {
+pub fn walk_struct_def<V: Visitor>(visitor: &mut V, struct_definition: &VariantData) {
walk_list!(visitor, visit_struct_field, struct_definition.fields());
}
-pub fn walk_struct_field<'v, V: Visitor<'v>>(visitor: &mut V,
- struct_field: &'v StructField) {
+pub fn walk_struct_field<V: Visitor>(visitor: &mut V, struct_field: &StructField) {
visitor.visit_vis(&struct_field.vis);
walk_opt_ident(visitor, struct_field.span, struct_field.ident);
visitor.visit_ty(&struct_field.ty);
walk_list!(visitor, visit_attribute, &struct_field.attrs);
}
-pub fn walk_block<'v, V: Visitor<'v>>(visitor: &mut V, block: &'v Block) {
+pub fn walk_block<V: Visitor>(visitor: &mut V, block: &Block) {
walk_list!(visitor, visit_stmt, &block.stmts);
- walk_list!(visitor, visit_expr, &block.expr);
}
-pub fn walk_stmt<'v, V: Visitor<'v>>(visitor: &mut V, statement: &'v Stmt) {
+pub fn walk_stmt<V: Visitor>(visitor: &mut V, statement: &Stmt) {
match statement.node {
- StmtKind::Decl(ref declaration, _) => visitor.visit_decl(declaration),
- StmtKind::Expr(ref expression, _) | StmtKind::Semi(ref expression, _) => {
+ StmtKind::Local(ref local) => visitor.visit_local(local),
+ StmtKind::Item(ref item) => visitor.visit_item(item),
+ StmtKind::Expr(ref expression) | StmtKind::Semi(ref expression) => {
visitor.visit_expr(expression)
}
- StmtKind::Mac(ref mac, _, ref attrs) => {
+ StmtKind::Mac(ref mac) => {
+ let (ref mac, _, ref attrs) = **mac;
visitor.visit_mac(mac);
- for attr in attrs.as_attr_slice() {
+ for attr in attrs.iter() {
visitor.visit_attribute(attr);
}
}
}
}
-pub fn walk_decl<'v, V: Visitor<'v>>(visitor: &mut V, declaration: &'v Decl) {
- match declaration.node {
- DeclKind::Local(ref local) => visitor.visit_local(local),
- DeclKind::Item(ref item) => visitor.visit_item(item),
- }
-}
-
-pub fn walk_mac<'v, V: Visitor<'v>>(_: &mut V, _: &'v Mac) {
+pub fn walk_mac<V: Visitor>(_: &mut V, _: &Mac) {
// Empty!
}
-pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
- for attr in expression.attrs.as_attr_slice() {
+pub fn walk_expr<V: Visitor>(visitor: &mut V, expression: &Expr) {
+ for attr in expression.attrs.iter() {
visitor.visit_attribute(attr);
}
match expression.node {
}
visitor.visit_path(path, expression.id)
}
- ExprKind::Break(ref opt_sp_ident) | ExprKind::Again(ref opt_sp_ident) => {
+ ExprKind::Break(ref opt_sp_ident) | ExprKind::Continue(ref opt_sp_ident) => {
walk_opt_sp_ident(visitor, opt_sp_ident);
}
ExprKind::Ret(ref optional_expression) => {
visitor.visit_expr_post(expression)
}
-pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) {
+pub fn walk_arm<V: Visitor>(visitor: &mut V, arm: &Arm) {
walk_list!(visitor, visit_pat, &arm.pats);
walk_list!(visitor, visit_expr, &arm.guard);
visitor.visit_expr(&arm.body);
walk_list!(visitor, visit_attribute, &arm.attrs);
}
-pub fn walk_vis<'v, V: Visitor<'v>>(visitor: &mut V, vis: &'v Visibility) {
+pub fn walk_vis<V: Visitor>(visitor: &mut V, vis: &Visibility) {
if let Visibility::Restricted { ref path, id } = *vis {
visitor.visit_path(path, id);
}
fmt_macros = { path = "../libfmt_macros" }
log = { path = "../liblog" }
syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
+rustc_errors = { path = "../librustc_errors" }
\ No newline at end of file
use syntax::ast;
use syntax::codemap;
-use syntax::codemap::Span;
use syntax::ext::base;
use syntax::ext::base::*;
use syntax::feature_gate;
use syntax::parse::{self, token};
use syntax::ptr::P;
use syntax::ast::AsmDialect;
+use syntax_pos::Span;
+use syntax::tokenstream;
enum State {
Asm,
const OPTIONS: &'static [&'static str] = &["volatile", "alignstack", "intel"];
-pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'cx> {
if !cx.ecfg.enable_asm() {
feature_gate::emit_feature_err(
// parsed as `asm!(z)` with `z = "x": y` which is type ascription.
let first_colon = tts.iter().position(|tt| {
match *tt {
- ast::TokenTree::Token(_, token::Colon) |
- ast::TokenTree::Token(_, token::ModSep) => true,
+ tokenstream::TokenTree::Token(_, token::Colon) |
+ tokenstream::TokenTree::Token(_, token::ModSep) => true,
_ => false
}
}).unwrap_or(tts.len());
expn_id: expn_id,
}),
span: sp,
- attrs: None,
+ attrs: ast::ThinVec::new(),
}))
}
/// a literal `true` or `false` based on whether the given cfg matches the
/// current compilation environment.
-use syntax::ast;
-use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::ext::base;
use syntax::ext::build::AstBuilder;
use syntax::attr;
+use syntax::tokenstream;
use syntax::parse::token;
+use syntax_pos::Span;
pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
sp: Span,
- tts: &[ast::TokenTree])
+ tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
let mut p = cx.new_parser_from_tts(tts);
let cfg = panictry!(p.parse_meta_item());
// except according to those terms.
use syntax::ast;
-use syntax::codemap;
use syntax::ext::base;
use syntax::ext::build::AstBuilder;
use syntax::parse::token;
+use syntax_pos;
+use syntax::tokenstream;
use std::string::String;
pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
- sp: codemap::Span,
- tts: &[ast::TokenTree])
+ sp: syntax_pos::Span,
+ tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'static> {
let es = match base::get_exprs_from_tts(cx, sp, tts) {
Some(e) => e,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use syntax::ast::{self, TokenTree};
-use syntax::codemap::Span;
+use syntax::ast;
use syntax::ext::base::*;
use syntax::ext::base;
use syntax::feature_gate;
use syntax::parse::token;
use syntax::parse::token::str_to_ident;
use syntax::ptr::P;
+use syntax_pos::Span;
+use syntax::tokenstream::TokenTree;
pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree])
-> Box<base::MacResult+'cx> {
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, self.path()),
span: self.span,
- attrs: None,
+ attrs: ast::ThinVec::new(),
}))
}
use deriving::generic::ty::*;
use syntax::ast::MetaItem;
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
+use syntax_pos::Span;
pub fn expand_deriving_unsafe_bound(cx: &mut ExtCtxt,
span: Span,
use syntax::ast::{Expr, ItemKind, Generics, MetaItem, VariantData};
use syntax::attr;
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
+use syntax_pos::Span;
#[derive(PartialEq)]
enum Mode { Deep, Shallow }
match mode {
Mode::Shallow => {
- cx.expr_block(cx.block(trait_span,
- all_fields.iter()
- .map(subcall)
- .map(|e| cx.stmt_expr(e))
- .collect(),
- Some(cx.expr_deref(trait_span, cx.expr_self(trait_span)))))
+ let mut stmts: Vec<_> =
+ all_fields.iter().map(subcall).map(|e| cx.stmt_expr(e)).collect();
+ stmts.push(cx.stmt_expr(cx.expr_deref(trait_span, cx.expr_self(trait_span))));
+ cx.expr_block(cx.block(trait_span, stmts))
}
Mode::Deep => {
match *vdata {
use deriving::generic::ty::*;
use syntax::ast::{MetaItem, Expr};
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
+use syntax_pos::Span;
pub fn expand_deriving_eq(cx: &mut ExtCtxt,
span: Span,
// create `a.<method>(); b.<method>(); c.<method>(); ...`
// (where method is `assert_receiver_is_total_eq`)
let stmts = exprs.into_iter().map(|e| cx.stmt_expr(e)).collect();
- let block = cx.block(span, stmts, None);
+ let block = cx.block(span, stmts);
cx.expr_block(block)
},
Box::new(|cx, sp, _, _| {
use deriving::generic::ty::*;
use syntax::ast::{MetaItem, Expr, self};
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
+use syntax_pos::Span;
pub fn expand_deriving_ord(cx: &mut ExtCtxt,
span: Span,
use deriving::generic::ty::*;
use syntax::ast::{MetaItem, Expr, BinOpKind};
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
+use syntax_pos::Span;
pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,
span: Span,
use deriving::generic::ty::*;
use syntax::ast::{MetaItem, Expr, BinOpKind, self};
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
+use syntax_pos::Span;
pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt,
span: Span,
use syntax::ast;
use syntax::ast::{MetaItem, Expr};
-use syntax::codemap::{Span, respan, DUMMY_SP};
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P;
+use syntax_pos::{Span, DUMMY_SP};
pub fn expand_deriving_debug(cx: &mut ExtCtxt,
span: Span,
let fmt = substr.nonself_args[0].clone();
- let stmts = match *substr.fields {
+ let mut stmts = match *substr.fields {
Struct(_, ref fields) | EnumMatching(_, _, ref fields) => {
let mut stmts = vec![];
if !is_struct {
token::str_to_ident("finish"),
vec![]);
- let block = cx.block(span, stmts, Some(expr));
+ stmts.push(cx.stmt_expr(expr));
+ let block = cx.block(span, stmts);
cx.expr_block(block)
}
init: Some(expr),
id: ast::DUMMY_NODE_ID,
span: sp,
- attrs: None,
+ attrs: ast::ThinVec::new(),
});
- let decl = respan(sp, ast::DeclKind::Local(local));
- respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID))
+ ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: ast::StmtKind::Local(local),
+ span: sp,
+ }
}
use syntax::ast;
use syntax::ast::{MetaItem, Expr, Mutability};
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::ptr::P;
+use syntax_pos::Span;
pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt,
span: Span,
use deriving::generic::ty::*;
use syntax::ast::{MetaItem, Expr};
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
+use syntax_pos::Span;
pub fn expand_deriving_default(cx: &mut ExtCtxt,
span: Span,
use deriving::generic::ty::*;
use syntax::ast::{MetaItem, Expr, ExprKind, Mutability};
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt,Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P;
+use syntax_pos::Span;
pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt,
span: Span,
cx.expr_str(trait_span, substr.type_ident.name.as_str()),
blk
));
- cx.expr_block(cx.block(trait_span, vec!(me), Some(ret)))
+ cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)]))
}
_ => cx.bug("expected Struct or EnumMatching in derive(Encodable)")
use syntax::attr::AttrMetaMethods;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
-use syntax::codemap::{self, respan, DUMMY_SP};
-use syntax::codemap::Span;
-use syntax::errors::Handler;
+use syntax::codemap::{self, respan};
use syntax::util::move_map::MoveMap;
use syntax::parse::token::{keywords, InternedString};
use syntax::ptr::P;
+use syntax_pos::{Span, DUMMY_SP};
+use errors::Handler;
use self::ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty};
types: Vec<P<ast::Ty>>,
}
- impl<'a> visit::Visitor<'a> for Visitor<'a> {
- fn visit_ty(&mut self, ty: &'a ast::Ty) {
+ impl<'a> visit::Visitor for Visitor<'a> {
+ fn visit_ty(&mut self, ty: &ast::Ty) {
match ty.node {
ast::TyKind::Path(_, ref path) if !path.global => {
match path.segments.first() {
// }
let all_match = cx.expr_match(sp, match_arg, match_arms);
let arm_expr = cx.expr_if(sp, discriminant_test, all_match, Some(arm_expr));
- cx.expr_block(
- cx.block_all(sp, index_let_stmts, Some(arm_expr)))
+ index_let_stmts.push(cx.stmt_expr(arm_expr));
+ cx.expr_block(cx.block(sp, index_let_stmts))
} else if variants.is_empty() {
// As an additional wrinkle, For a zero-variant enum A,
// currently the compiler
use syntax::ast::{Expr, Generics, Ident, SelfKind};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
-use syntax::codemap::{Span,respan};
+use syntax::codemap::respan;
use syntax::ptr::P;
+use syntax_pos::Span;
/// The types of pointers
#[derive(Clone, Eq, PartialEq)]
use deriving::generic::ty::*;
use syntax::ast::{MetaItem, Expr, Mutability};
-use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
+use syntax_pos::Span;
pub fn expand_deriving_hash(cx: &mut ExtCtxt,
span: Span,
stmts.push(call_hash(span, self_.clone()));
}
- cx.expr_block(cx.block(trait_span, stmts, None))
+ cx.expr_block(cx.block(trait_span, stmts))
}
use syntax::ext::base::{MultiDecorator, MultiItemDecorator, MultiModifier};
use syntax::ext::build::AstBuilder;
use syntax::feature_gate;
-use syntax::codemap::{self, Span};
+use syntax::codemap;
use syntax::parse::token::{intern, intern_and_get_ident};
use syntax::ptr::P;
+use syntax_pos::Span;
macro_rules! pathvec {
($($x:ident)::+) => (
let call = cx.expr_call_global(span, path, args);
cx.expr_block(P(ast::Block {
- stmts: vec![],
- expr: Some(call),
+ stmts: vec![cx.stmt_expr(call)],
id: ast::DUMMY_NODE_ID,
rules: ast::BlockCheckMode::Unsafe(ast::CompilerGenerated),
span: span }))
*/
use syntax::ast;
-use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::ext::base;
use syntax::ext::build::AstBuilder;
use syntax::parse::token;
+use syntax_pos::Span;
+use syntax::tokenstream;
use std::env;
-pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'cx> {
let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
None => return DummyResult::expr(sp),
MacEager::expr(e)
}
-pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'cx> {
let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
Some(ref exprs) if exprs.is_empty() => {
use fmt_macros as parse;
use syntax::ast;
-use syntax::codemap::{Span, respan, DUMMY_SP};
use syntax::ext::base::*;
use syntax::ext::base;
use syntax::ext::build::AstBuilder;
use syntax::fold::Folder;
use syntax::parse::token::{self, keywords};
use syntax::ptr::P;
+use syntax_pos::{Span, DUMMY_SP};
+use syntax::tokenstream;
use std::collections::HashMap;
/// Some((fmtstr, unnamed arguments, ordering of named arguments,
/// named arguments))
/// ```
-fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
+fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-> Option<(P<ast::Expr>, Vec<P<ast::Expr>>, Vec<String>,
HashMap<String, P<ast::Expr>>)> {
let mut args = Vec::new();
let name = ecx.ident_of(name);
let item = ecx.item(sp, name, vec![], st);
- let decl = respan(sp, ast::DeclKind::Item(item));
+ let stmt = ast::Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: ast::StmtKind::Item(item),
+ span: sp,
+ };
// Wrap the declaration in a block so that it forms a single expression.
- ecx.expr_block(ecx.block(sp,
- vec![respan(sp, ast::StmtKind::Decl(P(decl), ast::DUMMY_NODE_ID))],
- Some(ecx.expr_ident(sp, name))))
+ ecx.expr_block(ecx.block(sp, vec![stmt, ecx.stmt_expr(ecx.expr_ident(sp, name))]))
}
/// Actually builds the expression which the iformat! block will be expanded
}
pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt, sp: Span,
- tts: &[ast::TokenTree])
+ tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'cx> {
match parse_args(ecx, sp, tts) {
#[macro_use] extern crate log;
#[macro_use]
extern crate syntax;
+extern crate syntax_pos;
+extern crate rustc_errors as errors;
use syntax::ext::base::{MacroExpanderFn, NormalTT};
use syntax::ext::base::{SyntaxEnv, SyntaxExtension};
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use syntax::ast;
-use syntax::codemap;
use syntax::ext::base;
use syntax::feature_gate;
use syntax::print;
+use syntax::tokenstream;
+use syntax_pos;
pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt,
- sp: codemap::Span,
- tts: &[ast::TokenTree])
+ sp: syntax_pos::Span,
+ tts: &[tokenstream::TokenTree])
-> Box<base::MacResult+'cx> {
if !cx.ecfg.enable_log_syntax() {
feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use syntax::ast::TokenTree;
-use syntax::codemap::Span;
use syntax::ext::base::ExtCtxt;
use syntax::ext::base;
use syntax::feature_gate;
use syntax::parse::token::keywords;
-
+use syntax_pos::Span;
+use syntax::tokenstream::TokenTree;
pub fn expand_trace_macros(cx: &mut ExtCtxt,
sp: Span,
--- /dev/null
+[package]
+authors = ["The Rust Project Developers"]
+name = "syntax_pos"
+version = "0.0.0"
+
+[lib]
+name = "syntax_pos"
+path = "lib.rs"
+crate-type = ["dylib"]
+
+[dependencies]
+serialize = { path = "../libserialize" }
--- /dev/null
+// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! The source positions and related helper functions
+//!
+//! # Note
+//!
+//! This API is completely unstable and subject to change.
+
+#![crate_name = "syntax_pos"]
+#![unstable(feature = "rustc_private", issue = "27812")]
+#![crate_type = "dylib"]
+#![crate_type = "rlib"]
+#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+ html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+ html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![cfg_attr(not(stage0), deny(warnings))]
+
+#![feature(custom_attribute)]
+#![allow(unused_attributes)]
+#![feature(rustc_private)]
+#![feature(staged_api)]
+#![feature(question_mark)]
+
+use std::cell::{Cell, RefCell};
+use std::ops::{Add, Sub};
+use std::rc::Rc;
+use std::cmp;
+
+use std::fmt;
+
+use serialize::{Encodable, Decodable, Encoder, Decoder};
+
+extern crate serialize;
+extern crate serialize as rustc_serialize; // used by deriving
+
+pub type FileName = String;
+
+/// Spans represent a region of code, used for error reporting. Positions in spans
+/// are *absolute* positions from the beginning of the codemap, not positions
+/// relative to FileMaps. Methods on the CodeMap can be used to relate spans back
+/// to the original source.
+/// You must be careful if the span crosses more than one file - you will not be
+/// able to use many of the functions on spans in codemap and you cannot assume
+/// that the length of the span = hi - lo; there may be space in the BytePos
+/// range between files.
+#[derive(Clone, Copy, Hash, PartialEq, Eq)]
+pub struct Span {
+ pub lo: BytePos,
+ pub hi: BytePos,
+ /// Information about where the macro came from, if this piece of
+ /// code was created by a macro expansion.
+ pub expn_id: ExpnId
+}
+
+/// A collection of spans. Spans have two orthogonal attributes:
+///
+/// - they can be *primary spans*. In this case they are the locus of
+///   the error, and would be rendered with `^^^`.
+/// - they can have a *label*. In this case, the label is written next
+///   to the mark in the snippet when we render.
+#[derive(Clone)]
+pub struct MultiSpan {
+    /// The spans marking the locus of the error (rendered with `^^^`).
+    primary_spans: Vec<Span>,
+    /// `(span, label)` pairs; a labelled span may or may not also be primary.
+    span_labels: Vec<(Span, String)>,
+}
+
+impl Span {
+    /// Returns a new span representing just the end-point of this span
+    pub fn end_point(self) -> Span {
+        // Start one byte before `hi`, but never before `lo`.
+        // NOTE(review): `self.hi.0 - 1` underflows (panics in debug builds)
+        // when `hi == BytePos(0)`, e.g. for DUMMY_SP -- confirm callers
+        // never invoke this on such spans.
+        let lo = cmp::max(self.hi.0 - 1, self.lo.0);
+        Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id}
+    }
+
+    /// Returns `self` if `self` is not the dummy span, and `other` otherwise.
+    pub fn substitute_dummy(self, other: Span) -> Span {
+        if self.source_equal(&DUMMY_SP) { other } else { self }
+    }
+
+    /// Returns true if `other` lies entirely within `self` (byte-position
+    /// containment only; expansion ids are not compared).
+    pub fn contains(self, other: Span) -> bool {
+        self.lo <= other.lo && other.hi <= self.hi
+    }
+
+    /// Return true if the spans are equal with regards to the source text.
+    ///
+    /// Use this instead of `==` when either span could be generated code,
+    /// and you only care that they point to the same bytes of source text.
+    pub fn source_equal(&self, other: &Span) -> bool {
+        self.lo == other.lo && self.hi == other.hi
+    }
+
+    /// Returns `Some(span)`, a union of `self` and `other`, on overlap.
+    pub fn merge(self, other: Span) -> Option<Span> {
+        // Spans from different macro expansions never merge.
+        if self.expn_id != other.expn_id {
+            return None;
+        }
+
+        // Overlap test: either span starts inside the other.
+        if (self.lo <= other.lo && self.hi > other.lo) ||
+           (self.lo >= other.lo && self.lo < other.hi) {
+            Some(Span {
+                lo: cmp::min(self.lo, other.lo),
+                hi: cmp::max(self.hi, other.hi),
+                expn_id: self.expn_id,
+            })
+        } else {
+            None
+        }
+    }
+
+    /// Returns `Some(span)`, where the start is trimmed by the end of `other`
+    /// (returns `None` when `other` extends to or past `self`'s end).
+    pub fn trim_start(self, other: Span) -> Option<Span> {
+        if self.hi > other.hi {
+            Some(Span { lo: cmp::max(self.lo, other.hi), .. self })
+        } else {
+            None
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct SpanLabel {
+ /// The span we are going to include in the final snippet.
+ pub span: Span,
+
+ /// Is this a primary span? This is the "locus" of the message,
+ /// and is indicated with a `^^^^` underline, versus `----`.
+ pub is_primary: bool,
+
+ /// What label should we attach to this span (if any)?
+ pub label: Option<String>,
+}
+
+impl Encodable for Span {
+    // Only `lo` and `hi` are serialized; `expn_id` is deliberately dropped,
+    // so a round-tripped span loses its macro-expansion information (the
+    // matching `Decodable` impl below restores `NO_EXPANSION` via `mk_sp`).
+    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        s.emit_struct("Span", 2, |s| {
+            s.emit_struct_field("lo", 0, |s| {
+                self.lo.encode(s)
+            })?;
+
+            s.emit_struct_field("hi", 1, |s| {
+                self.hi.encode(s)
+            })
+        })
+    }
+}
+
+impl Decodable for Span {
+    fn decode<D: Decoder>(d: &mut D) -> Result<Span, D::Error> {
+        d.read_struct("Span", 2, |d| {
+            let lo = d.read_struct_field("lo", 0, |d| {
+                BytePos::decode(d)
+            })?;
+
+            let hi = d.read_struct_field("hi", 1, |d| {
+                BytePos::decode(d)
+            })?;
+
+            // `expn_id` was not serialized (see the Encodable impl);
+            // `mk_sp` fills in `NO_EXPANSION`.
+            Ok(mk_sp(lo, hi))
+        })
+    }
+}
+
+/// Fallback `Debug` formatter for `Span`: prints the raw fields.
+/// This is the initial value of the thread-local `SPAN_DEBUG` hook.
+fn default_span_debug(span: Span, f: &mut fmt::Formatter) -> fmt::Result {
+    write!(f, "Span {{ lo: {:?}, hi: {:?}, expn_id: {:?} }}",
+           span.lo, span.hi, span.expn_id)
+}
+
+impl fmt::Debug for Span {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Dispatch through the thread-local SPAN_DEBUG hook so a downstream
+        // crate that owns the codemap can install a richer printer without
+        // this crate depending on it.
+        SPAN_DEBUG.with(|span_debug| span_debug.get()(*self, f))
+    }
+}
+
+pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION };
+
+// Generic span to be used for code originating from the command line
+pub const COMMAND_LINE_SP: Span = Span { lo: BytePos(0),
+ hi: BytePos(0),
+ expn_id: COMMAND_LINE_EXPN };
+
+impl MultiSpan {
+    /// Creates an empty `MultiSpan`: no primary spans, no labels.
+    pub fn new() -> MultiSpan {
+        MultiSpan {
+            primary_spans: vec![],
+            span_labels: vec![]
+        }
+    }
+
+    /// Creates a `MultiSpan` with a single primary span and no labels.
+    pub fn from_span(primary_span: Span) -> MultiSpan {
+        MultiSpan {
+            primary_spans: vec![primary_span],
+            span_labels: vec![]
+        }
+    }
+
+    /// Creates a `MultiSpan` whose primary spans are `vec`, in order.
+    pub fn from_spans(vec: Vec<Span>) -> MultiSpan {
+        MultiSpan {
+            primary_spans: vec,
+            span_labels: vec![]
+        }
+    }
+
+    /// Attaches `label` to `span`. The span need not already be a
+    /// primary span of this `MultiSpan`.
+    pub fn push_span_label(&mut self, span: Span, label: String) {
+        self.span_labels.push((span, label));
+    }
+
+    /// Selects the first primary span (if any)
+    pub fn primary_span(&self) -> Option<Span> {
+        self.primary_spans.first().cloned()
+    }
+
+    /// Returns all primary spans.
+    pub fn primary_spans(&self) -> &[Span] {
+        &self.primary_spans
+    }
+
+    /// Returns the strings to highlight. We always ensure that there
+    /// is an entry for each of the primary spans -- for each primary
+    /// span P, if there is at least one label with span P, we return
+    /// those labels (marked as primary). But otherwise we return
+    /// `SpanLabel` instances with empty labels.
+    pub fn span_labels(&self) -> Vec<SpanLabel> {
+        let is_primary = |span| self.primary_spans.contains(&span);
+        let mut span_labels = vec![];
+
+        // Labelled spans first; each is marked primary iff its span is
+        // also in `primary_spans`.
+        for &(span, ref label) in &self.span_labels {
+            span_labels.push(SpanLabel {
+                span: span,
+                is_primary: is_primary(span),
+                label: Some(label.clone())
+            });
+        }
+
+        // Then pad with label-less entries for primary spans that carried
+        // no label, so every primary span appears in the result.
+        for &span in &self.primary_spans {
+            if !span_labels.iter().any(|sl| sl.span == span) {
+                span_labels.push(SpanLabel {
+                    span: span,
+                    is_primary: true,
+                    label: None
+                });
+            }
+        }
+
+        span_labels
+    }
+}
+
+impl From<Span> for MultiSpan {
+ fn from(span: Span) -> MultiSpan {
+ MultiSpan::from_span(span)
+ }
+}
+
+/// Identifies one macro expansion.
+/// NOTE(review): the table this indexes lives outside this crate -- confirm
+/// against the codemap/expansion-info owner.
+#[derive(PartialEq, Eq, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Copy)]
+pub struct ExpnId(pub u32);
+
+// Sentinel values carved out of the top of the u32 range:
+/// Code that did not come from any macro expansion.
+pub const NO_EXPANSION: ExpnId = ExpnId(!0);
+/// For code appearing from the command line.
+pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1);
+
+impl ExpnId {
+    pub fn from_u32(id: u32) -> ExpnId {
+        ExpnId(id)
+    }
+
+    pub fn into_u32(self) -> u32 {
+        self.0
+    }
+}
+
+/// Identifies an offset of a multi-byte character in a FileMap
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)]
+pub struct MultiByteChar {
+ /// The absolute offset of the character in the CodeMap
+ pub pos: BytePos,
+ /// The number of bytes, >=2
+ pub bytes: usize,
+}
+
+/// A single source in the CodeMap.
+pub struct FileMap {
+ /// The name of the file that the source came from, source that doesn't
+ /// originate from files has names between angle brackets by convention,
+ /// e.g. `<anon>`
+ pub name: FileName,
+ /// The absolute path of the file that the source came from.
+ pub abs_path: Option<FileName>,
+ /// The complete source code
+ pub src: Option<Rc<String>>,
+ /// The start position of this source in the CodeMap
+ pub start_pos: BytePos,
+ /// The end position of this source in the CodeMap
+ pub end_pos: BytePos,
+ /// Locations of lines beginnings in the source code
+ pub lines: RefCell<Vec<BytePos>>,
+ /// Locations of multi-byte characters in the source code
+ pub multibyte_chars: RefCell<Vec<MultiByteChar>>,
+}
+
+impl Encodable for FileMap {
+    // Serializes every field except `src` (the source text itself is not
+    // exported; the Decodable impl reconstructs with `src: None`). The
+    // `lines` table is stored as a compressed difference list -- see below.
+    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        s.emit_struct("FileMap", 6, |s| {
+            s.emit_struct_field("name", 0, |s| self.name.encode(s))?;
+            s.emit_struct_field("abs_path", 1, |s| self.abs_path.encode(s))?;
+            s.emit_struct_field("start_pos", 2, |s| self.start_pos.encode(s))?;
+            s.emit_struct_field("end_pos", 3, |s| self.end_pos.encode(s))?;
+            s.emit_struct_field("lines", 4, |s| {
+                let lines = self.lines.borrow();
+                // store the length
+                s.emit_u32(lines.len() as u32)?;
+
+                if !lines.is_empty() {
+                    // In order to preserve some space, we exploit the fact that
+                    // the lines list is sorted and individual lines are
+                    // probably not that long. Because of that we can store lines
+                    // as a difference list, using as little space as possible
+                    // for the differences.
+                    let max_line_length = if lines.len() == 1 {
+                        0
+                    } else {
+                        lines.windows(2)
+                             .map(|w| w[1] - w[0])
+                             .map(|bp| bp.to_usize())
+                             .max()
+                             .unwrap()
+                    };
+
+                    // Pick the narrowest integer width that can hold every diff.
+                    let bytes_per_diff: u8 = match max_line_length {
+                        0 ... 0xFF => 1,
+                        0x100 ... 0xFFFF => 2,
+                        _ => 4
+                    };
+
+                    // Encode the number of bytes used per diff.
+                    bytes_per_diff.encode(s)?;
+
+                    // Encode the first element.
+                    lines[0].encode(s)?;
+
+                    let diff_iter = (&lines[..]).windows(2)
+                                                .map(|w| (w[1] - w[0]));
+
+                    match bytes_per_diff {
+                        1 => for diff in diff_iter { (diff.0 as u8).encode(s)? },
+                        2 => for diff in diff_iter { (diff.0 as u16).encode(s)? },
+                        4 => for diff in diff_iter { diff.0.encode(s)? },
+                        _ => unreachable!()
+                    }
+                }
+
+                Ok(())
+            })?;
+            s.emit_struct_field("multibyte_chars", 5, |s| {
+                (*self.multibyte_chars.borrow()).encode(s)
+            })
+        })
+    }
+}
+
+impl Decodable for FileMap {
+    // Inverse of the Encodable impl above: reads the fixed fields, then
+    // expands the difference-list encoding of `lines` back into absolute
+    // `BytePos` values.
+    fn decode<D: Decoder>(d: &mut D) -> Result<FileMap, D::Error> {
+
+        d.read_struct("FileMap", 6, |d| {
+            let name: String = d.read_struct_field("name", 0, |d| Decodable::decode(d))?;
+            let abs_path: Option<String> =
+                d.read_struct_field("abs_path", 1, |d| Decodable::decode(d))?;
+            let start_pos: BytePos = d.read_struct_field("start_pos", 2, |d| Decodable::decode(d))?;
+            let end_pos: BytePos = d.read_struct_field("end_pos", 3, |d| Decodable::decode(d))?;
+            let lines: Vec<BytePos> = d.read_struct_field("lines", 4, |d| {
+                let num_lines: u32 = Decodable::decode(d)?;
+                let mut lines = Vec::with_capacity(num_lines as usize);
+
+                if num_lines > 0 {
+                    // Read the number of bytes used per diff.
+                    let bytes_per_diff: u8 = Decodable::decode(d)?;
+
+                    // Read the first element.
+                    let mut line_start: BytePos = Decodable::decode(d)?;
+                    lines.push(line_start);
+
+                    // Each subsequent line start is the running sum of diffs.
+                    for _ in 1..num_lines {
+                        let diff = match bytes_per_diff {
+                            1 => d.read_u8()? as u32,
+                            2 => d.read_u16()? as u32,
+                            4 => d.read_u32()?,
+                            _ => unreachable!()
+                        };
+
+                        line_start = line_start + BytePos(diff);
+
+                        lines.push(line_start);
+                    }
+                }
+
+                Ok(lines)
+            })?;
+            let multibyte_chars: Vec<MultiByteChar> =
+                d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?;
+            Ok(FileMap {
+                name: name,
+                abs_path: abs_path,
+                start_pos: start_pos,
+                end_pos: end_pos,
+                // `src` is never serialized (see Encodable); a decoded
+                // FileMap therefore reports `is_imported() == true`.
+                src: None,
+                lines: RefCell::new(lines),
+                multibyte_chars: RefCell::new(multibyte_chars)
+            })
+        })
+    }
+}
+
+impl fmt::Debug for FileMap {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ write!(fmt, "FileMap({})", self.name)
+ }
+}
+
+impl FileMap {
+    /// EFFECT: register a start-of-line offset in the
+    /// table of line-beginnings.
+    /// UNCHECKED INVARIANT: these offsets must be added in the right
+    /// order and must be in the right places; there is shared knowledge
+    /// about what ends a line between this file and parse.rs
+    /// WARNING: pos param here is the offset relative to start of CodeMap,
+    /// and CodeMap will append a newline when adding a filemap without a newline at the end,
+    /// so the safe way to call this is with value calculated as
+    /// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
+    pub fn next_line(&self, pos: BytePos) {
+        // the new charpos must be > the last one (or it's the first one).
+        let mut lines = self.lines.borrow_mut();
+        let line_len = lines.len();
+        assert!(line_len == 0 || ((*lines)[line_len - 1] < pos));
+        lines.push(pos);
+    }
+
+    /// get a line from the list of pre-computed line-beginnings.
+    /// line-number here is 0-based.
+    /// Returns `None` when the source text is unavailable (`src` is `None`)
+    /// or the line number is out of range.
+    pub fn get_line(&self, line_number: usize) -> Option<&str> {
+        match self.src {
+            Some(ref src) => {
+                let lines = self.lines.borrow();
+                lines.get(line_number).map(|&line| {
+                    // Line starts are absolute CodeMap positions; rebase to
+                    // an offset into this filemap's source string.
+                    let begin: BytePos = line - self.start_pos;
+                    let begin = begin.to_usize();
+                    // We can't use `lines.get(line_number+1)` because we might
+                    // be parsing when we call this function and thus the current
+                    // line is the last one we have line info for.
+                    let slice = &src[begin..];
+                    match slice.find('\n') {
+                        Some(e) => &slice[..e],
+                        None => slice
+                    }
+                })
+            }
+            None => None
+        }
+    }
+
+    /// Registers a multi-byte UTF-8 character at absolute position `pos`.
+    /// Only 2- to 4-byte encodings are valid (asserted).
+    pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
+        assert!(bytes >=2 && bytes <= 4);
+        let mbc = MultiByteChar {
+            pos: pos,
+            bytes: bytes,
+        };
+        self.multibyte_chars.borrow_mut().push(mbc);
+    }
+
+    /// True unless the name uses the `<...>` convention for synthetic
+    /// sources (e.g. `<anon>`), i.e. true for on-disk files.
+    pub fn is_real_file(&self) -> bool {
+        !(self.name.starts_with("<") &&
+          self.name.ends_with(">"))
+    }
+
+    /// True when this filemap carries no inline source text -- which is the
+    /// case for filemaps reconstructed by deserialization (see Decodable).
+    pub fn is_imported(&self) -> bool {
+        self.src.is_none()
+    }
+
+    /// Number of line-beginnings registered so far via `next_line`.
+    pub fn count_lines(&self) -> usize {
+        self.lines.borrow().len()
+    }
+}
+
+// _____________________________________________________________________________
+// Pos, BytePos, CharPos
+//
+
+pub trait Pos {
+ fn from_usize(n: usize) -> Self;
+ fn to_usize(&self) -> usize;
+}
+
+/// A byte offset. Keep this small (currently 32-bits), as AST contains
+/// a lot of them.
+#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
+pub struct BytePos(pub u32);
+
+/// A character offset. Because of multibyte utf8 characters, a byte offset
+/// is not equivalent to a character offset. The CodeMap will convert BytePos
+/// values to CharPos values as necessary.
+#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
+pub struct CharPos(pub usize);
+
+// FIXME: Lots of boilerplate in these impls, but so far my attempts to fix
+// have been unsuccessful
+
+impl Pos for BytePos {
+    // NOTE: `as u32` silently truncates on offsets above u32::MAX.
+    fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
+    fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize }
+}
+
+impl Add for BytePos {
+    type Output = BytePos;
+
+    fn add(self, rhs: BytePos) -> BytePos {
+        // Widen to usize for the arithmetic, then narrow back to u32.
+        BytePos((self.to_usize() + rhs.to_usize()) as u32)
+    }
+}
+
+impl Sub for BytePos {
+    type Output = BytePos;
+
+    fn sub(self, rhs: BytePos) -> BytePos {
+        // Underflows (panics in debug builds) when rhs > self.
+        BytePos((self.to_usize() - rhs.to_usize()) as u32)
+    }
+}
+
+impl Encodable for BytePos {
+    // Serialized as a bare u32 rather than a struct wrapper.
+    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        s.emit_u32(self.0)
+    }
+}
+
+impl Decodable for BytePos {
+    fn decode<D: Decoder>(d: &mut D) -> Result<BytePos, D::Error> {
+        Ok(BytePos(d.read_u32()?))
+    }
+}
+
+impl Pos for CharPos {
+    // CharPos is usize-backed, so these conversions are lossless.
+    fn from_usize(n: usize) -> CharPos { CharPos(n) }
+    fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
+}
+
+impl Add for CharPos {
+    type Output = CharPos;
+
+    fn add(self, rhs: CharPos) -> CharPos {
+        CharPos(self.to_usize() + rhs.to_usize())
+    }
+}
+
+impl Sub for CharPos {
+    type Output = CharPos;
+
+    fn sub(self, rhs: CharPos) -> CharPos {
+        // Underflows (panics in debug builds) when rhs > self.
+        CharPos(self.to_usize() - rhs.to_usize())
+    }
+}
+
+// _____________________________________________________________________________
+// Loc, LocWithOpt, FileMapAndLine, FileMapAndBytePos
+//
+
+/// A source code location used for error reporting
+#[derive(Debug)]
+pub struct Loc {
+ /// Information about the original source
+ pub file: Rc<FileMap>,
+ /// The (1-based) line number
+ pub line: usize,
+ /// The (0-based) column offset
+ pub col: CharPos
+}
+
+/// A source code location used as the result of lookup_char_pos_adj
+// Actually, *none* of the clients use the filename *or* file field;
+// perhaps they should just be removed.
+#[derive(Debug)]
+pub struct LocWithOpt {
+ pub filename: FileName,
+ pub line: usize,
+ pub col: CharPos,
+ pub file: Option<Rc<FileMap>>,
+}
+
+// used to be structural records. Better names, anyone?
+#[derive(Debug)]
+pub struct FileMapAndLine { pub fm: Rc<FileMap>, pub line: usize }
+#[derive(Debug)]
+pub struct FileMapAndBytePos { pub fm: Rc<FileMap>, pub pos: BytePos }
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct LineInfo {
+ /// Index of line, starting from 0.
+ pub line_index: usize,
+
+ /// Column in line where span begins, starting from 0.
+ pub start_col: CharPos,
+
+ /// Column in line where span ends, starting from 0, exclusive.
+ pub end_col: CharPos,
+}
+
+pub struct FileLines {
+ pub file: Rc<FileMap>,
+ pub lines: Vec<LineInfo>
+}
+
+thread_local!(pub static SPAN_DEBUG: Cell<fn(Span, &mut fmt::Formatter) -> fmt::Result> =
+ Cell::new(default_span_debug));
+
+/// Creates a `Span` from raw byte positions, with `expn_id` set to
+/// `NO_EXPANSION` -- i.e. assuming that we're not in macro expansion.
+pub fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
+    Span {lo: lo, hi: hi, expn_id: NO_EXPANSION}
+}
+
+pub struct MacroBacktrace {
+ /// span where macro was applied to generate this code
+ pub call_site: Span,
+
+ /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]")
+ pub macro_decl_name: String,
+
+ /// span where macro was defined (if known)
+ pub def_site_span: Option<Span>,
+}
+
+// _____________________________________________________________________________
+// SpanLinesError, SpanSnippetError, DistinctSources, MalformedCodemapPositions
+//
+
+pub type FileLinesResult = Result<FileLines, SpanLinesError>;
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum SpanLinesError {
+ IllFormedSpan(Span),
+ DistinctSources(DistinctSources),
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum SpanSnippetError {
+ IllFormedSpan(Span),
+ DistinctSources(DistinctSources),
+ MalformedForCodemap(MalformedCodemapPositions),
+ SourceNotAvailable { filename: String }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct DistinctSources {
+ pub begin: (String, BytePos),
+ pub end: (String, BytePos)
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct MalformedCodemapPositions {
+ pub name: String,
+ pub source_len: usize,
+ pub begin_pos: BytePos,
+ pub end_pos: BytePos
+}
+
"rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
+ "rustc_errors 0.0.0",
"rustc_llvm 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"log 0.0.0",
"rustc 0.0.0",
"rustc_data_structures 0.0.0",
+ "rustc_errors 0.0.0",
"rustc_mir 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_const_math 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_back 0.0.0",
"rustc_borrowck 0.0.0",
"rustc_const_eval 0.0.0",
+ "rustc_errors 0.0.0",
"rustc_incremental 0.0.0",
"rustc_lint 0.0.0",
"rustc_llvm 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
"syntax_ext 0.0.0",
+ "syntax_pos 0.0.0",
+]
+
+[[package]]
+name = "rustc_errors"
+version = "0.0.0"
+dependencies = [
+ "log 0.0.0",
+ "serialize 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_data_structures 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_back 0.0.0",
"rustc_const_eval 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
+ "rustc_errors 0.0.0",
"rustc_llvm 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc 0.0.0",
"rustc_const_eval 0.0.0",
"rustc_const_math 0.0.0",
+ "rustc_errors 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
+ "rustc_errors 0.0.0",
"rustc_metadata 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
dependencies = [
"rustc 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"arena 0.0.0",
"log 0.0.0",
"rustc 0.0.0",
+ "rustc_errors 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_const_eval 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
+ "rustc_errors 0.0.0",
"rustc_incremental 0.0.0",
"rustc_llvm 0.0.0",
"rustc_platform_intrinsics 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_back 0.0.0",
"rustc_const_eval 0.0.0",
"rustc_const_math 0.0.0",
+ "rustc_errors 0.0.0",
"rustc_platform_intrinsics 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
"rustc_back 0.0.0",
"rustc_const_eval 0.0.0",
"rustc_driver 0.0.0",
+ "rustc_errors 0.0.0",
"rustc_lint 0.0.0",
"rustc_metadata 0.0.0",
"rustc_resolve 0.0.0",
"rustc_trans 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
dependencies = [
"log 0.0.0",
"rustc_bitflags 0.0.0",
+ "rustc_errors 0.0.0",
"serialize 0.0.0",
+ "syntax_pos 0.0.0",
]
[[package]]
dependencies = [
"fmt_macros 0.0.0",
"log 0.0.0",
+ "rustc_errors 0.0.0",
"syntax 0.0.0",
+ "syntax_pos 0.0.0",
+]
+
+[[package]]
+name = "syntax_pos"
+version = "0.0.0"
+dependencies = [
+ "serialize 0.0.0",
]
#![feature(plugin_registrar, quote, rustc_private)]
extern crate syntax;
+extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
-use syntax::ast::{self, TokenTree, Item, MetaItem, ImplItem, TraitItem, ItemKind};
-use syntax::codemap::Span;
+use syntax::ast::{self, Item, MetaItem, ImplItem, TraitItem, ItemKind};
use syntax::ext::base::*;
use syntax::parse::{self, token};
use syntax::ptr::P;
+use syntax::tokenstream::TokenTree;
+use syntax_pos::Span;
use rustc_plugin::Registry;
#[macro_export]
extern crate syntax;
use syntax::ast;
-use syntax::codemap::Span;
use syntax::parse;
+use syntax_pos::Span;
struct ParseSess;
#![feature(quote, rustc_private)]
extern crate syntax;
+extern crate syntax_pos;
use syntax::ast;
-use syntax::codemap::{self, DUMMY_SP};
use syntax::parse;
use syntax::print::pprust;
+use syntax_pos::DUMMY_SP;
fn main() {
let ps = syntax::parse::ParseSess::new();
// except according to those terms.
// error-pattern: requires at least a format string argument
-// error-pattern: bad-format-args.rs:19:5: 19:15 note: in this expansion
+// error-pattern: in this expansion
// error-pattern: expected token: `,`
-// error-pattern: bad-format-args.rs:20:5: 20:19 note: in this expansion
-// error-pattern: bad-format-args.rs:21:5: 21:22 note: in this expansion
+// error-pattern: in this expansion
+// error-pattern: in this expansion
fn main() {
format!();
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_attrs)]
+#![allow(unused)]
+
+macro_rules! make_item {
+ () => { fn f() {} }
+}
+
+macro_rules! make_stmt {
+ () => { let x = 0; }
+}
+
+fn f() {
+ make_item! {}
+}
+
+fn g() {
+ make_stmt! {}
+}
+
+#[rustc_error]
+fn main() {} //~ ERROR compilation successful
fn main() {
match 0u32 {
<Foo as MyTrait>::trait_bar => {}
- //~^ ERROR expected associated constant, found method `trait_bar`
+ //~^ ERROR expected variant, struct or constant, found method `trait_bar`
}
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+macro_rules! bah {
+ ($a:expr) => ($a)
+ //~^ ERROR expected one of `const`, `extern`, `fn`, `type`, or `unsafe`, found `2`
+}
+
+trait bar {
+ bah!(2);
+}
+
+fn main() {}
#![feature(quote, rustc_private)]
extern crate syntax;
+extern crate syntax_pos;
use syntax::ast;
-use syntax::codemap::{self, DUMMY_SP};
+use syntax::codemap;
use syntax::parse;
use syntax::print::pprust;
+use syntax_pos::DUMMY_SP;
fn main() {
let ps = syntax::parse::ParseSess::new();
extern crate rustc_llvm as llvm;
extern crate rustc_metadata;
extern crate rustc_resolve;
+extern crate rustc_errors;
+extern crate rustc_errors as errors;
#[macro_use] extern crate syntax;
use std::ffi::{CStr, CString};
use rustc_metadata::cstore::CStore;
use libc::c_void;
-use syntax::diagnostics::registry::Registry;
+use rustc_errors::registry::Registry;
use syntax::parse::token;
fn main() {
extern crate rustc_driver;
extern crate rustc_lint;
extern crate rustc_metadata;
+extern crate rustc_errors;
extern crate syntax;
use rustc::dep_graph::DepGraph;
use rustc::session::config::{basic_options, build_configuration, Input, OutputType};
use rustc_driver::driver::{compile_input, CompileController, anon_src};
use rustc_metadata::cstore::CStore;
-use syntax::diagnostics::registry::Registry;
+use rustc_errors::registry::Registry;
use syntax::parse::token;
use std::path::PathBuf;
let actual = &e.attrs;
str_compare(es,
&expected.iter().map(|r| attr(r, &ps).unwrap()).collect::<Vec<_>>(),
- actual.as_attr_slice(),
+ &actual,
pprust::attribute_to_string);
}
extern crate syntax;
extern crate syntax_ext;
+extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
use syntax::ast;
-use syntax::codemap::Span;
use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::ptr::P;
use syntax_ext::deriving::generic::{cs_fold, TraitDef, MethodDef, combine_substructure};
use syntax_ext::deriving::generic::ty::{Literal, LifetimeBounds, Path, borrowed_explicit_self};
+use syntax_pos::Span;
use rustc_plugin::Registry;
#[plugin_registrar]
extern crate syntax;
extern crate syntax_ext;
+extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
-use syntax::codemap::Span;
use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax_ext::deriving::generic::{cs_fold, TraitDef, MethodDef, combine_substructure};
use syntax_ext::deriving::generic::{Substructure, Struct, EnumMatching};
use syntax_ext::deriving::generic::ty::{Literal, LifetimeBounds, Path, borrowed_explicit_self};
+use syntax_pos::Span;
use rustc_plugin::Registry;
#[plugin_registrar]
extern crate syntax;
extern crate rustc;
extern crate rustc_plugin;
+extern crate syntax_pos;
use syntax::ast;
-use syntax::codemap;
use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
use syntax::util::small_vector::SmallVector;
+use syntax::tokenstream;
use rustc_plugin::Registry;
#[plugin_registrar]
reg.register_macro("multiple_items", expand)
}
-fn expand(cx: &mut ExtCtxt, _: codemap::Span, _: &[ast::TokenTree]) -> Box<MacResult+'static> {
+fn expand(cx: &mut ExtCtxt, _: syntax_pos::Span, _: &[tokenstream::TokenTree])
+ -> Box<MacResult+'static> {
MacEager::items(SmallVector::many(vec![
quote_item!(cx, struct Struct1;).unwrap(),
quote_item!(cx, struct Struct2;).unwrap()
extern crate syntax;
extern crate rustc;
extern crate rustc_plugin;
+extern crate syntax_pos;
-use syntax::ast::{self, TokenTree, Item, MetaItem, ImplItem, TraitItem, ItemKind};
-use syntax::codemap::Span;
+use syntax::ast::{self, Item, MetaItem, ImplItem, TraitItem, ItemKind};
use syntax::ext::base::*;
use syntax::parse::{self, token};
use syntax::ptr::P;
+use syntax::tokenstream::TokenTree;
+use syntax_pos::Span;
use rustc_plugin::Registry;
#[macro_export]
#![feature(box_syntax, rustc_private)]
extern crate syntax;
+extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
use std::borrow::ToOwned;
use syntax::ast;
-use syntax::codemap::Span;
use syntax::ext::build::AstBuilder;
use syntax::ext::base::{TTMacroExpander, ExtCtxt, MacResult, MacEager, NormalTT};
use syntax::parse::token;
use syntax::print::pprust;
use syntax::ptr::P;
+use syntax_pos::Span;
+use syntax::tokenstream;
use rustc_plugin::Registry;
struct Expander {
fn expand<'cx>(&self,
ecx: &'cx mut ExtCtxt,
sp: Span,
- _: &[ast::TokenTree]) -> Box<MacResult+'cx> {
+ _: &[tokenstream::TokenTree]) -> Box<MacResult+'cx> {
let args = self.args.iter().map(|i| pprust::meta_item_to_string(&*i))
.collect::<Vec<_>>().join(", ");
let interned = token::intern_and_get_ident(&args[..]);
#![feature(plugin_registrar, quote, rustc_private)]
extern crate syntax;
+extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
-use syntax::codemap::Span;
use syntax::parse::token::{self, str_to_ident, NtExpr, NtPat};
-use syntax::ast::{TokenTree, Pat};
+use syntax::ast::{Pat};
+use syntax::tokenstream::{TokenTree};
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
use syntax::ext::build::AstBuilder;
use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use syntax::ext::tt::macro_parser::{Success, Failure, Error};
use syntax::ptr::P;
+use syntax_pos::Span;
use rustc_plugin::Registry;
fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
#![feature(slice_patterns)]
extern crate syntax;
+extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
-use syntax::codemap::Span;
-use syntax::ast::TokenTree;
use syntax::parse::token;
+use syntax::tokenstream::TokenTree;
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
use syntax::ext::build::AstBuilder; // trait for expr_usize
+use syntax_pos::Span;
use rustc_plugin::Registry;
// WARNING WARNING WARNING WARNING WARNING
extern crate syntax_extension_with_dll_deps_1 as other;
extern crate syntax;
+extern crate syntax_pos;
extern crate rustc;
extern crate rustc_plugin;
-use syntax::ast::{TokenTree, Item, MetaItem};
-use syntax::codemap::Span;
+use syntax::ast::{Item, MetaItem};
use syntax::ext::base::*;
+use syntax::tokenstream::TokenTree;
+use syntax_pos::Span;
use rustc_plugin::Registry;
#[plugin_registrar]
extern crate rustc;
extern crate rustc_driver;
extern crate syntax;
+extern crate rustc_errors as errors;
use rustc::session::Session;
use rustc::session::config::{self, Input};
use rustc_driver::{driver, CompilerCalls, Compilation};
-use syntax::{diagnostics, errors};
use std::path::PathBuf;
fn early_callback(&mut self,
_: &getopts::Matches,
_: &config::Options,
- _: &diagnostics::registry::Registry,
+ _: &errors::registry::Registry,
_: config::ErrorOutputType)
-> Compilation {
self.count *= 2;
_: &config::Options,
_: &Option<PathBuf>,
_: &Option<PathBuf>,
- _: &diagnostics::registry::Registry)
+ _: &errors::registry::Registry)
-> Option<(Input, Option<PathBuf>)> {
panic!("This shouldn't happen");
}
#![feature(quote, rustc_private)]
extern crate syntax;
+extern crate syntax_pos;
-use syntax::codemap::DUMMY_SP;
use syntax::print::pprust::*;
use syntax::parse::token::intern;
+use syntax_pos::DUMMY_SP;
fn main() {
let ps = syntax::parse::ParseSess::new();
use syntax::parse::PResult;
fn syntax_extension(cx: &ExtCtxt) {
- let e_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, 1 + 2);
- let p_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, (x, 1 .. 4, *));
+ let e_toks : Vec<syntax::tokenstream::TokenTree> = quote_tokens!(cx, 1 + 2);
+ let p_toks : Vec<syntax::tokenstream::TokenTree> = quote_tokens!(cx, (x, 1 .. 4, *));
let a: P<syntax::ast::Expr> = quote_expr!(cx, 1 + 2);
let _b: Option<P<syntax::ast::Item>> = quote_item!(cx, static foo : isize = $e_toks; );
let _l: P<syntax::ast::Ty> = quote_ty!(cx, &isize);
- let _m: Vec<syntax::ast::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar);
+ let _m: Vec<syntax::tokenstream::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar);
let _n: syntax::ast::Attribute = quote_attr!(cx, #![cfg(foo, bar = "baz")]);
let _o: Option<P<syntax::ast::Item>> = quote_item!(cx, fn foo<T: ?Sized>() {});
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Issue #34183
+
+macro_rules! foo {
+ () => {
+ fn foo() { }
+ }
+}
+
+macro_rules! bar {
+ () => {
+ fn bar();
+ }
+}
+
+trait Bleh {
+ foo!();
+ bar!();
+}
+
+struct Test;
+
+impl Bleh for Test {
+ fn bar() {}
+}
+
+fn main() {
+ Test::bar();
+ Test::foo();
+}
use runtest::{ProcRes};
// These structs are a subset of the ones found in
-// `syntax::errors::json`.
+// `syntax::json`.
#[derive(RustcEncodable, RustcDecodable)]
struct Diagnostic {