}
// Converts a library file-stem into a cc -l argument
- fn unlib(config: @session::Config, stem: &str) -> ~str {
- if stem.starts_with("lib") &&
- config.os != abi::OsWin32 {
+ fn unlib(config: &session::Config, stem: &str) -> ~str {
+ if stem.starts_with("lib") && config.os != abi::OsWin32 {
stem.slice(3, stem.len()).to_owned()
} else {
stem.to_owned()
// what its name is
let dir = cratepath.dirname_str().unwrap();
if !dir.is_empty() { args.push("-L" + dir); }
- let libarg = unlib(sess.targ_cfg, cratepath.filestem_str().unwrap());
+ let libarg = unlib(&sess.targ_cfg, cratepath.filestem_str().unwrap());
args.push("-l" + libarg);
}
}
use middle;
use util::common::time;
use util::ppaux;
-use util::nodemap::NodeSet;
+use util::nodemap::{NodeMap, NodeSet};
use serialize::{json, Encodable};
use std::os;
use std::vec_ng::Vec;
use std::vec_ng;
-use collections::HashMap;
use getopts::{optopt, optmulti, optflag, optflagopt};
use getopts;
use syntax::ast;
pub fn source_name(input: &Input) -> ~str {
match *input {
- // FIXME (#9639): This needs to handle non-utf8 paths
- FileInput(ref ifile) => ifile.as_str().unwrap().to_str(),
- StrInput(_) => anon_src()
+ // FIXME (#9639): This needs to handle non-utf8 paths
+ FileInput(ref ifile) => ifile.as_str().unwrap().to_str(),
+ StrInput(_) => anon_src()
}
}
-> (ast::Crate, syntax::ast_map::Map) {
let time_passes = sess.time_passes();
- sess.building_library.set(session::building_library(sess.opts, &krate));
+ sess.building_library.set(session::building_library(&sess.opts, &krate));
sess.crate_types.set(session::collect_crate_types(sess,
krate.attrs
.as_slice()));
sess.macro_registrar_fn.with_mut(|r| *r =
time(time_passes, "looking for macro registrar", (), |_|
syntax::ext::registrar::find_macro_registrar(
- sess.span_diagnostic, krate)));
+ sess.diagnostic(), krate)));
let freevars = time(time_passes, "freevar finding", (), |_|
freevars::annotate_freevars(def_map, krate));
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
- let files: Vec<~str> = {
- let files = sess.codemap.files.borrow();
- files.get()
- .iter()
- .filter_map(|fmap| {
- if fmap.is_real_file() {
- Some(fmap.name.clone())
- } else {
- None
- }
- })
- .collect()
- };
+ let files: Vec<~str> = sess.codemap().files.borrow().get()
+ .iter().filter_map(|fmap| {
+ if fmap.deref().is_real_file() {
+ Some(fmap.deref().name.clone())
+ } else {
+ None
+ }
+ }).collect();
let mut file = try!(io::File::create(&deps_filename));
for path in out_filenames.iter() {
try!(write!(&mut file as &mut Writer,
("mips", abi::Mips)];
-pub fn build_target_config(sopts: @session::Options)
- -> @session::Config {
+pub fn build_target_config(sopts: &session::Options) -> session::Config {
let os = match get_os(sopts.target_triple) {
Some(os) => os,
None => early_error("unknown operating system")
abi::Arm => arm::get_target_strs(target_triple, os),
abi::Mips => mips::get_target_strs(target_triple, os)
};
- let target_cfg = @session::Config {
+ session::Config {
os: os,
arch: arch,
target_strs: target_strs,
int_type: int_type,
uint_type: uint_type,
- };
- return target_cfg;
+ }
}
pub fn host_triple() -> ~str {
matches.opt_present("crate-file-name"));
let cg = build_codegen_options(matches);
- @session::Options {
+ session::Options {
crate_types: crate_types,
gc: gc,
optimize: opt_level,
return cg;
}
-pub fn build_session(sopts: @session::Options,
+pub fn build_session(sopts: session::Options,
local_crate_source_file: Option<Path>)
-> Session {
- let codemap = @codemap::CodeMap::new();
+ let codemap = codemap::CodeMap::new();
let diagnostic_handler =
diagnostic::default_handler();
let span_diagnostic_handler =
diagnostic::mk_span_handler(diagnostic_handler, codemap);
- build_session_(sopts, local_crate_source_file, codemap, span_diagnostic_handler)
+ build_session_(sopts, local_crate_source_file, span_diagnostic_handler)
}
-pub fn build_session_(sopts: @session::Options,
+pub fn build_session_(sopts: session::Options,
local_crate_source_file: Option<Path>,
- codemap: @codemap::CodeMap,
- span_diagnostic_handler: @diagnostic::SpanHandler)
+ span_diagnostic: diagnostic::SpanHandler)
-> Session {
- let target_cfg = build_target_config(sopts);
- let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler, codemap);
+ let target_cfg = build_target_config(&sopts);
+ let p_s = parse::new_parse_sess_special_handler(span_diagnostic);
let default_sysroot = match sopts.maybe_sysroot {
Some(_) => None,
None => Some(filesearch::get_or_default_sysroot())
opts: sopts,
cstore: CStore::new(token::get_ident_interner()),
parse_sess: p_s,
- codemap: codemap,
// For a library crate, this is always none
entry_fn: RefCell::new(None),
entry_type: Cell::new(None),
macro_registrar_fn: RefCell::new(None),
- span_diagnostic: span_diagnostic_handler,
default_sysroot: default_sysroot,
building_library: Cell::new(false),
local_crate_source_file: local_crate_source_file,
working_dir: os::getcwd(),
- lints: RefCell::new(HashMap::new()),
+ lints: RefCell::new(NodeMap::new()),
node_id: Cell::new(1),
- crate_types: @RefCell::new(Vec::new()),
+ crate_types: RefCell::new(Vec::new()),
features: front::feature_gate::Features::new(),
recursion_limit: Cell::new(64),
}
use metadata::filesearch;
use metadata;
use middle::lint;
+use util::nodemap::NodeMap;
use syntax::attr::AttrMetaMethods;
use syntax::ast::NodeId;
use std::cell::{Cell, RefCell};
use std::vec_ng::Vec;
-use collections::{HashMap,HashSet};
+use collections::HashSet;
pub struct Config {
os: abi::Os,
}
pub struct Session {
- targ_cfg: @Config,
- opts: @Options,
+ targ_cfg: Config,
+ opts: Options,
cstore: metadata::cstore::CStore,
parse_sess: ParseSess,
- codemap: @codemap::CodeMap,
// For a library crate, this is always none
entry_fn: RefCell<Option<(NodeId, codemap::Span)>>,
entry_type: Cell<Option<EntryFnType>>,
- span_diagnostic: @diagnostic::SpanHandler,
macro_registrar_fn: RefCell<Option<ast::DefId>>,
default_sysroot: Option<Path>,
building_library: Cell<bool>,
// expected to be absolute. `None` means that there is no source file.
local_crate_source_file: Option<Path>,
working_dir: Path,
- lints: RefCell<HashMap<ast::NodeId,
- Vec<(lint::Lint, codemap::Span, ~str)> >>,
+ lints: RefCell<NodeMap<Vec<(lint::Lint, codemap::Span, ~str)>>>,
node_id: Cell<ast::NodeId>,
- crate_types: @RefCell<Vec<CrateType> >,
+ crate_types: RefCell<Vec<CrateType>>,
features: front::feature_gate::Features,
/// The maximum recursion limit for potentially infinitely recursive
impl Session {
pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
- self.span_diagnostic.span_fatal(sp, msg)
+ self.diagnostic().span_fatal(sp, msg)
}
pub fn fatal(&self, msg: &str) -> ! {
- self.span_diagnostic.handler().fatal(msg)
+ self.diagnostic().handler().fatal(msg)
}
pub fn span_err(&self, sp: Span, msg: &str) {
- self.span_diagnostic.span_err(sp, msg)
+ self.diagnostic().span_err(sp, msg)
}
pub fn err(&self, msg: &str) {
- self.span_diagnostic.handler().err(msg)
+ self.diagnostic().handler().err(msg)
}
pub fn err_count(&self) -> uint {
- self.span_diagnostic.handler().err_count()
+ self.diagnostic().handler().err_count()
}
pub fn has_errors(&self) -> bool {
- self.span_diagnostic.handler().has_errors()
+ self.diagnostic().handler().has_errors()
}
pub fn abort_if_errors(&self) {
- self.span_diagnostic.handler().abort_if_errors()
+ self.diagnostic().handler().abort_if_errors()
}
pub fn span_warn(&self, sp: Span, msg: &str) {
- self.span_diagnostic.span_warn(sp, msg)
+ self.diagnostic().span_warn(sp, msg)
}
pub fn warn(&self, msg: &str) {
- self.span_diagnostic.handler().warn(msg)
+ self.diagnostic().handler().warn(msg)
}
pub fn span_note(&self, sp: Span, msg: &str) {
- self.span_diagnostic.span_note(sp, msg)
+ self.diagnostic().span_note(sp, msg)
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
- self.span_diagnostic.span_end_note(sp, msg)
+ self.diagnostic().span_end_note(sp, msg)
}
pub fn note(&self, msg: &str) {
- self.span_diagnostic.handler().note(msg)
+ self.diagnostic().handler().note(msg)
}
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
- self.span_diagnostic.span_bug(sp, msg)
+ self.diagnostic().span_bug(sp, msg)
}
pub fn bug(&self, msg: &str) -> ! {
- self.span_diagnostic.handler().bug(msg)
+ self.diagnostic().handler().bug(msg)
}
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
- self.span_diagnostic.span_unimpl(sp, msg)
+ self.diagnostic().span_unimpl(sp, msg)
}
pub fn unimpl(&self, msg: &str) -> ! {
- self.span_diagnostic.handler().unimpl(msg)
+ self.diagnostic().handler().unimpl(msg)
}
pub fn add_lint(&self,
lint: lint::Lint,
v
}
- pub fn diagnostic(&self) -> @diagnostic::SpanHandler {
- self.span_diagnostic
+ pub fn diagnostic<'a>(&'a self) -> &'a diagnostic::SpanHandler {
+ &self.parse_sess.span_diagnostic
}
pub fn debugging_opt(&self, opt: u64) -> bool {
(self.opts.debugging_opts & opt) != 0
}
+ pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap {
+ &self.parse_sess.span_diagnostic.cm
+ }
// This exists to help with refactoring to eliminate impossible
// cases later on
pub fn impossible_case(&self, sp: Span, msg: &str) -> ! {
}
/// Some reasonable defaults
-pub fn basic_options() -> @Options {
- @Options {
+pub fn basic_options() -> Options {
+ Options {
crate_types: Vec::new(),
gc: false,
optimize: No,
d::parse_pretty(&sess, a)
});
match pretty {
- Some::<d::PpMode>(ppm) => {
- d::pretty_print_input(sess, cfg, &input, ppm);
- return;
- }
- None::<d::PpMode> => {/* continue */ }
+ Some::<d::PpMode>(ppm) => {
+ d::pretty_print_input(sess, cfg, &input, ppm);
+ return;
+ }
+ None::<d::PpMode> => {/* continue */ }
}
let ls = matches.opt_present("ls");
if ls {
match input {
- d::FileInput(ref ifile) => {
- let mut stdout = io::stdout();
- d::list_metadata(&sess, &(*ifile), &mut stdout).unwrap();
- }
- d::StrInput(_) => {
- d::early_error("can not list metadata for stdin");
- }
+ d::FileInput(ref ifile) => {
+ let mut stdout = io::stdout();
+ d::list_metadata(&sess, &(*ifile), &mut stdout).unwrap();
+ }
+ d::StrInput(_) => {
+ d::early_error("can not list metadata for stdin");
+ }
}
return;
}
- let (crate_id, crate_name, crate_file_name) = sopts.print_metas;
+ let (crate_id, crate_name, crate_file_name) = sess.opts.print_metas;
// these nasty nested conditions are to avoid doing extra work
if crate_id || crate_name || crate_file_name {
let attrs = parse_crate_attrs(&sess, &input);
}
fn warn_if_multiple_versions(e: &mut Env,
- diag: @SpanHandler,
+ diag: &SpanHandler,
crate_cache: &[cache_entry]) {
if crate_cache.len() != 0u {
let name = crate_cache[crate_cache.len() - 1].crate_id.name.clone();
let cstore = &tcx.sess.cstore;
let cdata = cstore.get_crate_data(class_id.krate);
let all_items = reader::get_doc(reader::Doc(cdata.data()), tag_items);
- let class_doc = expect(tcx.diag,
+ let class_doc = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(class_id.node, all_items),
|| format!("get_field_type: class ID {:?} not found",
class_id) );
- let the_field = expect(tcx.diag,
+ let the_field = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(def.node, class_doc),
|| format!("get_field_type: in class {:?}, field ID {:?} not found",
class_id, def) );
ii: InlinedItemRef|;
pub struct EncodeParams<'a> {
- diag: @SpanHandler,
+ diag: &'a SpanHandler,
tcx: &'a ty::ctxt,
reexports2: middle::resolve::ExportMap2,
item_symbols: &'a RefCell<NodeMap<~str>>,
}
pub struct EncodeContext<'a> {
- diag: @SpanHandler,
+ diag: &'a SpanHandler,
tcx: &'a ty::ctxt,
stats: @Stats,
reexports2: middle::resolve::ExportMap2,
}
debug!("encoding info for item at {}",
- ecx.tcx.sess.codemap.span_to_str(item.span));
+ ecx.tcx.sess.codemap().span_to_str(item.span));
let def_id = local_def(item.id);
match item.node {
fn visit_item(&mut self, item: &Item, _: ()) {
match item.node {
ItemMac(..) => {
- let def = self.ecx.tcx.sess.codemap.span_to_snippet(item.span)
+ let def = self.ecx.tcx.sess.codemap().span_to_snippet(item.span)
.expect("Unable to find source for macro");
self.ebml_w.start_tag(tag_macro_def);
self.ebml_w.wr_str(def);
// Get the encoded string for a type
pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> ~str {
let cx = &tyencode::ctxt {
- diag: tcx.diag,
+ diag: tcx.sess.diagnostic(),
ds: def_to_str,
tcx: tcx,
abbrevs: tyencode::ac_no_abbrevs};
}
}
-pub fn note_crateid_attr(diag: @SpanHandler, crateid: &CrateId) {
+pub fn note_crateid_attr(diag: &SpanHandler, crateid: &CrateId) {
diag.handler().note(format!("crate_id: {}", crateid.to_str()));
}
use syntax::ast::*;
use syntax::diagnostic::SpanHandler;
use syntax::parse::token;
-use syntax::print::pprust::*;
macro_rules! mywrite( ($wr:expr, $($arg:tt)*) => (
format_args!(|a| { mywrite($wr, a) }, $($arg)*)
) )
pub struct ctxt<'a> {
- diag: @SpanHandler,
+ diag: &'a SpanHandler,
// Def -> str Callback:
- ds: extern "Rust" fn(DefId) -> ~str,
+ ds: fn(DefId) -> ~str,
// The type context.
tcx: &'a ty::ctxt,
abbrevs: abbrev_ctxt
}
fn live_node_kind_to_str(lnk: LiveNodeKind, cx: &ty::ctxt) -> ~str {
- let cm = cx.sess.codemap;
+ let cm = cx.sess.codemap();
match lnk {
FreeVarNode(s) => format!("Free var node [{}]", cm.span_to_str(s)),
ExprNode(s) => format!("Expr node [{}]", cm.span_to_str(s)),
body.id={}, \
cx.parent={})",
id,
- visitor.sess.codemap.span_to_str(sp),
+ visitor.sess.codemap().span_to_str(sp),
body.id,
cx.parent);
let import_count = imports.get().len();
if index != import_count {
let sn = self.session
- .codemap
+ .codemap()
.span_to_snippet(imports.get().get(index).span)
.unwrap();
if sn.contains("::") {
// public or private item, we will check the correct thing, dependent on how the import
// is used.
fn finalize_import(&mut self, id: NodeId, span: Span) {
- debug!("finalizing import uses for {}", self.session.codemap.span_to_snippet(span));
+ debug!("finalizing import uses for {}", self.session.codemap().span_to_snippet(span));
if !self.used_imports.contains(&(id, TypeNS)) &&
!self.used_imports.contains(&(id, ValueNS)) {
pub fn add_span_comment(&self, sp: Span, text: &str) {
if self.ccx.sess().asm_comments() {
- let s = format!("{} ({})", text, self.ccx.sess().codemap.span_to_str(sp));
+ let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_str(sp));
debug!("{}", s);
self.add_comment(s);
}
pub fn filename_and_line_num_from_span(bcx: &Block, span: Span)
-> (ValueRef, ValueRef) {
- let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo);
+ let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
let filename_cstr = C_cstr(bcx.ccx(),
- token::intern_and_get_ident(loc.file.name));
+ token::intern_and_get_ident(loc.file.deref().name));
let filename = build::PointerCast(bcx, filename_cstr, Type::i8p(bcx.ccx()));
let line = C_int(bcx.ccx(), loc.line as int);
(filename, line)
let metadata_llmod = format!("{}_metadata", name).with_c_str(|buf| {
llvm::LLVMModuleCreateWithNameInContext(buf, llcx)
});
- let data_layout: &str = tcx.sess.targ_cfg.target_strs.data_layout;
- let targ_triple: &str = tcx.sess.targ_cfg.target_strs.target_triple;
- data_layout.with_c_str(|buf| {
+ tcx.sess.targ_cfg.target_strs.data_layout.with_c_str(|buf| {
llvm::LLVMSetDataLayout(llmod, buf);
llvm::LLVMSetDataLayout(metadata_llmod, buf);
});
- targ_triple.with_c_str(|buf| {
+ tcx.sess.targ_cfg.target_strs.target_triple.with_c_str(|buf| {
llvm::LLVMRustSetNormalizedTarget(llmod, buf);
llvm::LLVMRustSetNormalizedTarget(metadata_llmod, buf);
});
sp: Span,
fail_str: InternedString)
-> &'a Block<'a> {
- let v_fail_str = C_cstr(bcx.ccx(), fail_str);
- let _icx = push_ctxt("trans_fail_value");
let ccx = bcx.ccx();
- let sess = bcx.sess();
- let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo);
- let v_filename = C_cstr(bcx.ccx(),
- token::intern_and_get_ident(loc.file.name));
+ let v_fail_str = C_cstr(ccx, fail_str);
+ let _icx = push_ctxt("trans_fail_value");
+ let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
+ let v_filename = C_cstr(ccx, token::intern_and_get_ident(loc.file.deref().name));
let v_line = loc.line as int;
let v_str = PointerCast(bcx, v_fail_str, Type::i8p(ccx));
let v_filename = PointerCast(bcx, v_filename, Type::i8p(ccx));
let cx = fcx.ccx;
- debug!("set_source_location: {}", cx.sess().codemap.span_to_str(span));
+ debug!("set_source_location: {}", cx.sess().codemap().span_to_str(span));
if fcx.debug_context.get_ref(cx, span).source_locations_enabled.get() {
let loc = span_start(cx, span);
}
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
let function_type_metadata = unsafe {
let fn_signature = get_function_signature(cx, fn_ast_id, fn_decl, param_substs, span);
span: Span) {
let cx: &CrateContext = bcx.ccx();
- let filename = span_start(cx, span).file.name.clone();
+ let filename = span_start(cx, span).file.deref().name.clone();
let file_metadata = file_metadata(cx, filename);
let name = token::get_ident(variable_ident);
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
- let file_name = span_start(cx, definition_span).file.name.clone();
+ let file_name = span_start(cx, definition_span).file.deref().name.clone();
let file_metadata = file_metadata(cx, file_name);
let struct_metadata_stub = create_struct_stub(cx,
let tuple_llvm_type = type_of::type_of(cx, tuple_type);
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
UnfinishedMetadata {
cache_id: cache_id_for_type(tuple_type),
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id);
let loc = span_start(cx, definition_span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
// For empty enums there is an early exit. Just describe it as an empty struct with the
// appropriate type name
];
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
return composite_type_metadata(
cx,
assert!(member_descriptions.len() == member_llvm_types.len());
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
- return composite_type_metadata(
+ composite_type_metadata(
cx,
vec_llvm_type,
vec_type_name,
member_descriptions,
file_metadata,
file_metadata,
- span);
+ span)
}
fn vec_slice_metadata(cx: &CrateContext,
assert!(member_descriptions.len() == member_llvm_types.len());
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
return composite_type_metadata(
cx,
span: Span)
-> DICompositeType {
let loc = span_start(cx, span);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
let mut signature_metadata: Vec<DIType> =
Vec::with_capacity(signature.inputs.len() + 1);
let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, def_id);
- let file_name = span_start(cx, definition_span).file.name.clone();
+ let file_name = span_start(cx, definition_span).file.deref().name.clone();
let file_metadata = file_metadata(cx, file_name);
let trait_llvm_type = type_of::type_of(cx, trait_type);
/// Return codemap::Loc corresponding to the beginning of the span
fn span_start(cx: &CrateContext, span: Span) -> codemap::Loc {
- cx.sess().codemap.lookup_char_pos(span.lo)
+ cx.sess().codemap().lookup_char_pos(span.lo)
}
fn size_and_align_of(cx: &CrateContext, llvm_type: Type) -> (u64, u64) {
&mut Vec<ScopeStackEntry> ,
&mut HashMap<ast::NodeId, DIScope>|) {
// Create a new lexical scope and push it onto the stack
- let loc = cx.sess().codemap.lookup_char_pos(scope_span.lo);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
let parent_scope = scope_stack.last().unwrap().scope_metadata;
let scope_metadata = unsafe {
if need_new_scope {
// Create a new lexical scope and push it onto the stack
- let loc = cx.sess().codemap.lookup_char_pos(pat.span.lo);
- let file_metadata = file_metadata(cx, loc.file.name);
+ let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo);
+ let file_metadata = file_metadata(cx, loc.file.deref().name);
let parent_scope = scope_stack.last().unwrap().scope_metadata;
let scope_metadata = unsafe {
/// generates so that it can be reused and doesn't have to be redone
/// later on.
pub struct ctxt {
- diag: @syntax::diagnostic::SpanHandler,
// Specifically use a speedy hash algorithm for this hash map, it's used
// quite often.
interner: RefCell<FnvHashMap<intern_key, ~t_box_>>,
ctxt {
named_region_map: named_region_map,
item_variance_map: RefCell::new(DefIdMap::new()),
- diag: s.diagnostic(),
interner: RefCell::new(FnvHashMap::new()),
next_id: Cell::new(primitives::LAST_PRIMITIVE_ID),
sess: s,
};
fn explain_span(cx: &ctxt, heading: &str, span: Span)
- -> (~str, Option<Span>)
- {
- let lo = cx.sess.codemap.lookup_char_pos_adj(span.lo);
+ -> (~str, Option<Span>) {
+ let lo = cx.sess.codemap().lookup_char_pos_adj(span.lo);
(format!("the {} at {}:{}", heading,
lo.line, lo.col.to_uint()), Some(span))
}
match cx.map.find(node_id) {
Some(ast_map::NodeBlock(ref blk)) => {
format!("<block at {}>",
- cx.sess.codemap.span_to_str(blk.span))
+ cx.sess.codemap().span_to_str(blk.span))
}
Some(ast_map::NodeExpr(expr)) => {
match expr.node {
ast::ExprCall(..) => {
format!("<call at {}>",
- cx.sess.codemap.span_to_str(expr.span))
+ cx.sess.codemap().span_to_str(expr.span))
}
ast::ExprMatch(..) => {
format!("<match at {}>",
- cx.sess.codemap.span_to_str(expr.span))
+ cx.sess.codemap().span_to_str(expr.span))
}
ast::ExprAssignOp(..) |
ast::ExprUnary(..) |
ast::ExprBinary(..) |
ast::ExprIndex(..) => {
format!("<method at {}>",
- cx.sess.codemap.span_to_str(expr.span))
+ cx.sess.codemap().span_to_str(expr.span))
}
_ => {
format!("<expression at {}>",
- cx.sess.codemap.span_to_str(expr.span))
+ cx.sess.codemap().span_to_str(expr.span))
}
}
}
impl Repr for Span {
fn repr(&self, tcx: &ctxt) -> ~str {
- tcx.sess.codemap.span_to_str(*self)
+ tcx.sess.codemap().span_to_str(*self)
}
}
None
}
-pub fn require_unique_names(diagnostic: @SpanHandler, metas: &[@MetaItem]) {
+pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[@MetaItem]) {
let mut set = HashSet::new();
for meta in metas.iter() {
let name = meta.name();
 * present (before fields, if any) with that type; representation
* optimizations which would remove it will not be done.
*/
-pub fn find_repr_attr(diagnostic: @SpanHandler, attr: @ast::MetaItem, acc: ReprAttr)
+pub fn find_repr_attr(diagnostic: &SpanHandler, attr: @ast::MetaItem, acc: ReprAttr)
-> ReprAttr {
let mut acc = acc;
match attr.node {
// Not a "repr" hint: ignore.
_ => { }
}
- return acc;
+ acc
}
fn int_type_of_word(s: &str) -> Option<IntType> {
use std::cell::RefCell;
use std::cmp;
+use std::rc::Rc;
use std::vec_ng::Vec;
use serialize::{Encodable, Decodable, Encoder, Decoder};
/// A source code location used for error reporting
pub struct Loc {
/// Information about the original source
- file: @FileMap,
+ file: Rc<FileMap>,
/// The (1-based) line number
line: uint,
/// The (0-based) column offset
filename: FileName,
line: uint,
col: CharPos,
- file: Option<@FileMap>,
+ file: Option<Rc<FileMap>>,
}
// used to be structural records. Better names, anyone?
-pub struct FileMapAndLine {fm: @FileMap, line: uint}
-pub struct FileMapAndBytePos {fm: @FileMap, pos: BytePos}
+pub struct FileMapAndLine {fm: Rc<FileMap>, line: uint}
+pub struct FileMapAndBytePos {fm: Rc<FileMap>, pos: BytePos}
#[deriving(Clone, Hash, Show)]
pub enum MacroFormat {
pub type FileName = ~str;
-pub struct FileLines
-{
- file: @FileMap,
- lines: Vec<uint> }
+pub struct FileLines {
+ file: Rc<FileMap>,
+ lines: Vec<uint>
+}
/// Identifies an offset of a multi-byte character in a FileMap
pub struct MultiByteChar {
pos: pos,
bytes: bytes,
};
- let mut multibyte_chars = self.multibyte_chars.borrow_mut();
- multibyte_chars.get().push(mbc);
+ self.multibyte_chars.borrow_mut().get().push(mbc);
}
pub fn is_real_file(&self) -> bool {
}
pub struct CodeMap {
- files: RefCell<Vec<@FileMap> >
+ files: RefCell<Vec<Rc<FileMap>>>
}
impl CodeMap {
}
}
- pub fn new_filemap(&self, filename: FileName, mut src: ~str) -> @FileMap {
+ pub fn new_filemap(&self, filename: FileName, mut src: ~str) -> Rc<FileMap> {
let mut files = self.files.borrow_mut();
let start_pos = match files.get().last() {
None => 0,
- Some(last) => last.start_pos.to_uint() + last.src.len(),
+ Some(last) => last.deref().start_pos.to_uint() + last.deref().src.len(),
};
// Append '\n' in case it's not already there.
src.push_char('\n');
}
- let filemap = @FileMap {
+ let filemap = Rc::new(FileMap {
name: filename,
src: src,
start_pos: Pos::from_uint(start_pos),
lines: RefCell::new(Vec::new()),
multibyte_chars: RefCell::new(Vec::new()),
- };
+ });
- files.get().push(filemap);
+ files.get().push(filemap.clone());
- return filemap;
+ filemap
}
pub fn mk_substr_filename(&self, sp: Span) -> ~str {
let pos = self.lookup_char_pos(sp.lo);
- return format!("<{}:{}:{}>", pos.file.name,
- pos.line, pos.col.to_uint() + 1)
+ format!("<{}:{}:{}>", pos.file.deref().name, pos.line, pos.col.to_uint() + 1)
}
/// Lookup source information about a BytePos
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
- return self.lookup_pos(pos);
+ self.lookup_pos(pos)
}
pub fn lookup_char_pos_adj(&self, pos: BytePos) -> LocWithOpt {
let loc = self.lookup_char_pos(pos);
LocWithOpt {
- filename: loc.file.name.to_str(),
+ filename: loc.file.deref().name.to_str(),
line: loc.line,
col: loc.col,
file: Some(loc.file)
}
pub fn span_to_str(&self, sp: Span) -> ~str {
- {
- let files = self.files.borrow();
- if files.get().len() == 0 && sp == DUMMY_SP {
- return ~"no-location";
- }
+ if self.files.borrow().get().len() == 0 && sp == DUMMY_SP {
+ return ~"no-location";
}
let lo = self.lookup_char_pos_adj(sp.lo);
}
pub fn span_to_filename(&self, sp: Span) -> FileName {
- let lo = self.lookup_char_pos(sp.lo);
- lo.file.name.to_str()
+ self.lookup_char_pos(sp.lo).file.deref().name.to_str()
}
- pub fn span_to_lines(&self, sp: Span) -> @FileLines {
+ pub fn span_to_lines(&self, sp: Span) -> FileLines {
let lo = self.lookup_char_pos(sp.lo);
let hi = self.lookup_char_pos(sp.hi);
let mut lines = Vec::new();
for i in range(lo.line - 1u, hi.line as uint) {
lines.push(i);
};
- return @FileLines {file: lo.file, lines: lines};
+ FileLines {file: lo.file, lines: lines}
}
pub fn span_to_snippet(&self, sp: Span) -> Option<~str> {
// it's testing isn't true for all spans in the AST, so to allow the
// caller to not have to fail (and it can't catch it since the CodeMap
// isn't sendable), return None
- if begin.fm.start_pos != end.fm.start_pos {
+ if begin.fm.deref().start_pos != end.fm.deref().start_pos {
None
} else {
- Some(begin.fm.src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned())
+ Some(begin.fm.deref().src.slice( begin.pos.to_uint(), end.pos.to_uint()).to_owned())
}
}
- pub fn get_filemap(&self, filename: &str) -> @FileMap {
- let files = self.files.borrow();
- for fm in files.get().iter() {
- if filename == fm.name {
- return *fm
+ pub fn get_filemap(&self, filename: &str) -> Rc<FileMap> {
+ for fm in self.files.borrow().get().iter() {
+ if filename == fm.deref().name {
+ return fm.clone();
}
}
- //XXjdm the following triggers a mismatched type bug
- // (or expected function, found _|_)
- fail!(); // ("asking for " + filename + " which we don't know about");
+ fail!("asking for {} which we don't know about", filename);
}
-}
-impl CodeMap {
fn lookup_filemap_idx(&self, pos: BytePos) -> uint {
let files = self.files.borrow();
let files = files.get();
let mut b = len;
while b - a > 1u {
let m = (a + b) / 2u;
- if files.get(m).start_pos > pos {
+ if files.get(m).deref().start_pos > pos {
b = m;
} else {
a = m;
// filemap, but are not the filemaps we want (because they are length 0, they cannot
// contain what we are looking for). So, rewind until we find a useful filemap.
loop {
- let lines = files.get(a).lines.borrow();
+ let lines = files.get(a).deref().lines.borrow();
let lines = lines.get();
if lines.len() > 0 {
break;
return a;
}
- fn lookup_line(&self, pos: BytePos) -> FileMapAndLine
- {
+ fn lookup_line(&self, pos: BytePos) -> FileMapAndLine {
let idx = self.lookup_filemap_idx(pos);
let files = self.files.borrow();
- let f = *files.get().get(idx);
+ let f = files.get().get(idx).clone();
let mut a = 0u;
- let mut lines = f.lines.borrow_mut();
- let mut b = lines.get().len();
- while b - a > 1u {
- let m = (a + b) / 2u;
- if *lines.get().get(m) > pos { b = m; } else { a = m; }
+ {
+ let mut lines = f.deref().lines.borrow_mut();
+ let mut b = lines.get().len();
+ while b - a > 1u {
+ let m = (a + b) / 2u;
+ if *lines.get().get(m) > pos { b = m; } else { a = m; }
+ }
}
- return FileMapAndLine {fm: f, line: a};
+ FileMapAndLine {fm: f, line: a}
}
fn lookup_pos(&self, pos: BytePos) -> Loc {
let FileMapAndLine {fm: f, line: a} = self.lookup_line(pos);
let line = a + 1u; // Line numbers start at 1
let chpos = self.bytepos_to_file_charpos(pos);
- let lines = f.lines.borrow();
- let linebpos = *lines.get().get(a);
+ let linebpos = *f.deref().lines.borrow().get().get(a);
let linechpos = self.bytepos_to_file_charpos(linebpos);
debug!("codemap: byte pos {:?} is on the line at byte pos {:?}",
pos, linebpos);
chpos, linechpos);
debug!("codemap: byte is on line: {:?}", line);
assert!(chpos >= linechpos);
- return Loc {
+ Loc {
file: f,
line: line,
col: chpos - linechpos
- };
+ }
}
- fn lookup_byte_offset(&self, bpos: BytePos)
- -> FileMapAndBytePos {
+ fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
let idx = self.lookup_filemap_idx(bpos);
- let files = self.files.borrow();
- let fm = *files.get().get(idx);
- let offset = bpos - fm.start_pos;
- return FileMapAndBytePos {fm: fm, pos: offset};
+ let fm = self.files.borrow().get().get(idx).clone();
+ let offset = bpos - fm.deref().start_pos;
+ FileMapAndBytePos {fm: fm, pos: offset}
}
// Converts an absolute BytePos to a CharPos relative to the filemap.
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
- let multibyte_chars = map.multibyte_chars.borrow();
- for mbc in multibyte_chars.get().iter() {
+ for mbc in map.deref().multibyte_chars.borrow().get().iter() {
debug!("codemap: {:?}-byte char at {:?}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
}
}
- assert!(map.start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
- CharPos(bpos.to_uint() - map.start_pos.to_uint() - total_extra_bytes)
+ assert!(map.deref().start_pos.to_uint() + total_extra_bytes <= bpos.to_uint());
+ CharPos(bpos.to_uint() - map.deref().start_pos.to_uint() - total_extra_bytes)
}
}
// accepts span information for source-location
// reporting.
pub struct SpanHandler {
- handler: @Handler,
- cm: @codemap::CodeMap,
+ handler: Handler,
+ cm: codemap::CodeMap,
}
impl SpanHandler {
pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
- self.handler.emit(Some((&*self.cm, sp)), msg, Fatal);
+ self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
fail!(FatalError);
}
pub fn span_err(&self, sp: Span, msg: &str) {
- self.handler.emit(Some((&*self.cm, sp)), msg, Error);
+ self.handler.emit(Some((&self.cm, sp)), msg, Error);
self.handler.bump_err_count();
}
pub fn span_warn(&self, sp: Span, msg: &str) {
- self.handler.emit(Some((&*self.cm, sp)), msg, Warning);
+ self.handler.emit(Some((&self.cm, sp)), msg, Warning);
}
pub fn span_note(&self, sp: Span, msg: &str) {
- self.handler.emit(Some((&*self.cm, sp)), msg, Note);
+ self.handler.emit(Some((&self.cm, sp)), msg, Note);
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
- self.handler.custom_emit(&*self.cm, sp, msg, Note);
+ self.handler.custom_emit(&self.cm, sp, msg, Note);
}
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
- self.handler.emit(Some((&*self.cm, sp)), msg, Bug);
+ self.handler.emit(Some((&self.cm, sp)), msg, Bug);
fail!(ExplicitBug);
}
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, ~"unimplemented " + msg);
}
- pub fn handler(&self) -> @Handler {
- self.handler
+ pub fn handler<'a>(&'a self) -> &'a Handler {
+ &self.handler
}
}
}
}
-pub fn mk_span_handler(handler: @Handler, cm: @codemap::CodeMap)
- -> @SpanHandler {
- @SpanHandler {
+pub fn mk_span_handler(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
+ SpanHandler {
handler: handler,
cm: cm,
}
}
-pub fn default_handler() -> @Handler {
+pub fn default_handler() -> Handler {
mk_handler(~EmitterWriter::stderr())
}
-pub fn mk_handler(e: ~Emitter) -> @Handler {
- @Handler {
+pub fn mk_handler(e: ~Emitter) -> Handler {
+ Handler {
err_count: Cell::new(0),
emit: RefCell::new(e),
}
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
- lines: &codemap::FileLines) -> io::IoResult<()> {
- let fm = lines.file;
+ lines: codemap::FileLines) -> io::IoResult<()> {
+ let fm = lines.file.deref();
let mut elided = false;
let mut display_lines = lines.lines.as_slice();
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
- lines: &codemap::FileLines) -> io::IoResult<()> {
- let fm = lines.file;
+ lines: codemap::FileLines) -> io::IoResult<()> {
+ let fm = lines.file.deref();
let lines = lines.lines.as_slice();
if lines.len() > MAX_LINES {
Ok(())
}
-pub fn expect<T:Clone>(diag: @SpanHandler, opt: Option<T>, msg: || -> ~str)
- -> T {
+pub fn expect<T:Clone>(diag: &SpanHandler, opt: Option<T>, msg: || -> ~str) -> T {
match opt {
Some(ref t) => (*t).clone(),
None => diag.handler().bug(msg()),
}
}
- pub fn codemap(&self) -> @CodeMap { self.parse_sess.cm }
+ pub fn codemap(&self) -> &'a CodeMap { &self.parse_sess.span_diagnostic.cm }
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
pub fn cfg(&self) -> ast::CrateConfig { self.cfg.clone() }
pub fn call_site(&self) -> Span {
vec!(
self.expr_str(span, msg),
self.expr_str(span,
- token::intern_and_get_ident(loc.file.name)),
+ token::intern_and_get_ident(loc.file.deref().name)),
self.expr_uint(span, loc.line)))
}
}
}
-pub fn find_macro_registrar(diagnostic: @diagnostic::SpanHandler,
+pub fn find_macro_registrar(diagnostic: &diagnostic::SpanHandler,
krate: &ast::Crate) -> Option<ast::DefId> {
let mut ctx = MacroRegistrarContext { registrars: Vec::new() };
visit::walk_crate(&mut ctx, krate, ());
let topmost = topmost_expn_info(cx.backtrace().unwrap());
let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
- let filename = token::intern_and_get_ident(loc.file.name);
+ let filename = token::intern_and_get_ident(loc.file.deref().name);
base::MRExpr(cx.expr_str(topmost.call_site, filename))
}
// dependency information
let filename = file.display().to_str();
let interned = token::intern_and_get_ident(src);
- cx.parse_sess.cm.new_filemap(filename, src);
+ cx.codemap().new_filemap(filename, src);
base::MRExpr(cx.expr_str(sp, interned))
}
-> base::MacResult {
let sess = cx.parse_sess();
let cfg = cx.cfg();
- let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic,
+ let tt_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic,
None,
tt.iter().map(|x| (*x).clone()).collect());
let mut rust_parser = Parser(sess, cfg.clone(), tt_rdr.dup());
let mut best_fail_spot = DUMMY_SP;
let mut best_fail_msg = ~"internal error: ran no matchers";
- let s_d = cx.parse_sess().span_diagnostic;
-
for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
match **lhs {
MatchedNonterminal(NtMatchers(ref mtcs)) => {
// `None` is because we're not interpolating
- let arg_rdr = new_tt_reader(s_d,
+ let arg_rdr = new_tt_reader(&cx.parse_sess().span_diagnostic,
None,
arg.iter()
.map(|x| (*x).clone())
_ => cx.span_bug(sp, "bad thing in rhs")
};
// rhs has holes ( `$id` and `$(...)` that need filled)
- let trncbr = new_tt_reader(s_d, Some(named_matches),
+ let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
+ Some(named_matches),
rhs);
let p = Parser(cx.parse_sess(), cx.cfg(), ~trncbr);
// Let the context choose how to interpret the result.
// Parse the macro_rules! invocation (`none` is for no interpolations):
- let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic,
+ let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic,
None,
arg.clone());
let argument_map = parse_or_else(cx.parse_sess(),
up: Option<@TtFrame>,
}
-pub struct TtReader {
- sp_diag: @SpanHandler,
+pub struct TtReader<'a> {
+ sp_diag: &'a SpanHandler,
// the unzipped tree:
priv stack: RefCell<@TtFrame>,
/* for MBE-style macro transcription */
/** This can do Macro-By-Example transcription. On the other hand, if
* `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and
* should) be none. */
-pub fn new_tt_reader(sp_diag: @SpanHandler,
- interp: Option<HashMap<Ident, @NamedMatch>>,
- src: Vec<ast::TokenTree> )
- -> TtReader {
+pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
+ interp: Option<HashMap<Ident, @NamedMatch>>,
+ src: Vec<ast::TokenTree> )
+ -> TtReader<'a> {
let r = TtReader {
sp_diag: sp_diag,
stack: RefCell::new(@TtFrame {
cur_span: RefCell::new(DUMMY_SP),
};
tt_next_token(&r); /* get cur_tok and cur_span set up */
- return r;
+ r
}
fn dup_tt_frame(f: @TtFrame) -> @TtFrame {
}
}
-pub fn dup_tt_reader(r: &TtReader) -> TtReader {
+pub fn dup_tt_reader<'a>(r: &TtReader<'a>) -> TtReader<'a> {
TtReader {
sp_diag: r.sp_diag,
stack: RefCell::new(dup_tt_frame(r.stack.get())),
// it appears this function is called only from pprust... that's
// probably not a good thing.
pub fn gather_comments_and_literals(span_diagnostic:
- @diagnostic::SpanHandler,
+ &diagnostic::SpanHandler,
path: ~str,
srdr: &mut io::Reader)
- -> (Vec<Comment> , Vec<Literal> ) {
+ -> (Vec<Comment>, Vec<Literal>) {
let src = srdr.read_to_end().unwrap();
let src = str::from_utf8_owned(src).unwrap();
let cm = CodeMap::new();
use std::cell::{Cell, RefCell};
use std::char;
+use std::rc::Rc;
use std::mem::replace;
use std::num::from_str_radix;
fn is_eof(&self) -> bool;
fn next_token(&self) -> TokenAndSpan;
fn fatal(&self, ~str) -> !;
- fn span_diag(&self) -> @SpanHandler;
+ fn span_diag<'a>(&'a self) -> &'a SpanHandler;
fn peek(&self) -> TokenAndSpan;
fn dup(&self) -> ~Reader:;
}
sp: Span,
}
-pub struct StringReader {
- span_diagnostic: @SpanHandler,
+pub struct StringReader<'a> {
+ span_diagnostic: &'a SpanHandler,
// The absolute offset within the codemap of the next character to read
pos: Cell<BytePos>,
// The absolute offset within the codemap of the last character read(curr)
col: Cell<CharPos>,
// The last character to be read
curr: Cell<Option<char>>,
- filemap: @codemap::FileMap,
+ filemap: Rc<codemap::FileMap>,
/* cached: */
peek_tok: RefCell<token::Token>,
peek_span: RefCell<Span>,
}
-impl StringReader {
+impl<'a> StringReader<'a> {
pub fn curr_is(&self, c: char) -> bool {
self.curr.get() == Some(c)
}
}
-pub fn new_string_reader(span_diagnostic: @SpanHandler,
- filemap: @codemap::FileMap)
- -> StringReader {
+pub fn new_string_reader<'a>(span_diagnostic: &'a SpanHandler,
+ filemap: Rc<codemap::FileMap>)
+ -> StringReader<'a> {
let r = new_low_level_string_reader(span_diagnostic, filemap);
string_advance_token(&r); /* fill in peek_* */
r
}
/* For comments.rs, which hackily pokes into 'pos' and 'curr' */
-pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler,
- filemap: @codemap::FileMap)
- -> StringReader {
+pub fn new_low_level_string_reader<'a>(span_diagnostic: &'a SpanHandler,
+ filemap: Rc<codemap::FileMap>)
+ -> StringReader<'a> {
// Force the initial reader bump to start on a fresh line
let initial_char = '\n';
let r = StringReader {
span_diagnostic: span_diagnostic,
- pos: Cell::new(filemap.start_pos),
- last_pos: Cell::new(filemap.start_pos),
+ pos: Cell::new(filemap.deref().start_pos),
+ last_pos: Cell::new(filemap.deref().start_pos),
col: Cell::new(CharPos(0)),
curr: Cell::new(Some(initial_char)),
filemap: filemap,
// duplicating the string reader is probably a bad idea, in
// that using them will cause interleaved pushes of line
// offsets to the underlying filemap...
-fn dup_string_reader(r: &StringReader) -> StringReader {
+fn dup_string_reader<'a>(r: &StringReader<'a>) -> StringReader<'a> {
StringReader {
span_diagnostic: r.span_diagnostic,
pos: Cell::new(r.pos.get()),
last_pos: Cell::new(r.last_pos.get()),
col: Cell::new(r.col.get()),
curr: Cell::new(r.curr.get()),
- filemap: r.filemap,
+ filemap: r.filemap.clone(),
peek_tok: r.peek_tok.clone(),
peek_span: r.peek_span.clone(),
}
}
-impl Reader for StringReader {
+impl<'a> Reader for StringReader<'a> {
fn is_eof(&self) -> bool { is_eof(self) }
// return the next token. EFFECT: advances the string_reader.
fn next_token(&self) -> TokenAndSpan {
fn fatal(&self, m: ~str) -> ! {
self.span_diagnostic.span_fatal(self.peek_span.get(), m)
}
- fn span_diag(&self) -> @SpanHandler { self.span_diagnostic }
+ fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic }
fn peek(&self) -> TokenAndSpan {
// FIXME(pcwalton): Bad copy!
TokenAndSpan {
fn dup(&self) -> ~Reader: { ~dup_string_reader(self) as ~Reader: }
}
-impl Reader for TtReader {
+impl<'a> Reader for TtReader<'a> {
fn is_eof(&self) -> bool {
let cur_tok = self.cur_tok.borrow();
*cur_tok.get() == token::EOF
fn fatal(&self, m: ~str) -> ! {
self.sp_diag.span_fatal(self.cur_span.get(), m);
}
- fn span_diag(&self) -> @SpanHandler { self.sp_diag }
+ fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag }
fn peek(&self) -> TokenAndSpan {
TokenAndSpan {
tok: self.cur_tok.get(),
-> ! {
let mut m = m;
m.push_str(": ");
- let s = rdr.filemap.src.slice(
+ let s = rdr.filemap.deref().src.slice(
byte_offset(rdr, from_pos).to_uint(),
byte_offset(rdr, to_pos).to_uint());
m.push_str(s);
}
fn byte_offset(rdr: &StringReader, pos: BytePos) -> BytePos {
- (pos - rdr.filemap.start_pos)
+ (pos - rdr.filemap.deref().start_pos)
}
/// Calls `f` with a string slice of the source text spanning from `start`
end: BytePos,
f: |s: &str| -> T)
-> T {
- f(rdr.filemap.src.slice(
+ f(rdr.filemap.deref().src.slice(
byte_offset(rdr, start).to_uint(),
byte_offset(rdr, end).to_uint()))
}
pub fn bump(rdr: &StringReader) {
rdr.last_pos.set(rdr.pos.get());
let current_byte_offset = byte_offset(rdr, rdr.pos.get()).to_uint();
- if current_byte_offset < (rdr.filemap.src).len() {
+ if current_byte_offset < rdr.filemap.deref().src.len() {
assert!(rdr.curr.get().is_some());
let last_char = rdr.curr.get().unwrap();
- let next = rdr.filemap.src.char_range_at(current_byte_offset);
+ let next = rdr.filemap.deref().src.char_range_at(current_byte_offset);
let byte_offset_diff = next.next - current_byte_offset;
rdr.pos.set(rdr.pos.get() + Pos::from_uint(byte_offset_diff));
rdr.curr.set(Some(next.ch));
rdr.col.set(rdr.col.get() + CharPos(1u));
if last_char == '\n' {
- rdr.filemap.next_line(rdr.last_pos.get());
+ rdr.filemap.deref().next_line(rdr.last_pos.get());
rdr.col.set(CharPos(0u));
}
if byte_offset_diff > 1 {
- rdr.filemap.record_multibyte_char(rdr.last_pos.get(), byte_offset_diff);
+ rdr.filemap.deref().record_multibyte_char(rdr.last_pos.get(), byte_offset_diff);
}
} else {
rdr.curr.set(None);
}
pub fn nextch(rdr: &StringReader) -> Option<char> {
let offset = byte_offset(rdr, rdr.pos.get()).to_uint();
- if offset < (rdr.filemap.src).len() {
- Some(rdr.filemap.src.char_at(offset))
+ if offset < rdr.filemap.deref().src.len() {
+ Some(rdr.filemap.deref().src.char_at(offset))
} else {
None
}
-> Option<TokenAndSpan> {
if rdr.curr_is('/') {
match nextch(rdr) {
- Some('/') => {
- bump(rdr);
- bump(rdr);
- // line comments starting with "///" or "//!" are doc-comments
- if rdr.curr_is('/') || rdr.curr_is('!') {
- let start_bpos = rdr.pos.get() - BytePos(3);
- while !rdr.curr_is('\n') && !is_eof(rdr) {
- bump(rdr);
- }
- let ret = with_str_from(rdr, start_bpos, |string| {
- // but comments with only more "/"s are not
- if !is_line_non_doc_comment(string) {
- Some(TokenAndSpan{
- tok: token::DOC_COMMENT(str_to_ident(string)),
- sp: codemap::mk_sp(start_bpos, rdr.pos.get())
- })
- } else {
- None
+ Some('/') => {
+ bump(rdr);
+ bump(rdr);
+ // line comments starting with "///" or "//!" are doc-comments
+ if rdr.curr_is('/') || rdr.curr_is('!') {
+ let start_bpos = rdr.pos.get() - BytePos(3);
+ while !rdr.curr_is('\n') && !is_eof(rdr) {
+ bump(rdr);
}
- });
+ let ret = with_str_from(rdr, start_bpos, |string| {
+ // but comments with only more "/"s are not
+ if !is_line_non_doc_comment(string) {
+ Some(TokenAndSpan{
+ tok: token::DOC_COMMENT(str_to_ident(string)),
+ sp: codemap::mk_sp(start_bpos, rdr.pos.get())
+ })
+ } else {
+ None
+ }
+ });
- if ret.is_some() {
- return ret;
+ if ret.is_some() {
+ return ret;
+ }
+ } else {
+ while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); }
}
- } else {
- while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); }
+ // Restart whitespace munch.
+ consume_whitespace_and_comments(rdr)
}
- // Restart whitespace munch.
- return consume_whitespace_and_comments(rdr);
- }
- Some('*') => { bump(rdr); bump(rdr); return consume_block_comment(rdr); }
- _ => ()
+ Some('*') => { bump(rdr); bump(rdr); consume_block_comment(rdr) }
+ _ => None
}
} else if rdr.curr_is('#') {
if nextch_is(rdr, '!') {
// I guess this is the only way to figure out if
// we're at the beginning of the file...
- let cmap = @CodeMap::new();
- {
- let mut files = cmap.files.borrow_mut();
- files.get().push(rdr.filemap);
- }
+ let cmap = CodeMap::new();
+ cmap.files.borrow_mut().get().push(rdr.filemap.clone());
let loc = cmap.lookup_char_pos_adj(rdr.last_pos.get());
if loc.line == 1u && loc.col == CharPos(0u) {
while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); }
return consume_whitespace_and_comments(rdr);
}
}
+ None
+ } else {
+ None
}
- return None;
}
pub fn is_block_non_doc_comment(s: &str) -> bool {
let writer = ~util::NullWriter;
let emitter = diagnostic::EmitterWriter::new(writer);
let handler = diagnostic::mk_handler(~emitter);
- let span_handler = diagnostic::mk_span_handler(handler, @cm);
+ let span_handler = diagnostic::mk_span_handler(handler, cm);
Env {
string_reader: new_string_reader(span_handler,fm)
}
use ast;
use codemap::{Span, CodeMap, FileMap};
-use codemap;
use diagnostic::{SpanHandler, mk_span_handler, default_handler};
use parse::attr::ParserAttr;
use parse::parser::Parser;
use std::cell::RefCell;
use std::io::File;
+use std::rc::Rc;
use std::str;
use std::vec_ng::Vec;
// info about a parsing session.
pub struct ParseSess {
- cm: @codemap::CodeMap, // better be the same as the one in the reader!
- span_diagnostic: @SpanHandler, // better be the same as the one in the reader!
+ span_diagnostic: SpanHandler, // better be the same as the one in the reader!
/// Used to determine and report recursive mod inclusions
- included_mod_stack: RefCell<Vec<Path> >,
+ included_mod_stack: RefCell<Vec<Path>>,
}
pub fn new_parse_sess() -> ParseSess {
- let cm = @CodeMap::new();
ParseSess {
- cm: cm,
- span_diagnostic: mk_span_handler(default_handler(), cm),
+ span_diagnostic: mk_span_handler(default_handler(), CodeMap::new()),
included_mod_stack: RefCell::new(Vec::new()),
}
}
-pub fn new_parse_sess_special_handler(sh: @SpanHandler,
- cm: @codemap::CodeMap)
- -> ParseSess {
+pub fn new_parse_sess_special_handler(sh: SpanHandler) -> ParseSess {
ParseSess {
- cm: cm,
span_diagnostic: sh,
included_mod_stack: RefCell::new(Vec::new()),
}
// Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess,
- cfg: ast::CrateConfig,
- name: ~str,
- source: ~str)
- -> Parser<'a> {
- filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg)
+ cfg: ast::CrateConfig,
+ name: ~str,
+ source: ~str)
+ -> Parser<'a> {
+ filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg)
}
/// Create a new parser, handling errors as appropriate
/// if the file doesn't exist
-pub fn new_parser_from_file<'a>(
- sess: &'a ParseSess,
- cfg: ast::CrateConfig,
- path: &Path
-) -> Parser<'a> {
- filemap_to_parser(sess,file_to_filemap(sess,path,None),cfg)
+pub fn new_parser_from_file<'a>(sess: &'a ParseSess,
+ cfg: ast::CrateConfig,
+ path: &Path) -> Parser<'a> {
+ filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg)
}
/// Given a session, a crate config, a path, and a span, add
/// the file at the given path to the codemap, and return a parser.
/// On an error, use the given span as the source of the problem.
-pub fn new_sub_parser_from_file<'a>(
- sess: &'a ParseSess,
- cfg: ast::CrateConfig,
- path: &Path,
- sp: Span
-) -> Parser<'a> {
- filemap_to_parser(sess,file_to_filemap(sess,path,Some(sp)),cfg)
+pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
+ cfg: ast::CrateConfig,
+ path: &Path,
+ sp: Span) -> Parser<'a> {
+ filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg)
}
/// Given a filemap and config, return a parser
pub fn filemap_to_parser<'a>(sess: &'a ParseSess,
- filemap: @FileMap,
+ filemap: Rc<FileMap>,
cfg: ast::CrateConfig) -> Parser<'a> {
- tts_to_parser(sess,filemap_to_tts(sess,filemap),cfg)
+ tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg)
}
// must preserve old name for now, because quote! from the *existing*
pub fn new_parser_from_tts<'a>(sess: &'a ParseSess,
cfg: ast::CrateConfig,
tts: Vec<ast::TokenTree>) -> Parser<'a> {
- tts_to_parser(sess,tts,cfg)
+ tts_to_parser(sess, tts, cfg)
}
/// Given a session and a path and an optional span (for error reporting),
/// add the path to the session's codemap and return the new filemap.
pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
- -> @FileMap {
+ -> Rc<FileMap> {
let err = |msg: &str| {
match spanopt {
Some(sp) => sess.span_diagnostic.span_fatal(sp, msg),
// given a session and a string, add the string to
// the session's codemap and return the new filemap
pub fn string_to_filemap(sess: &ParseSess, source: ~str, path: ~str)
- -> @FileMap {
- sess.cm.new_filemap(path, source)
+ -> Rc<FileMap> {
+ sess.span_diagnostic.cm.new_filemap(path, source)
}
// given a filemap, produce a sequence of token-trees
-pub fn filemap_to_tts(sess: &ParseSess, filemap: @FileMap)
+pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
-> Vec<ast::TokenTree> {
// it appears to me that the cfg doesn't matter here... indeed,
// parsing tt's probably shouldn't require a parser at all.
let cfg = Vec::new();
- let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap);
+ let srdr = lexer::new_string_reader(&sess.span_diagnostic, filemap);
let mut p1 = Parser(sess, cfg, ~srdr);
p1.parse_all_token_trees()
}
pub fn tts_to_parser<'a>(sess: &'a ParseSess,
tts: Vec<ast::TokenTree>,
cfg: ast::CrateConfig) -> Parser<'a> {
- let trdr = lexer::new_tt_reader(sess.span_diagnostic, None, tts);
+ let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
Parser(sess, cfg, ~trdr)
}
outer_attrs: &[ast::Attribute],
id_sp: Span)
-> (ast::Item_, Vec<ast::Attribute> ) {
- let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span));
+ let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span));
prefix.pop();
let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice());
let dir_path = prefix.join(&mod_path);