+Version 1.12.0 (2016-09-29)
+===========================
+
+Highlights
+----------
+
+* [`rustc` translates code to LLVM IR via its own "middle" IR (MIR)]
+ (https://github.com/rust-lang/rust/pull/34096).
+ This translation pass is far simpler than the previous AST->LLVM pass, and
+ creates opportunities to perform new optimizations directly on the MIR. It
+ was previously described [on the Rust blog]
+ (https://blog.rust-lang.org/2016/04/19/MIR.html).
+* [`rustc` presents a new, more readable error format, along with
+ machine-readable JSON error output for use by IDEs]
+ (https://github.com/rust-lang/rust/pull/35401).
+ Most common editors supporting Rust have been updated to work with it. It was
+ previously described [on the Rust blog]
+ (https://blog.rust-lang.org/2016/08/10/Shape-of-errors-to-come.html).
+
+Compiler
+--------
+
+* [`rustc` translates code to LLVM IR via its own "middle" IR (MIR)]
+ (https://github.com/rust-lang/rust/pull/34096).
+ This translation pass is far simpler than the previous AST->LLVM pass, and
+ creates opportunities to perform new optimizations directly on the MIR. It
+ was previously described [on the Rust blog]
+ (https://blog.rust-lang.org/2016/04/19/MIR.html).
+* [Print the Rust target name, not the LLVM target name, with
+ `--print target-list`]
+ (https://github.com/rust-lang/rust/pull/35489)
+* [The computation of `TypeId` is correct in some cases where it was previously
+ producing inconsistent results]
+ (https://github.com/rust-lang/rust/pull/35267)
+* [The `mips-unknown-linux-gnu` target uses hardware floating point by default]
+ (https://github.com/rust-lang/rust/pull/34910)
+* [The `rustc` arguments, `--print target-cpus`, `--print target-features`,
+ `--print relocation-models`, and `--print code-models` print the available
+ options to the `-C target-cpu`, `-C target-feature`, `-C relocation-model` and
+ `-C code-model` code generation arguments]
+ (https://github.com/rust-lang/rust/pull/34845)
+* [`rustc` supports three new MUSL targets on ARM: `arm-unknown-linux-musleabi`,
+ `arm-unknown-linux-musleabihf`, and `armv7-unknown-linux-musleabihf`]
+ (https://github.com/rust-lang/rust/pull/35060).
+ These targets produce statically-linked binaries. There are no binary release
+ builds yet though.
+
+Diagnostics
+-----------
+
+* [`rustc` presents a new, more readable error format, along with
+ machine-readable JSON error output for use by IDEs]
+ (https://github.com/rust-lang/rust/pull/35401).
+ Most common editors supporting Rust have been updated to work with it. It was
+ previously described [on the Rust blog]
+ (https://blog.rust-lang.org/2016/08/10/Shape-of-errors-to-come.html).
+* [In error descriptions, references are now described in plain English,
+ instead of as "&-ptr"]
+ (https://github.com/rust-lang/rust/pull/35611)
+* [In error type descriptions, unknown numeric types are named `{integer}` or
+ `{float}` instead of `_`]
+ (https://github.com/rust-lang/rust/pull/35080)
+* [`rustc` emits a clearer error when inner attributes follow a doc comment]
+ (https://github.com/rust-lang/rust/pull/34676)
+
+Language
+--------
+
+* [`macro_rules!` invocations can be made within `macro_rules!` invocations]
+ (https://github.com/rust-lang/rust/pull/34925)
+* [`macro_rules!` meta-variables are hygienic]
+ (https://github.com/rust-lang/rust/pull/35453)
+* [`macro_rules!` `tt` matchers can be reparsed correctly, making them much more
+ useful]
+ (https://github.com/rust-lang/rust/pull/34908)
+* [`macro_rules!` `stmt` matchers correctly consume the entire contents when
+ inside non-braces invocations]
+ (https://github.com/rust-lang/rust/pull/34886)
+* [Semicolons are properly required as statement delimiters inside
+ `macro_rules!` invocations]
+ (https://github.com/rust-lang/rust/pull/34660)
+* [`cfg_attr` works on `path` attributes]
+ (https://github.com/rust-lang/rust/pull/34546)
+
+Stabilized APIs
+---------------
+
+* [`Cell::as_ptr`]
+ (https://doc.rust-lang.org/std/cell/struct.Cell.html#method.as_ptr)
+* [`RefCell::as_ptr`]
+ (https://doc.rust-lang.org/std/cell/struct.RefCell.html#method.as_ptr)
+* [`IpAddr::is_unspecified`]
+ (https://doc.rust-lang.org/std/net/enum.IpAddr.html#method.is_unspecified)
+* [`IpAddr::is_loopback`]
+ (https://doc.rust-lang.org/std/net/enum.IpAddr.html#method.is_loopback)
+* [`IpAddr::is_multicast`]
+ (https://doc.rust-lang.org/std/net/enum.IpAddr.html#method.is_multicast)
+* [`Ipv4Addr::is_unspecified`]
+ (https://doc.rust-lang.org/std/net/struct.Ipv4Addr.html#method.is_unspecified)
+* [`Ipv6Addr::octets`]
+ (https://doc.rust-lang.org/std/net/struct.Ipv6Addr.html#method.octets)
+* [`LinkedList::contains`]
+ (https://doc.rust-lang.org/std/collections/linked_list/struct.LinkedList.html#method.contains)
+* [`VecDeque::contains`]
+ (https://doc.rust-lang.org/std/collections/vec_deque/struct.VecDeque.html#method.contains)
+* [`ExitStatusExt::from_raw`]
+ (https://doc.rust-lang.org/std/os/unix/process/trait.ExitStatusExt.html#tymethod.from_raw).
+ Both on Unix and Windows.
+* [`Receiver::recv_timeout`]
+ (https://doc.rust-lang.org/std/sync/mpsc/struct.Receiver.html#method.recv_timeout)
+* [`RecvTimeoutError`]
+ (https://doc.rust-lang.org/std/sync/mpsc/enum.RecvTimeoutError.html)
+* [`BinaryHeap::peek_mut`]
+ (https://doc.rust-lang.org/std/collections/binary_heap/struct.BinaryHeap.html#method.peek_mut)
+* [`PeekMut`]
+ (https://doc.rust-lang.org/std/collections/binary_heap/struct.PeekMut.html)
+* [`iter::Product`]
+ (https://doc.rust-lang.org/std/iter/trait.Product.html)
+* [`iter::Sum`]
+ (https://doc.rust-lang.org/std/iter/trait.Sum.html)
+* [`OccupiedEntry::remove_entry`]
+ (https://doc.rust-lang.org/std/collections/btree_map/struct.OccupiedEntry.html#method.remove_entry)
+* [`VacantEntry::into_key`]
+ (https://doc.rust-lang.org/std/collections/btree_map/struct.VacantEntry.html#method.into_key)
+
+Libraries
+---------
+
+* [The `format!` macro and friends now allow a single argument to be formatted
+ in multiple styles]
+ (https://github.com/rust-lang/rust/pull/33642)
+* [The lifetime bounds on `[T]::binary_search_by` and
+ `[T]::binary_search_by_key` have been adjusted to be more flexible]
+ (https://github.com/rust-lang/rust/pull/34762)
+* [`Option` implements `From` for its contained type]
+ (https://github.com/rust-lang/rust/pull/34828)
+* [`Cell`, `RefCell` and `UnsafeCell` implement `From` for their contained type]
+ (https://github.com/rust-lang/rust/pull/35392)
+* [`RwLock` panics if the reader count overflows]
+ (https://github.com/rust-lang/rust/pull/35378)
+* [`vec_deque::Drain`, `hash_map::Drain` and `hash_set::Drain` are covariant]
+ (https://github.com/rust-lang/rust/pull/35354)
+* [`vec::Drain` and `binary_heap::Drain` are covariant]
+ (https://github.com/rust-lang/rust/pull/34951)
+* [`Cow<str>` implements `FromIterator` for `char`, `&str` and `String`]
+ (https://github.com/rust-lang/rust/pull/35064)
+* [Sockets on Linux are correctly closed in subprocesses via `SOCK_CLOEXEC`]
+ (https://github.com/rust-lang/rust/pull/34946)
+* [`hash_map::Entry`, `hash_map::VacantEntry` and `hash_map::OccupiedEntry`
+ implement `Debug`]
+ (https://github.com/rust-lang/rust/pull/34946)
+* [`btree_map::Entry`, `btree_map::VacantEntry` and `btree_map::OccupiedEntry`
+ implement `Debug`]
+ (https://github.com/rust-lang/rust/pull/34885)
+* [`String` implements `AddAssign`]
+ (https://github.com/rust-lang/rust/pull/34890)
+* [Variadic `extern fn` pointers implement the `Clone`, `PartialEq`, `Eq`,
+ `PartialOrd`, `Ord`, `Hash`, `fmt::Pointer`, and `fmt::Debug` traits]
+ (https://github.com/rust-lang/rust/pull/34879)
+* [`FileType` implements `Debug`]
+ (https://github.com/rust-lang/rust/pull/34757)
+* [References to `Mutex` and `RwLock` are unwind-safe]
+ (https://github.com/rust-lang/rust/pull/34756)
+* [`mpsc::sync_channel` `Receiver`s return any available message before
+ reporting a disconnect]
+ (https://github.com/rust-lang/rust/pull/34731)
+* [Unicode definitions have been updated to 9.0]
+ (https://github.com/rust-lang/rust/pull/34599)
+* [`env` iterators implement `DoubleEndedIterator`]
+ (https://github.com/rust-lang/rust/pull/33312)
+
+Cargo
+-----
+
+* [Support local mirrors of registries]
+ (https://github.com/rust-lang/cargo/pull/2857)
+* [Add support for command aliases]
+ (https://github.com/rust-lang/cargo/pull/2679)
+* [Allow `opt-level="s"` / `opt-level="z"` in profile overrides]
+ (https://github.com/rust-lang/cargo/pull/3007)
+* [Make `cargo doc --open --target` work as expected]
+ (https://github.com/rust-lang/cargo/pull/2988)
+* [Speed up noop registry updates]
+ (https://github.com/rust-lang/cargo/pull/2974)
+* [Update OpenSSL]
+ (https://github.com/rust-lang/cargo/pull/2971)
+* [Fix `--panic=abort` with plugins]
+ (https://github.com/rust-lang/cargo/pull/2954)
+* [Always pass `-C metadata` to the compiler]
+ (https://github.com/rust-lang/cargo/pull/2946)
+* [Fix depending on git repos with workspaces]
+ (https://github.com/rust-lang/cargo/pull/2938)
+* [Add a `--lib` flag to `cargo new`]
+ (https://github.com/rust-lang/cargo/pull/2921)
+* [Add `http.cainfo` for custom certs]
+ (https://github.com/rust-lang/cargo/pull/2917)
+* [Indicate the compilation profile after compiling]
+ (https://github.com/rust-lang/cargo/pull/2909)
+* [Allow enabling features for dependencies with `--features`]
+ (https://github.com/rust-lang/cargo/pull/2876)
+* [Add `--jobs` flag to `cargo package`]
+ (https://github.com/rust-lang/cargo/pull/2867)
+* [Add `--dry-run` to `cargo publish`]
+ (https://github.com/rust-lang/cargo/pull/2849)
+* [Add support for `RUSTDOCFLAGS`]
+ (https://github.com/rust-lang/cargo/pull/2794)
+
+Performance
+-----------
+
+* [`panic::catch_unwind` is more optimized]
+ (https://github.com/rust-lang/rust/pull/35444)
+* [`panic::catch_unwind` no longer accesses thread-local storage on entry]
+ (https://github.com/rust-lang/rust/pull/34866)
+
+Tooling
+-------
+
+* [Test binaries now support a `--test-threads` argument to specify the number
+ of threads used to run tests, and which acts the same as the
+ `RUST_TEST_THREADS` environment variable]
+ (https://github.com/rust-lang/rust/pull/35414)
+* [The test runner now emits a warning when tests run over 60 seconds]
+ (https://github.com/rust-lang/rust/pull/35405)
+* [rustdoc: Fix methods in search results]
+ (https://github.com/rust-lang/rust/pull/34752)
+* [`rust-lldb` warns about unsupported versions of LLDB]
+ (https://github.com/rust-lang/rust/pull/34646)
+* [Rust releases now come with source packages that can be installed by rustup
+ via `rustup component add rust-src`]
+ (https://github.com/rust-lang/rust/pull/34366).
+ The resulting source code can be used by tools and IDEs, located in the
+ sysroot under `lib/rustlib/src`.
+
+Misc
+----
+
+* [The compiler can now be built against LLVM 3.9]
+ (https://github.com/rust-lang/rust/pull/35594)
+* Many minor improvements to the documentation.
+* [The Rust exception handling "personality" routine is now written in Rust]
+ (https://github.com/rust-lang/rust/pull/34832)
+
+Compatibility Notes
+-------------------
+
+* [When printing Windows `OsStr`s, unpaired surrogate codepoints are escaped
+ with the lowercase format instead of the uppercase]
+ (https://github.com/rust-lang/rust/pull/35084)
+* [When formatting strings, if "precision" is specified, the "fill",
+ "align" and "width" specifiers are no longer ignored]
+ (https://github.com/rust-lang/rust/pull/34544)
+* [The `Debug` impl for strings no longer escapes all non-ASCII characters]
+ (https://github.com/rust-lang/rust/pull/34485)
+
+
Version 1.11.0 (2016-08-18)
===========================
CFG_CPUTYPE=$(isainfo -n)
;;
+ Haiku)
+ CFG_OSTYPE=unknown-haiku
+ ;;
+
MINGW*)
# msys' `uname` does not print gcc configuration, but prints msys
# configuration. so we cannot believe `uname -m`:
CFG_CPUTYPE=x86_64
;;
+ BePC)
+ CFG_CPUTYPE=i686
+ ;;
+
*)
err "unknown CPU type: $CFG_CPUTYPE"
esac
valopt infodir "${CFG_PREFIX}/share/info" "install additional info"
valopt llvm-root "" "set LLVM root"
valopt python "" "set path to python"
-valopt nodejs "" "set path to nodejs"
valopt jemalloc-root "" "set directory where libjemalloc_pic.a is located"
valopt build "${DEFAULT_BUILD}" "GNUs ./configure syntax LLVM build triple"
valopt android-cross-path "" "Android NDK standalone path (deprecated)"
valopt_nosave host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples"
valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples"
valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH"
-valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install man pages in PATH"
+valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH"
# On Windows this determines root of the subtree for target libraries.
# Host runtime libs always go to 'bin'.
err "Found $python_version, but Python 2.7 is required"
fi
-# Checking for node, but not required
-probe CFG_NODEJS nodejs node
-
# If we have no git directory then we are probably a tarball distribution
# and shouldn't attempt to load submodules
if [ ! -e ${CFG_SRC_DIR}.git ]
--- /dev/null
+# i686-unknown-haiku configuration
+CROSS_PREFIX_i686-unknown-haiku=i586-pc-haiku-
+CC_i686-unknown-haiku=$(CC)
+CXX_i686-unknown-haiku=$(CXX)
+CPP_i686-unknown-haiku=$(CPP)
+AR_i686-unknown-haiku=$(AR)
+CFG_LIB_NAME_i686-unknown-haiku=lib$(1).so
+CFG_STATIC_LIB_NAME_i686-unknown-haiku=lib$(1).a
+CFG_LIB_GLOB_i686-unknown-haiku=lib$(1)-*.so
+CFG_LIB_DSYM_GLOB_i686-unknown-haiku=lib$(1)-*.dylib.dSYM
+CFG_CFLAGS_i686-unknown-haiku := -m32 $(CFLAGS)
+CFG_GCCISH_CFLAGS_i686-unknown-haiku := -Wall -Werror -g -fPIC -m32 $(CFLAGS)
+CFG_GCCISH_CXXFLAGS_i686-unknown-haiku := -fno-rtti $(CXXFLAGS)
+CFG_GCCISH_LINK_FLAGS_i686-unknown-haiku := -shared -fPIC -ldl -pthread -lrt -g -m32
+CFG_GCCISH_PRE_LIB_FLAGS_i686-unknown-haiku := -Wl,-whole-archive
+CFG_GCCISH_POST_LIB_FLAGS_i686-unknown-haiku := -Wl,-no-whole-archive
+CFG_DEF_SUFFIX_i686-unknown-haiku := .linux.def
+CFG_LLC_FLAGS_i686-unknown-haiku :=
+CFG_INSTALL_NAME_i686-unknown-haiku =
+CFG_EXE_SUFFIX_i686-unknown-haiku =
+CFG_WINDOWSY_i686-unknown-haiku :=
+CFG_UNIXY_i686-unknown-haiku := 1
+CFG_PATH_MUNGE_i686-unknown-haiku := true
+CFG_LDPATH_i686-unknown-haiku :=
+CFG_RUN_i686-unknown-haiku=$(2)
+CFG_RUN_TARG_i686-unknown-haiku=$(call CFG_RUN_i686-unknown-haiku,,$(2))
+CFG_GNU_TRIPLE_i686-unknown-haiku := i686-unknown-haiku
--- /dev/null
+# x86_64-unknown-haiku configuration
+CROSS_PREFIX_x86_64-unknown-haiku=x86_64-unknown-haiku-
+CC_x86_64-unknown-haiku=$(CC)
+CXX_x86_64-unknown-haiku=$(CXX)
+CPP_x86_64-unknown-haiku=$(CPP)
+AR_x86_64-unknown-haiku=$(AR)
+CFG_LIB_NAME_x86_64-unknown-haiku=lib$(1).so
+CFG_STATIC_LIB_NAME_x86_64-unknown-haiku=lib$(1).a
+CFG_LIB_GLOB_x86_64-unknown-haiku=lib$(1)-*.so
+CFG_LIB_DSYM_GLOB_x86_64-unknown-haiku=lib$(1)-*.dylib.dSYM
+CFG_CFLAGS_x86_64-unknown-haiku := -m64 $(CFLAGS)
+CFG_GCCISH_CFLAGS_x86_64-unknown-haiku := -Wall -Werror -g -fPIC -m64 $(CFLAGS)
+CFG_GCCISH_CXXFLAGS_x86_64-unknown-haiku := -fno-rtti $(CXXFLAGS)
+CFG_GCCISH_LINK_FLAGS_x86_64-unknown-haiku := -shared -fPIC -ldl -pthread -lrt -g -m64
+CFG_GCCISH_PRE_LIB_FLAGS_x86_64-unknown-haiku := -Wl,-whole-archive
+CFG_GCCISH_POST_LIB_FLAGS_x86_64-unknown-haiku := -Wl,-no-whole-archive
+CFG_DEF_SUFFIX_x86_64-unknown-haiku := .linux.def
+CFG_LLC_FLAGS_x86_64-unknown-haiku :=
+CFG_INSTALL_NAME_x86_64-unknown-haiku =
+CFG_EXE_SUFFIX_x86_64-unknown-haiku =
+CFG_WINDOWSY_x86_64-unknown-haiku :=
+CFG_UNIXY_x86_64-unknown-haiku := 1
+CFG_PATH_MUNGE_x86_64-unknown-haiku := true
+CFG_LDPATH_x86_64-unknown-haiku :=
+CFG_RUN_x86_64-unknown-haiku=$(2)
+CFG_RUN_TARG_x86_64-unknown-haiku=$(call CFG_RUN_x86_64-unknown-haiku,,$(2))
+CFG_GNU_TRIPLE_x86_64-unknown-haiku := x86_64-unknown-haiku
rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \
rustc_const_eval rustc_const_math rustc_incremental rustc_macro
HOST_CRATES := syntax syntax_ext proc_macro syntax_pos $(RUSTC_CRATES) rustdoc fmt_macros \
- flate arena graphviz rbml log serialize
+ flate arena graphviz log serialize
TOOLS := compiletest rustdoc rustc rustbook error_index_generator
DEPS_core :=
DEPS_graphviz := std
DEPS_log := std
DEPS_num := std
-DEPS_rbml := std log serialize
DEPS_serialize := std log
DEPS_term := std
DEPS_test := std getopts term native:rust_test_helpers
DEPS_rustc_const_eval := rustc_const_math rustc syntax log serialize \
rustc_back graphviz syntax_pos
-DEPS_rustc := syntax fmt_macros flate arena serialize getopts rbml \
+DEPS_rustc := syntax fmt_macros flate arena serialize getopts \
log graphviz rustc_llvm rustc_back rustc_data_structures\
rustc_const_math syntax_pos rustc_errors
DEPS_rustc_back := std syntax flate log libc
DEPS_rustc_lint := rustc log syntax syntax_pos rustc_const_eval
DEPS_rustc_llvm := native:rustllvm libc std rustc_bitflags
DEPS_rustc_macro := std syntax
-DEPS_rustc_metadata := rustc syntax syntax_pos rustc_errors rbml rustc_const_math \
+DEPS_rustc_metadata := rustc syntax syntax_pos rustc_errors rustc_const_math \
rustc_macro syntax_ext
DEPS_rustc_passes := syntax syntax_pos rustc core rustc_const_eval rustc_errors
DEPS_rustc_mir := rustc syntax syntax_pos rustc_const_math rustc_const_eval rustc_bitflags
DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \
log syntax serialize rustc_llvm rustc_platform_intrinsics \
rustc_const_math rustc_const_eval rustc_incremental rustc_errors syntax_pos
-DEPS_rustc_incremental := rbml rustc syntax_pos serialize rustc_data_structures
+DEPS_rustc_incremental := rustc syntax_pos serialize rustc_data_structures
DEPS_rustc_save_analysis := rustc log syntax syntax_pos serialize
DEPS_rustc_typeck := rustc syntax syntax_pos rustc_platform_intrinsics rustc_const_math \
rustc_const_eval rustc_errors
######################################################################
# The version number
-CFG_RELEASE_NUM=1.13.0
+CFG_RELEASE_NUM=1.14.0
# An optional number to put after the label, e.g. '.2' -> '-beta.2'
# NB Make sure it starts with a dot to conform to semver pre-release
else
ifeq ($$(CFG_WINDOWSY_$(3)),1)
LD_LIBRARY_PATH_ENV_NAME$(1)_T_$(2)_H_$(3) := PATH
+else
+ifeq ($$(OSTYPE_$(3)),unknown-haiku)
+ LD_LIBRARY_PATH_ENV_NAME$(1)_T_$(2)_H_$(3) := LIBRARY_PATH
else
LD_LIBRARY_PATH_ENV_NAME$(1)_T_$(2)_H_$(3) := LD_LIBRARY_PATH
endif
endif
+endif
LD_LIBRARY_PATH_ENV_HOSTDIR$(1)_T_$(2)_H_$(3) := \
$$(CURDIR)/$$(HLIB$(1)_H_$(3)):$$(CFG_LLVM_INST_DIR_$(3))/lib
$(foreach host,$(CFG_HOST), \
all-target-$(target)-host-$(host)))
-all: $(ALL_TARGET_RULES) $(GENERATED) docs
+all-no-docs: $(ALL_TARGET_RULES) $(GENERATED)
+all: all-no-docs docs
######################################################################
# Build system documentation
panic_abort,$(TARGET_CRATES)) \
collectionstest coretest
TEST_DOC_CRATES = $(DOC_CRATES) arena flate fmt_macros getopts graphviz \
- log rand rbml serialize syntax term test
+ log rand serialize syntax term test
TEST_HOST_CRATES = $(filter-out rustc_typeck rustc_borrowck rustc_resolve \
rustc_trans rustc_lint,\
$(HOST_CRATES))
} else {
("RUSTC_REAL", "RUSTC_LIBDIR")
};
- let stage = env::var("RUSTC_STAGE").unwrap();
+ let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
- let rustc = env::var_os(rustc).unwrap();
- let libdir = env::var_os(libdir).unwrap();
+ let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
+ let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));
let mut dylib_path = bootstrap::util::dylib_path();
dylib_path.insert(0, PathBuf::from(libdir));
if let Some(target) = target {
// The stage0 compiler has a special sysroot distinct from what we
// actually downloaded, so we just always pass the `--sysroot` option.
- cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap());
+ cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set"));
// When we build Rust dylibs they're all intended for intermediate
// usage, so make sure we pass the -Cprefer-dynamic flag instead of
let is_panic_abort = args.windows(2).any(|a| {
&*a[0] == "--crate-name" && &*a[1] == "panic_abort"
});
- // FIXME(stage0): remove this `stage != "0"` condition
- if is_panic_abort && stage != "0" {
+ if is_panic_abort {
cmd.arg("-C").arg("panic=abort");
}
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
- let rustdoc = env::var_os("RUSTDOC_REAL").unwrap();
- let libdir = env::var_os("RUSTC_LIBDIR").unwrap();
+ let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set");
+ let libdir = env::var_os("RUSTC_LIBDIR").expect("RUSTC_LIBDIR was not set");
+ let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
let mut dylib_path = bootstrap::util::dylib_path();
dylib_path.insert(0, PathBuf::from(libdir));
let mut cmd = Command::new(rustdoc);
cmd.args(&args)
- .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap()))
+ .arg("--cfg").arg(format!("stage{}", stage))
.arg("--cfg").arg("dox")
.env(bootstrap::util::dylib_path_var(),
env::join_paths(&dylib_path).unwrap());
Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
})
}
-
def format_build_time(duration):
return str(datetime.timedelta(seconds=int(duration)))
-class RustBuild:
+
+class RustBuild(object):
def download_stage0(self):
cache_dst = os.path.join(self.build_dir, "cache")
rustc_cache = os.path.join(cache_dst, self.stage0_rustc_date())
os.makedirs(cargo_cache)
if self.rustc().startswith(self.bin_root()) and \
- (not os.path.exists(self.rustc()) or self.rustc_out_of_date()):
+ (not os.path.exists(self.rustc()) or self.rustc_out_of_date()):
if os.path.exists(self.bin_root()):
shutil.rmtree(self.bin_root())
channel = self.stage0_rustc_channel()
f.write(self.stage0_rustc_date())
if self.cargo().startswith(self.bin_root()) and \
- (not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
+ (not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
channel = self.stage0_cargo_channel()
filename = "cargo-{}-{}.tar.gz".format(channel, self.build)
url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date()
def get_string(self, line):
start = line.find('"')
- end = start + 1 + line[start+1:].find('"')
- return line[start+1:end]
+ end = start + 1 + line[start + 1:].find('"')
+ return line[start + 1:end]
def exe_suffix(self):
if sys.platform == 'win32':
use build_helper::output;
use filetime::FileTime;
-use util::{exe, staticlib, libdir, mtime, is_dylib, copy};
+use util::{exe, libdir, mtime, is_dylib, copy};
use {Build, Compiler, Mode};
/// Build the standard library.
let libdir = build.sysroot_libdir(compiler, target);
let _ = fs::remove_dir_all(&libdir);
t!(fs::create_dir_all(&libdir));
- // FIXME(stage0) remove this `if` after the next snapshot
- // The stage0 compiler still passes the `-lcompiler-rt` flag to the linker but now `bootstrap`
- // never builds a `libcopmiler-rt.a`! We'll fill the hole by simply copying stage0's
- // `libcompiler-rt.a` to where the stage1's one is expected (though we could as well just use
- // an empty `.a` archive). Note that the symbols of that stage0 `libcompiler-rt.a` won't make
- // it to the final binary because now `libcore.rlib` also contains the symbols that
- // `libcompiler-rt.a` provides. Since that rlib appears first in the linker arguments, its
- // symbols are used instead of `libcompiler-rt.a`'s.
- if compiler.stage == 0 {
- let rtlib = &staticlib("compiler-rt", target);
- let src = build.rustc.parent().unwrap().parent().unwrap().join("lib").join("rustlib")
- .join(target).join("lib").join(rtlib);
- copy(&src, &libdir.join(rtlib));
- }
// Some platforms have startup objects that may be required to produce the
// libstd dynamic library, for example.
self.rustc = Some(PathBuf::from(value).join("bin/rustc"));
self.cargo = Some(PathBuf::from(value).join("bin/cargo"));
}
- "CFG_NODEJS" if value.len() > 0 => {
- self.nodejs = Some(PathBuf::from(value));
- }
_ => {}
}
}
// Almost all of these are simple one-liners that shell out to the
// corresponding functionality in the extra modules, where more
// documentation can be found.
- for target in step::all(self) {
+ let steps = step::all(self);
+
+ self.verbose("bootstrap build plan:");
+ for step in &steps {
+ self.verbose(&format!("{:?}", step));
+ }
+
+ for target in steps {
let doc_out = self.out.join(&target.target).join("doc");
match target.src {
Llvm { _dummy } => {
continue
}
+ if !submodule.path.exists() {
+ t!(fs::create_dir_all(&submodule.path));
+ }
+
match submodule.state {
State::MaybeDirty => {
// drop staged changes
- self.run(git().arg("-C").arg(submodule.path).args(&["reset", "--hard"]));
+ self.run(git().current_dir(submodule.path)
+ .args(&["reset", "--hard"]));
// drops unstaged changes
- self.run(git().arg("-C").arg(submodule.path).args(&["clean", "-fdx"]));
+ self.run(git().current_dir(submodule.path)
+ .args(&["clean", "-fdx"]));
},
State::NotInitialized => {
self.run(git_submodule().arg("init").arg(submodule.path));
State::OutOfSync => {
// drops submodule commits that weren't reported to the (outer) git repository
self.run(git_submodule().arg("update").arg(submodule.path));
- self.run(git().arg("-C").arg(submodule.path).args(&["reset", "--hard"]));
- self.run(git().arg("-C").arg(submodule.path).args(&["clean", "-fdx"]));
+ self.run(git().current_dir(submodule.path)
+ .args(&["reset", "--hard"]));
+ self.run(git().current_dir(submodule.path)
+ .args(&["clean", "-fdx"]));
},
}
}
.env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
}
- // If we're building for OSX, inform the compiler and the linker that
- // we want to build a compiler runnable on 10.7
- if target.contains("apple-darwin") {
- cargo.env("MACOSX_DEPLOYMENT_TARGET", "10.7");
- }
-
// Environment variables *required* needed throughout the build
//
// FIXME: should update code to not require this env var
// LLVM/jemalloc/etc are all properly compiled.
if target.contains("apple-darwin") {
base.push("-stdlib=libc++".into());
- base.push("-mmacosx-version-min=10.7".into());
}
// This is a hack, because newer binutils broke things on some vms/distros
// (i.e., linking against unknown relocs disabled by the following flag)
/// Returns the path to the C++ compiler for the target specified, may panic
/// if no C++ compiler was configured for the target.
fn cxx(&self, target: &str) -> &Path {
- self.cxx[target].path()
+ match self.cxx.get(target) {
+ Some(p) => p.path(),
+ None => panic!("\n\ntarget `{}` is not configured as a host,
+ only as a target\n\n", target),
+ }
}
/// Returns flags to pass to the compiler to generate code for `target`.
panic!("PATH contains invalid character '\"'");
}
}
- let mut need_cmd = |cmd: &OsStr| {
- if !checked.insert(cmd.to_owned()) {
- return
- }
+ let have_cmd = |cmd: &OsStr| {
for path in env::split_paths(&path).map(|p| p.join(cmd)) {
if fs::metadata(&path).is_ok() ||
fs::metadata(path.with_extension("exe")).is_ok() {
- return
+ return Some(path);
}
}
- panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
+ return None;
+ };
+
+ let mut need_cmd = |cmd: &OsStr| {
+ if !checked.insert(cmd.to_owned()) {
+ return
+ }
+ if have_cmd(cmd).is_none() {
+ panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
+ }
};
// If we've got a git directory we're gona need git to update
need_cmd("python".as_ref());
- // If a manual nodejs was added to the config,
- // of if a nodejs install is detected through config, use it.
+ // Look for the nodejs command, needed for emscripten testing
+ if let Some(node) = have_cmd("node".as_ref()) {
+ build.config.nodejs = Some(node);
+ } else if let Some(node) = have_cmd("nodejs".as_ref()) {
+ build.config.nodejs = Some(node);
+ }
+
if let Some(ref s) = build.config.nodejs {
need_cmd(s.as_ref());
}
/// into a topologically sorted list which when executed left-to-right will
/// correctly sequence the entire build.
pub fn all(build: &Build) -> Vec<Step> {
+ build.verbose("inferred build steps:");
+
let mut ret = Vec::new();
let mut all = HashSet::new();
for target in top_level(build) {
set: &mut HashSet<Step<'a>>) {
if set.insert(target.clone()) {
for dep in target.deps(build) {
+ build.verbose(&format!("{:?}\n -> {:?}", target, dep));
fill(build, &dep, ret, set);
}
ret.push(target.clone());
`"C:\Program Files\Rust stable GNU 1.x\bin"`.
Rust does not do its own linking, and so you’ll need to have a linker
-installed. Doing so will depend on your specific system, consult its
-documentation for more details.
-
-If not, there are a number of places where we can get help. The easiest is
-[the #rust-beginners IRC channel on irc.mozilla.org][irc-beginners] and for
-general discussion [the #rust IRC channel on irc.mozilla.org][irc], which we
+installed. Doing so will depend on your specific system. For
+Linux-based systems, Rust will attempt to call `cc` for linking. On
+`windows-msvc` (Rust built on Windows with Microsoft Visual Studio),
+this depends on having [Microsoft Visual C++ Build Tools][msvbt]
+installed. These do not need to be in `%PATH%` as `rustc` will find
+them automatically. In general, if you have your linker in a
+non-traditional location you can call `rustc -C
+linker=/path/to/cc`, where `/path/to/cc` should point to your linker path.
+
+[msvbt]: http://landinghub.visualstudio.com/visual-cpp-build-tools
+
+If you are still stuck, there are a number of places where we can get
+help. The easiest is
+[the #rust-beginners IRC channel on irc.mozilla.org][irc-beginners]
+and for general discussion
+[the #rust IRC channel on irc.mozilla.org][irc], which we
can access through [Mibbit][mibbit]. Then we'll be chatting with other
Rustaceans (a silly nickname we call ourselves) who can help us out. Other great
resources include [the user’s forum][users] and [Stack Overflow][stackoverflow].
## Writing and Running a Rust Program
-Next, make a new source file and call it *main.rs*. Rust files always end
-in a *.rs* extension. If you’re using more than one word in your filename, use
-an underscore to separate them; for example, you'd use *hello_world.rs* rather
-than *helloworld.rs*.
+We need to create a source file for our Rust program. Rust files always end
+in a *.rs* extension. If you are using more than one word in your filename,
+use an underscore to separate them; for example, you would use
+*my_program.rs* rather than *myprogram.rs*.
-Now open the *main.rs* file you just created, and type the following code:
+Now, make a new file and call it *main.rs*. Open the file and type
+the following code:
```rust
fn main() {
vector, even the heap-allocated memory. This happens deterministically, at the
end of the scope.
-We'll cover [vectors] in detail later in this chapter; we only use them
+We covered [vectors] in the previous chapter; we use them
here as an example of a type that allocates space on the heap at runtime. They
behave like [arrays], except their size may change by `push()`ing more
elements onto them.
Vectors have a [generic type][generics] `Vec<T>`, so in this example `v` will have type
-`Vec<i32>`. We'll cover generics in detail later in this chapter.
+`Vec<i32>`. We'll cover [generics] in detail in a later chapter.
[arrays]: primitive-types.html#arrays
[vectors]: vectors.html
* `-` (`- expr`): arithmetic negation. Overloadable (`Neg`).
* `-=` (`var -= expr`): arithmetic subtraction & assignment. Overloadable (`SubAssign`).
* `->` (`fn(…) -> type`, `|…| -> type`): function and closure return type. See [Functions], [Closures].
-* `-> !` (`fn(…) -> !`, `|…| -> !`): diverging function or closure. See [Diverging Functions].
* `.` (`expr.ident`): member access. See [Structs], [Method Syntax].
* `..` (`..`, `expr..`, `..expr`, `expr..expr`): right-exclusive range literal.
* `..` (`..expr`): struct literal update syntax. See [Structs (Update syntax)].
* `/*!…*/`: inner block doc comment. See [Comments].
* `/**…*/`: outer block doc comment. See [Comments].
+<!-- Special types -->
+
+* `!`: always empty Never type. See [Diverging Functions].
+
<!-- Various things involving parens and tuples -->
* `()`: empty tuple (*a.k.a.* unit), both literal and type.
Rust will not let us use a value that has not been initialized.
-Let take a minute to talk about this stuff we've added to `println!`.
+Let us take a minute to talk about this stuff we've added to `println!`.
If you include two curly braces (`{}`, some call them moustaches...) in your
string to print, Rust will interpret this as a request to interpolate some sort
bound := path | lifetime
```
+### Never type
+An empty type
+
+```antlr
+never_type : "!" ;
+```
+
### Object types
**FIXME:** grammar?
footer {
border-top: 1px solid #ddd;
- font-size: 14.3px;
+ font-size: 14px;
font-style: italic;
padding-top: 5px;
margin-top: 3em;
UNQUALIFIED_TYPE_MARKERS = frozenset(["(", "[", "&", "*"])
def extract_type_name(qualified_type_name):
- '''Extracts the type name from a fully qualified path'''
+ """Extracts the type name from a fully qualified path"""
if qualified_type_name[0] in UNQUALIFIED_TYPE_MARKERS:
return qualified_type_name
#=------------------------------------------------------------------------------
# Pretty Printer Classes
#=------------------------------------------------------------------------------
-class RustStructPrinter:
+class RustStructPrinter(object):
def __init__(self, val, omit_first_field, omit_type_name, is_tuple_like):
self.__val = val
self.__omit_first_field = omit_first_field
return ""
-class RustSlicePrinter:
+class RustSlicePrinter(object):
def __init__(self, val):
self.__val = val
- def display_hint(self):
+ @staticmethod
+ def display_hint():
return "array"
def to_string(self):
yield (str(index), (raw_ptr + index).dereference())
-class RustStringSlicePrinter:
+class RustStringSlicePrinter(object):
def __init__(self, val):
self.__val = val
return '"%s"' % raw_ptr.string(encoding="utf-8", length=length)
-class RustStdVecPrinter:
+class RustStdVecPrinter(object):
def __init__(self, val):
self.__val = val
- def display_hint(self):
+ @staticmethod
+ def display_hint():
return "array"
def to_string(self):
yield (str(index), (gdb_ptr + index).dereference())
-class RustStdStringPrinter:
+class RustStdStringPrinter(object):
def __init__(self, val):
self.__val = val
length=length)
-class RustCStyleVariantPrinter:
+class RustCStyleVariantPrinter(object):
def __init__(self, val):
assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_ENUM
self.__val = val
return str(self.__val.get_wrapped_value())
-class IdentityPrinter:
+class IdentityPrinter(object):
def __init__(self, string):
self.string = string
def print_debug(s):
- "Print something if DEBUG_OUTPUT is True"
+ """Print something if DEBUG_OUTPUT is True"""
global DEBUG_OUTPUT
if DEBUG_OUTPUT:
print("DEBUG: " + str(s))
def normalize_whitespace(s):
- "Replace newlines, tabs, multiple spaces, etc with exactly one space"
+ """Replace newlines, tabs, multiple spaces, etc with exactly one space"""
return re.sub("\s+", " ", s)
def execute_command(command_interpreter, command):
- "Executes a single CLI command"
+ """Executes a single CLI command"""
global new_breakpoints
global registered_breakpoints
#=--------------------------------------------------------------------------------------------------
def print_struct_val(val, internal_dict, omit_first_field, omit_type_name, is_tuple_like):
- '''
+ """
Prints a struct, tuple, or tuple struct value with Rust syntax.
Ignores any fields before field_start_index.
- '''
+ """
assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_STRUCT
if omit_type_name:
"body": body}
def print_pointer_val(val, internal_dict):
- '''Prints a pointer value with Rust syntax'''
+ """Prints a pointer value with Rust syntax"""
assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
sigil = "&"
type_name = val.type.get_unqualified_type_name()
#=--------------------------------------------------------------------------------------------------
def print_array_of_values(array_name, data_ptr_val, length, internal_dict):
- '''Prints a contigous memory range, interpreting it as values of the
- pointee-type of data_ptr_val.'''
+ """Prints a contiguous memory range, interpreting it as values of the
+ pointee-type of data_ptr_val."""
data_ptr_type = data_ptr_val.type
assert data_ptr_type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
OS=`uname -s`
case $OS in
- ("Linux"|"FreeBSD"|"DragonFly"|"Bitrig"|"OpenBSD"|"SunOS")
+ ("Linux"|"FreeBSD"|"DragonFly"|"Bitrig"|"OpenBSD"|"SunOS"|"Haiku")
BIN_SUF=
LIB_SUF=.so
;;
def __init__(self):
Type.__init__(self, 0)
- def compiler_ctor(self):
+ @staticmethod
+ def compiler_ctor():
return '::VOID'
def compiler_ctor_ref(self):
return '&' + self.compiler_ctor()
- def rust_name(self):
+ @staticmethod
+ def rust_name():
return '()'
- def type_info(self, platform_info):
+ @staticmethod
+ def type_info(platform_info):
return None
def __eq__(self, other):
class Pointer(Type):
def __init__(self, elem, llvm_elem, const):
- self._elem = elem;
+ self._elem = elem
self._llvm_elem = llvm_elem
self._const = const
Type.__init__(self, BITWIDTH_POINTER)
# must be a power of two
assert width & (width - 1) == 0
def recur(processed, untouched):
- if untouched == []:
+ if not untouched:
ret = processed[0]
args = processed[1:]
yield MonomorphicIntrinsic(self._platform, self.intrinsic, width,
def __init__(self):
pass
- def open(self, platform):
+ @staticmethod
+ def open(platform):
return 'extern "platform-intrinsic" {'
- def render(self, mono):
+ @staticmethod
+ def render(mono):
return ' fn {}{}{};'.format(mono.platform_prefix(),
mono.intrinsic_name(),
mono.intrinsic_signature())
- def close(self):
+ @staticmethod
+ def close():
return '}'
class CompilerDefs(object):
def __init__(self):
pass
- def open(self, platform):
+ @staticmethod
+ def open(platform):
return '''\
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
if !name.starts_with("{0}") {{ return None }}
Some(match &name["{0}".len()..] {{'''.format(platform.platform_prefix())
- def render(self, mono):
+ @staticmethod
+ def render(mono):
return '''\
"{}" => Intrinsic {{
inputs: {{ static INPUTS: [&'static Type; {}] = [{}]; &INPUTS }},
mono.compiler_ret(),
mono.llvm_name())
- def close(self):
+ @staticmethod
+ def close():
return '''\
_ => return None,
})
def interact(proc, queue):
- line = ""
n = 0
while proc.poll() is None:
line = proc.stdout.readline()
continue
assert line.endswith('\n'), "incomplete line: " + repr(line)
queue.put(line)
- line = ""
n += 1
if n % UPDATE_EVERY_N == 0:
msg("got", str(n // 1000) + "k", "records")
canon_decomp = {}
compat_decomp = {}
- udict = {};
- range_start = -1;
+ udict = {}
+ range_start = -1
for line in fileinput.input(f):
- data = line.split(';');
+ data = line.split(';')
if len(data) != 15:
continue
- cp = int(data[0], 16);
+ cp = int(data[0], 16)
if is_surrogate(cp):
continue
if range_start >= 0:
for i in xrange(range_start, cp):
- udict[i] = data;
- range_start = -1;
+ udict[i] = data
+ range_start = -1
if data[1].endswith(", First>"):
- range_start = cp;
- continue;
- udict[cp] = data;
+ range_start = cp
+ continue
+ udict[cp] = data
for code in udict:
- [code_org, name, gencat, combine, bidi,
+ (code_org, name, gencat, combine, bidi,
decomp, deci, digit, num, mirror,
- old, iso, upcase, lowcase, titlecase ] = udict[code];
+ old, iso, upcase, lowcase, titlecase) = udict[code]
# generate char to char direct common and simple conversions
# uppercase to lowercase
global bytes_old, bytes_new
bytes_old += 8 * len(t_data)
CHUNK = 64
- rawdata = [False] * 0x110000;
+ rawdata = [False] * 0x110000
for (lo, hi) in t_data:
for cp in range(lo, hi + 1):
rawdata[cp] = True
/// }
/// ```
-#[cfg_attr(stage0, unsafe_no_drop_flag)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
ptr: Shared<ArcInner<T>>,
/// nodes behind strong `Arc<T>` pointers, and then storing the parent pointers
/// as `Weak<T>` pointers.
-#[cfg_attr(stage0, unsafe_no_drop_flag)]
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
ptr: Shared<ArcInner<T>>,
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(unique)]
-#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]
#![feature(unsize)]
#![cfg_attr(not(test), feature(fused, fn_traits, placement_new_protocol))]
/// `shrink_to_fit`, and `from_box` will actually set RawVec's private capacity
/// field. This allows zero-sized types to not be special-cased by consumers of
/// this type.
-#[cfg_attr(stage0, unsafe_no_drop_flag)]
pub struct RawVec<T> {
ptr: Unique<T>,
cap: usize,
#![allow(deprecated)]
-//! Unsynchronized reference-counted boxes (the `Rc<T>` type) which are usable
-//! only within a single thread.
+//! Single-threaded reference-counting pointers.
//!
-//! The `Rc<T>` type provides shared ownership of an immutable value.
-//! Destruction is deterministic, and will occur as soon as the last owner is
-//! gone. It is marked as non-sendable because it avoids the overhead of atomic
-//! reference counting.
+//! The type [`Rc<T>`][rc] provides shared ownership of a value, allocated
+//! in the heap. Invoking [`clone`][clone] on `Rc` produces a new pointer
+//! to the same value in the heap. When the last `Rc` pointer to a given
+//! value is destroyed, the pointed-to value is also destroyed.
//!
-//! The `downgrade` method can be used to create a non-owning `Weak<T>` pointer
-//! to the box. A `Weak<T>` pointer can be upgraded to an `Rc<T>` pointer, but
-//! will return `None` if the value has already been dropped.
+//! Shared pointers in Rust disallow mutation by default, and `Rc` is no
+//! exception. If you need to mutate through an `Rc`, use [`Cell`][cell] or
+//! [`RefCell`][refcell].
//!
-//! For example, a tree with parent pointers can be represented by putting the
-//! nodes behind strong `Rc<T>` pointers, and then storing the parent pointers
-//! as `Weak<T>` pointers.
+//! `Rc` uses non-atomic reference counting. This means that overhead is very
+//! low, but an `Rc` cannot be sent between threads, and consequently `Rc`
+//! does not implement [`Send`][send]. As a result, the Rust compiler
+//! will check *at compile time* that you are not sending `Rc`s between
+//! threads. If you need multi-threaded, atomic reference counting, use
+//! [`sync::Arc`][arc].
+//!
+//! The [`downgrade`][downgrade] method can be used to create a non-owning
+//! [`Weak`][weak] pointer. A `Weak` pointer can be [`upgrade`][upgrade]d
+//! to an `Rc`, but this will return [`None`][option] if the value has
+//! already been dropped.
+//!
+//! A cycle between `Rc` pointers will never be deallocated. For this reason,
+//! `Weak` is used to break cycles. For example, a tree could have strong
+//! `Rc` pointers from parent nodes to children, and `Weak` pointers from
+//! children back to their parents.
+//!
+//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
+//! so you can call `T`'s methods on a value of type `Rc<T>`. To avoid name
+//! clashes with `T`'s methods, the methods of `Rc<T>` itself are [associated
+//! functions][assoc], called using function-like syntax:
+//!
+//! ```
+//! # use std::rc::Rc;
+//! # let my_rc = Rc::new(());
+//! Rc::downgrade(&my_rc);
+//! ```
+//!
+//! `Weak<T>` does not auto-dereference to `T`, because the value may have
+//! already been destroyed.
+//!
+//! [rc]: struct.Rc.html
+//! [weak]: struct.Weak.html
+//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+//! [cell]: ../../std/cell/struct.Cell.html
+//! [refcell]: ../../std/cell/struct.RefCell.html
+//! [send]: ../../std/marker/trait.Send.html
+//! [arc]: ../../std/sync/struct.Arc.html
+//! [deref]: ../../std/ops/trait.Deref.html
+//! [downgrade]: struct.Rc.html#method.downgrade
+//! [upgrade]: struct.Weak.html#method.upgrade
+//! [option]: ../../std/option/enum.Option.html
+//! [assoc]: ../../book/method-syntax.html#associated-functions
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
-//! `Owner`. `Rc<T>` allows us to share an `Owner` between multiple `Gadget`s,
+//! `Owner`. `Rc` allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
-//! ```rust
+//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
-//! name: String
+//! name: String,
//! // ...other fields
//! }
//!
//! struct Gadget {
//! id: i32,
-//! owner: Rc<Owner>
+//! owner: Rc<Owner>,
//! // ...other fields
//! }
//!
//! fn main() {
-//! // Create a reference counted Owner.
-//! let gadget_owner : Rc<Owner> = Rc::new(
-//! Owner { name: String::from("Gadget Man") }
+//! // Create a reference-counted `Owner`.
+//! let gadget_owner: Rc<Owner> = Rc::new(
+//! Owner {
+//! name: "Gadget Man".to_string(),
+//! }
//! );
//!
-//! // Create Gadgets belonging to gadget_owner. To increment the reference
-//! // count we clone the `Rc<T>` object.
-//! let gadget1 = Gadget { id: 1, owner: gadget_owner.clone() };
-//! let gadget2 = Gadget { id: 2, owner: gadget_owner.clone() };
+//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
+//! // value gives us a new pointer to the same `Owner` value, incrementing
+//! // the reference count in the process.
+//! let gadget1 = Gadget {
+//! id: 1,
+//! owner: gadget_owner.clone(),
+//! };
+//! let gadget2 = Gadget {
+//! id: 2,
+//! owner: gadget_owner.clone(),
+//! };
//!
+//! // Dispose of our local variable `gadget_owner`.
//! drop(gadget_owner);
//!
-//! // Despite dropping gadget_owner, we're still able to print out the name
-//! // of the Owner of the Gadgets. This is because we've only dropped the
-//! // reference count object, not the Owner it wraps. As long as there are
-//! // other `Rc<T>` objects pointing at the same Owner, it will remain
-//! // allocated. Notice that the `Rc<T>` wrapper around Gadget.owner gets
-//! // automatically dereferenced for us.
+//! // Despite dropping `gadget_owner`, we're still able to print out the name
+//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a
+//! // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
+//! // other `Rc<Owner>` values pointing at the same `Owner`, it will remain
+//! // allocated. The field projection `gadget1.owner.name` works because
+//! // `Rc<Owner>` automatically dereferences to `Owner`.
//! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
-//! // At the end of the method, gadget1 and gadget2 get destroyed, and with
-//! // them the last counted references to our Owner. Gadget Man now gets
-//! // destroyed as well.
+//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and
+//! // with them the last counted references to our `Owner`. Gadget Man now
+//! // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
-//! Owner → Gadget, we will run into problems: an `Rc<T>` pointer from Owner
-//! → Gadget introduces a cycle between the objects. This means that their
-//! reference counts can never reach 0, and the objects will remain allocated: a
-//! memory leak. In order to get around this, we can use `Weak<T>` pointers.
-//! These pointers don't contribute to the total count.
+//! `Owner` to `Gadget`, we will run into problems. An `Rc` pointer from `Owner`
+//! to `Gadget` introduces a cycle between the values. This means that their
+//! reference counts can never reach 0, and the values will remain allocated
+//! forever: a memory leak. In order to get around this, we can use `Weak`
+//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
-//! place: in order to end up with two objects that point at each other, one of
-//! them needs to be mutable. This is problematic because `Rc<T>` enforces
-//! memory safety by only giving out shared references to the object it wraps,
+//! place. In order to end up with two values that point at each other, one of
+//! them needs to be mutable. This is difficult because `Rc` enforces
+//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
-//! object we wish to mutate in a `RefCell`, which provides *interior
+//! value we wish to mutate in a [`RefCell`][refcell], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
-//! `RefCell` enforces Rust's borrowing rules at runtime. Read the `Cell`
-//! documentation for more details on interior mutability.
+//! `RefCell` enforces Rust's borrowing rules at runtime.
//!
-//! ```rust
+//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//! }
//!
//! fn main() {
-//! // Create a reference counted Owner. Note the fact that we've put the
-//! // Owner's vector of Gadgets inside a RefCell so that we can mutate it
-//! // through a shared reference.
-//! let gadget_owner : Rc<Owner> = Rc::new(
+//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
+//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
+//! // a shared reference.
+//! let gadget_owner: Rc<Owner> = Rc::new(
//! Owner {
//! name: "Gadget Man".to_string(),
-//! gadgets: RefCell::new(Vec::new()),
+//! gadgets: RefCell::new(vec![]),
//! }
//! );
//!
-//! // Create Gadgets belonging to gadget_owner as before.
-//! let gadget1 = Rc::new(Gadget{id: 1, owner: gadget_owner.clone()});
-//! let gadget2 = Rc::new(Gadget{id: 2, owner: gadget_owner.clone()});
+//! // Create `Gadget`s belonging to `gadget_owner`, as before.
+//! let gadget1 = Rc::new(
+//! Gadget {
+//! id: 1,
+//! owner: gadget_owner.clone(),
+//! }
+//! );
+//! let gadget2 = Rc::new(
+//! Gadget {
+//! id: 2,
+//! owner: gadget_owner.clone(),
+//! }
+//! );
+//!
+//! // Add the `Gadget`s to their `Owner`.
+//! {
+//! let mut gadgets = gadget_owner.gadgets.borrow_mut();
+//! gadgets.push(Rc::downgrade(&gadget1));
+//! gadgets.push(Rc::downgrade(&gadget2));
//!
-//! // Add the Gadgets to their Owner. To do this we mutably borrow from
-//! // the RefCell holding the Owner's Gadgets.
-//! gadget_owner.gadgets.borrow_mut().push(Rc::downgrade(&gadget1));
-//! gadget_owner.gadgets.borrow_mut().push(Rc::downgrade(&gadget2));
+//! // `RefCell` dynamic borrow ends here.
+//! }
//!
-//! // Iterate over our Gadgets, printing their details out
-//! for gadget_opt in gadget_owner.gadgets.borrow().iter() {
+//! // Iterate over our `Gadget`s, printing their details out.
+//! for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
-//! // gadget_opt is a Weak<Gadget>. Since weak pointers can't guarantee
-//! // that their object is still allocated, we need to call upgrade()
-//! // on them to turn them into a strong reference. This returns an
-//! // Option, which contains a reference to our object if it still
-//! // exists.
-//! let gadget = gadget_opt.upgrade().unwrap();
+//! // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
+//! // guarantee the value is still allocated, we need to call
+//! // `upgrade`, which returns an `Option<Rc<Gadget>>`.
+//! //
+//! // In this case we know the value still exists, so we simply
+//! // `unwrap` the `Option`. In a more complicated program, you might
+//! // need graceful error handling for a `None` result.
+//!
+//! let gadget = gadget_weak.upgrade().unwrap();
//! println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//! }
//!
-//! // At the end of the method, gadget_owner, gadget1 and gadget2 get
-//! // destroyed. There are now no strong (`Rc<T>`) references to the gadgets.
-//! // Once they get destroyed, the Gadgets get destroyed. This zeroes the
-//! // reference count on Gadget Man, they get destroyed as well.
+//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
+//! // are destroyed. There are now no strong (`Rc`) pointers to the
+//! // gadgets, so they are destroyed. This zeroes the reference count on
+//! // Gadget Man, so he gets destroyed as well.
//! }
//! ```
}
-/// A reference-counted pointer type over an immutable value.
+/// A single-threaded reference-counting pointer.
///
-/// See the [module level documentation](./index.html) for more details.
+/// See the [module-level documentation](./index.html) for more details.
///
-/// Note: the inherent methods defined on `Rc<T>` are all associated functions,
-/// which means that you have to call them as e.g. `Rc::get_mut(&value)` instead
-/// of `value.get_mut()`. This is so that there are no conflicts with methods
-/// on the inner type `T`, which are what you want to call in the majority of
-/// cases.
-#[cfg_attr(stage0, unsafe_no_drop_flag)]
+/// The inherent methods of `Rc` are all associated functions, which means
+/// that you have to call them as e.g. `Rc::get_mut(&value)` instead of
+/// `value.get_mut()`. This avoids conflicts with methods of the inner
+/// type `T`.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Rc<T: ?Sized> {
ptr: Shared<RcBox<T>>,
}
}
- /// Unwraps the contained value if the `Rc<T>` has exactly one strong reference.
+ /// Returns the contained value, if the `Rc` has exactly one strong reference.
///
- /// Otherwise, an `Err` is returned with the same `Rc<T>`.
+ /// Otherwise, an `Err` is returned with the same `Rc` that was passed in.
///
/// This will succeed even if there are outstanding weak references.
///
///
/// let x = Rc::new(4);
/// let _y = x.clone();
- /// assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4)));
+ /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
/// ```
#[inline]
#[stable(feature = "rc_unique", since = "1.4.0")]
}
}
- /// Checks if `Rc::try_unwrap` would return `Ok`.
+ /// Checks whether `Rc::try_unwrap` would return `Ok`.
///
/// # Examples
///
/// let x = Rc::new(4);
/// let _y = x.clone();
/// assert!(!Rc::would_unwrap(&x));
- /// assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4)));
+ /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
/// ```
#[unstable(feature = "rc_would_unwrap",
reason = "just added for niche usecase",
}
impl<T: ?Sized> Rc<T> {
- /// Creates a new `Weak<T>` reference from this value.
+ /// Creates a new [`Weak`][weak] pointer to this value.
+ ///
+ /// [weak]: struct.Weak.html
///
/// # Examples
///
Weak { ptr: this.ptr }
}
- /// Get the number of weak references to this value.
+ /// Gets the number of [`Weak`][weak] pointers to this value.
+ ///
+ /// [weak]: struct.Weak.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(rc_counts)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ /// let _weak_five = Rc::downgrade(&five);
+ ///
+ /// assert_eq!(1, Rc::weak_count(&five));
+ /// ```
#[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful",
issue = "28356")]
this.weak() - 1
}
- /// Get the number of strong references to this value.
+ /// Gets the number of strong (`Rc`) pointers to this value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(rc_counts)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ /// let _also_five = five.clone();
+ ///
+ /// assert_eq!(2, Rc::strong_count(&five));
+ /// ```
#[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful",
issue = "28356")]
this.strong()
}
- /// Returns true if there are no other `Rc` or `Weak<T>` values that share
- /// the same inner value.
+ /// Returns true if there are no other `Rc` or [`Weak`][weak] pointers to
+ /// this inner value.
+ ///
+ /// [weak]: struct.Weak.html
///
/// # Examples
///
Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
}
- /// Returns a mutable reference to the contained value if the `Rc<T>` has
- /// one strong reference and no weak references.
+ /// Returns a mutable reference to the inner value, if there are
+ /// no other `Rc` or [`Weak`][weak] pointers to the same value.
+ ///
+ /// Returns [`None`][option] otherwise, because it is not safe to
+ /// mutate a shared value.
///
- /// Returns `None` if the `Rc<T>` is not unique.
+ /// See also [`make_mut`][make_mut], which will [`clone`][clone]
+ /// the inner value when it's shared.
+ ///
+ /// [weak]: struct.Weak.html
+ /// [option]: ../../std/option/enum.Option.html
+ /// [make_mut]: struct.Rc.html#method.make_mut
+ /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
///
/// # Examples
///
#[unstable(feature = "ptr_eq",
reason = "newly added",
issue = "36497")]
- /// Return whether two `Rc` references point to the same value
- /// (not just values that compare equal).
+ /// Returns true if the two `Rc`s point to the same value (not
+ /// just values that compare as equal).
///
/// # Examples
///
}
impl<T: Clone> Rc<T> {
- /// Make a mutable reference into the given `Rc<T>` by cloning the inner
- /// data if the `Rc<T>` doesn't have one strong reference and no weak
- /// references.
+ /// Makes a mutable reference into the given `Rc`.
+ ///
+ /// If there are other `Rc` or [`Weak`][weak] pointers to the same value,
+ /// then `make_mut` will invoke [`clone`][clone] on the inner value to
+ /// ensure unique ownership. This is also referred to as clone-on-write.
///
- /// This is also referred to as a copy-on-write.
+ /// See also [`get_mut`][get_mut], which will fail rather than cloning.
+ ///
+ /// [weak]: struct.Weak.html
+ /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+ /// [get_mut]: struct.Rc.html#method.get_mut
///
/// # Examples
///
///
/// let mut data = Rc::new(5);
///
- /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
- /// let mut other_data = data.clone(); // Won't clone inner data
- /// *Rc::make_mut(&mut data) += 1; // Clones inner data
- /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
- /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
+ /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
+ /// let mut other_data = data.clone(); // Won't clone inner data
+ /// *Rc::make_mut(&mut data) += 1; // Clones inner data
+ /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
+ /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
///
- /// // Note: data and other_data now point to different numbers
+ /// // Now `data` and `other_data` point to different values.
/// assert_eq!(*data, 8);
/// assert_eq!(*other_data, 12);
- ///
/// ```
#[inline]
#[stable(feature = "rc_unique", since = "1.4.0")]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Drop for Rc<T> {
- /// Drops the `Rc<T>`.
+ /// Drops the `Rc`.
///
/// This will decrement the strong reference count. If the strong reference
- /// count becomes zero and the only other references are `Weak<T>` ones,
- /// `drop`s the inner value.
+ /// count reaches zero then the only other references (if any) are `Weak`,
+ /// so we `drop` the inner value.
///
/// # Examples
///
/// ```
/// use std::rc::Rc;
///
- /// {
- /// let five = Rc::new(5);
+ /// struct Foo;
///
- /// // stuff
- ///
- /// drop(five); // explicit drop
+ /// impl Drop for Foo {
+ /// fn drop(&mut self) {
+ /// println!("dropped!");
+ /// }
/// }
- /// {
- /// let five = Rc::new(5);
///
- /// // stuff
+ /// let foo = Rc::new(Foo);
+ /// let foo2 = foo.clone();
///
- /// } // implicit drop
+ /// drop(foo); // Doesn't print anything
+ /// drop(foo2); // Prints "dropped!"
/// ```
#[unsafe_destructor_blind_to_params]
fn drop(&mut self) {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Rc<T> {
- /// Makes a clone of the `Rc<T>`.
+ /// Makes a clone of the `Rc` pointer.
///
- /// When you clone an `Rc<T>`, it will create another pointer to the data and
- /// increase the strong reference counter.
+ /// This creates another pointer to the same inner value, increasing the
+ /// strong reference count.
///
/// # Examples
///
/// use std::rc::Rc;
///
/// let x: Rc<i32> = Default::default();
+ /// assert_eq!(*x, 0);
/// ```
#[inline]
fn default() -> Rc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
- /// Equality for two `Rc<T>`s.
+ /// Equality for two `Rc`s.
///
- /// Two `Rc<T>`s are equal if their inner value are equal.
+ /// Two `Rc`s are equal if their inner values are equal.
///
/// # Examples
///
///
/// let five = Rc::new(5);
///
- /// five == Rc::new(5);
+ /// assert!(five == Rc::new(5));
/// ```
#[inline(always)]
fn eq(&self, other: &Rc<T>) -> bool {
**self == **other
}
- /// Inequality for two `Rc<T>`s.
+ /// Inequality for two `Rc`s.
///
- /// Two `Rc<T>`s are unequal if their inner value are unequal.
+ /// Two `Rc`s are unequal if their inner values are unequal.
///
/// # Examples
///
///
/// let five = Rc::new(5);
///
- /// five != Rc::new(5);
+ /// assert!(five != Rc::new(6));
/// ```
#[inline(always)]
fn ne(&self, other: &Rc<T>) -> bool {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
- /// Partial comparison for two `Rc<T>`s.
+ /// Partial comparison for two `Rc`s.
///
/// The two are compared by calling `partial_cmp()` on their inner values.
///
///
/// ```
/// use std::rc::Rc;
+ /// use std::cmp::Ordering;
///
/// let five = Rc::new(5);
///
- /// five.partial_cmp(&Rc::new(5));
+ /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
/// ```
#[inline(always)]
fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
- /// Less-than comparison for two `Rc<T>`s.
+ /// Less-than comparison for two `Rc`s.
///
/// The two are compared by calling `<` on their inner values.
///
///
/// let five = Rc::new(5);
///
- /// five < Rc::new(5);
+ /// assert!(five < Rc::new(6));
/// ```
#[inline(always)]
fn lt(&self, other: &Rc<T>) -> bool {
**self < **other
}
- /// 'Less-than or equal to' comparison for two `Rc<T>`s.
+ /// 'Less than or equal to' comparison for two `Rc`s.
///
/// The two are compared by calling `<=` on their inner values.
///
///
/// let five = Rc::new(5);
///
- /// five <= Rc::new(5);
+ /// assert!(five <= Rc::new(5));
/// ```
#[inline(always)]
fn le(&self, other: &Rc<T>) -> bool {
**self <= **other
}
- /// Greater-than comparison for two `Rc<T>`s.
+ /// Greater-than comparison for two `Rc`s.
///
/// The two are compared by calling `>` on their inner values.
///
///
/// let five = Rc::new(5);
///
- /// five > Rc::new(5);
+ /// assert!(five > Rc::new(4));
/// ```
#[inline(always)]
fn gt(&self, other: &Rc<T>) -> bool {
**self > **other
}
- /// 'Greater-than or equal to' comparison for two `Rc<T>`s.
+ /// 'Greater than or equal to' comparison for two `Rc`s.
///
/// The two are compared by calling `>=` on their inner values.
///
///
/// let five = Rc::new(5);
///
- /// five >= Rc::new(5);
+ /// assert!(five >= Rc::new(5));
/// ```
#[inline(always)]
fn ge(&self, other: &Rc<T>) -> bool {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Rc<T> {
- /// Comparison for two `Rc<T>`s.
+ /// Comparison for two `Rc`s.
///
/// The two are compared by calling `cmp()` on their inner values.
///
///
/// ```
/// use std::rc::Rc;
+ /// use std::cmp::Ordering;
///
/// let five = Rc::new(5);
///
- /// five.partial_cmp(&Rc::new(5));
+ /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
/// ```
#[inline]
fn cmp(&self, other: &Rc<T>) -> Ordering {
}
}
-/// A weak version of `Rc<T>`.
+/// A weak version of [`Rc`][rc].
+///
+/// `Weak` pointers do not count towards determining if the inner value
+/// should be dropped.
+///
+/// The typical way to obtain a `Weak` pointer is to call
+/// [`Rc::downgrade`][downgrade].
///
-/// Weak references do not count when determining if the inner value should be
-/// dropped.
+/// See the [module-level documentation](./index.html) for more details.
///
-/// See the [module level documentation](./index.html) for more.
-#[cfg_attr(stage0, unsafe_no_drop_flag)]
+/// [rc]: struct.Rc.html
+/// [downgrade]: struct.Rc.html#method.downgrade
#[stable(feature = "rc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
ptr: Shared<RcBox<T>>,
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
impl<T> Weak<T> {
- /// Constructs a new `Weak<T>` without an accompanying instance of T.
+ /// Constructs a new `Weak<T>`, without an accompanying instance of `T`.
///
- /// This allocates memory for T, but does not initialize it. Calling
- /// Weak<T>::upgrade() on the return value always gives None.
+ /// This allocates memory for `T`, but does not initialize it. Calling
+ /// [`upgrade`][upgrade] on the return value always gives
+ /// [`None`][option].
+ ///
+ /// [upgrade]: struct.Weak.html#method.upgrade
+ /// [option]: ../../std/option/enum.Option.html
///
/// # Examples
///
/// use std::rc::Weak;
///
/// let empty: Weak<i64> = Weak::new();
+ /// assert!(empty.upgrade().is_none());
/// ```
#[stable(feature = "downgraded_weak", since = "1.10.0")]
pub fn new() -> Weak<T> {
}
impl<T: ?Sized> Weak<T> {
- /// Upgrades a weak reference to a strong reference.
+ /// Upgrades the `Weak` pointer to an [`Rc`][rc], if possible.
///
- /// Upgrades the `Weak<T>` reference to an `Rc<T>`, if possible.
+ /// Returns [`None`][option] if the strong count has reached zero and the
+ /// inner value was destroyed.
///
- /// Returns `None` if there were no strong references and the data was
- /// destroyed.
+ /// [rc]: struct.Rc.html
+ /// [option]: ../../std/option/enum.Option.html
///
/// # Examples
///
/// let weak_five = Rc::downgrade(&five);
///
/// let strong_five: Option<Rc<_>> = weak_five.upgrade();
+ /// assert!(strong_five.is_some());
+ ///
+ /// // Destroy all strong pointers.
+ /// drop(strong_five);
+ /// drop(five);
+ ///
+ /// assert!(weak_five.upgrade().is_none());
/// ```
#[stable(feature = "rc_weak", since = "1.4.0")]
pub fn upgrade(&self) -> Option<Rc<T>> {
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized> Drop for Weak<T> {
- /// Drops the `Weak<T>`.
+ /// Drops the `Weak` pointer.
///
/// This will decrement the weak reference count.
///
/// ```
/// use std::rc::Rc;
///
- /// {
- /// let five = Rc::new(5);
- /// let weak_five = Rc::downgrade(&five);
- ///
- /// // stuff
+ /// struct Foo;
///
- /// drop(weak_five); // explicit drop
+ /// impl Drop for Foo {
+ /// fn drop(&mut self) {
+ /// println!("dropped!");
+ /// }
/// }
- /// {
- /// let five = Rc::new(5);
- /// let weak_five = Rc::downgrade(&five);
///
- /// // stuff
+ /// let foo = Rc::new(Foo);
+ /// let weak_foo = Rc::downgrade(&foo);
+ /// let other_weak_foo = weak_foo.clone();
///
- /// } // implicit drop
+ /// drop(weak_foo); // Doesn't print anything
+ /// drop(foo); // Prints "dropped!"
+ ///
+ /// assert!(other_weak_foo.upgrade().is_none());
/// ```
fn drop(&mut self) {
unsafe {
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized> Clone for Weak<T> {
- /// Makes a clone of the `Weak<T>`.
+ /// Makes a clone of the `Weak` pointer.
///
- /// This increases the weak reference count.
+ /// This creates another pointer to the same inner value, increasing the
+ /// weak reference count.
///
/// # Examples
///
#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
- /// Creates a new `Weak<T>`.
+ /// Constructs a new `Weak<T>`, without an accompanying instance of `T`.
+ ///
+ /// This allocates memory for `T`, but does not initialize it. Calling
+ /// [`upgrade`][upgrade] on the return value always gives
+ /// [`None`][option].
+ ///
+ /// [upgrade]: struct.Weak.html#method.upgrade
+ /// [option]: ../../std/option/enum.Option.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Weak;
+ ///
+ /// let empty: Weak<i64> = Default::default();
+ /// assert!(empty.upgrade().is_none());
+ /// ```
fn default() -> Weak<T> {
Weak::new()
}
println!("cargo:rustc-cfg=cargobuild");
println!("cargo:rerun-if-changed=build.rs");
- let target = env::var("TARGET").unwrap();
- let host = env::var("HOST").unwrap();
+ let target = env::var("TARGET").expect("TARGET was not set");
+ let host = env::var("HOST").expect("HOST was not set");
let build_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap());
let src_dir = env::current_dir().unwrap();
.current_dir(&build_dir)
.arg("build_lib_static")
.arg("-j")
- .arg(env::var("NUM_JOBS").unwrap()));
+ .arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set")));
if target.contains("windows") {
println!("cargo:rustc-link-lib=static=jemalloc");
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
-//! This crate has two arenas implemented: `TypedArena`, which is a simpler
-//! arena but can only hold objects of a single type, and `Arena`, which is a
-//! more complex, slower arena which can hold objects of any type.
+//! This crate implements `TypedArena`, a simple arena that can only hold
+//! objects of a single type.
#![crate_name = "arena"]
#![unstable(feature = "rustc_private", issue = "27812")]
use alloc::heap;
use alloc::raw_vec::RawVec;
-/// A faster arena that can hold objects of only one type.
+/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
/// A pointer to the next object to be allocated.
ptr: Cell<*mut T>,
/// reached, a new chunk is allocated.
end: Cell<*mut T>,
- /// A vector arena segments.
+ /// A vector of arena chunks.
chunks: RefCell<Vec<TypedArenaChunk<T>>>,
/// Marker indicating that dropping the arena causes its owned
}
struct TypedArenaChunk<T> {
- /// Pointer to the next arena segment.
+ /// The raw storage for the arena chunk.
storage: RawVec<T>,
}
const PAGE: usize = 4096;
impl<T> TypedArena<T> {
- /// Creates a new `TypedArena` with preallocated space for many objects.
+ /// Creates a new `TypedArena`.
#[inline]
pub fn new() -> TypedArena<T> {
- // Reserve at least one page.
- let elem_size = cmp::max(1, mem::size_of::<T>());
- TypedArena::with_capacity(PAGE / elem_size)
- }
-
- /// Creates a new `TypedArena` with preallocated space for the given number of
- /// objects.
- #[inline]
- pub fn with_capacity(capacity: usize) -> TypedArena<T> {
- unsafe {
- let chunk = TypedArenaChunk::<T>::new(cmp::max(1, capacity));
- TypedArena {
- ptr: Cell::new(chunk.start()),
- end: Cell::new(chunk.end()),
- chunks: RefCell::new(vec![chunk]),
- _own: PhantomData,
- }
+ TypedArena {
+ // We set both `ptr` and `end` to 0 so that the first call to
+ // alloc() will trigger a grow().
+ ptr: Cell::new(0 as *mut T),
+ end: Cell::new(0 as *mut T),
+ chunks: RefCell::new(vec![]),
+ _own: PhantomData,
}
}
fn grow(&self) {
unsafe {
let mut chunks = self.chunks.borrow_mut();
- let prev_capacity = chunks.last().unwrap().storage.cap();
- let new_capacity = prev_capacity.checked_mul(2).unwrap();
- if chunks.last_mut().unwrap().storage.double_in_place() {
- self.end.set(chunks.last().unwrap().end());
+ let (chunk, new_capacity);
+ if let Some(last_chunk) = chunks.last_mut() {
+ if last_chunk.storage.double_in_place() {
+ self.end.set(last_chunk.end());
+ return;
+ } else {
+ let prev_capacity = last_chunk.storage.cap();
+ new_capacity = prev_capacity.checked_mul(2).unwrap();
+ }
} else {
- let chunk = TypedArenaChunk::<T>::new(new_capacity);
- self.ptr.set(chunk.start());
- self.end.set(chunk.end());
- chunks.push(chunk);
+ let elem_size = cmp::max(1, mem::size_of::<T>());
+ new_capacity = cmp::max(1, PAGE / elem_size);
}
+ chunk = TypedArenaChunk::<T>::new(new_capacity);
+ self.ptr.set(chunk.start());
+ self.end.set(chunk.end());
+ chunks.push(chunk);
}
}
+
/// Clears the arena. Deallocates all but the longest chunk which may be reused.
pub fn clear(&mut self) {
unsafe {
// Clear the last chunk, which is partially filled.
let mut chunks_borrow = self.chunks.borrow_mut();
- let last_idx = chunks_borrow.len() - 1;
- self.clear_last_chunk(&mut chunks_borrow[last_idx]);
- // If `T` is ZST, code below has no effect.
- for mut chunk in chunks_borrow.drain(..last_idx) {
- let cap = chunk.storage.cap();
- chunk.destroy(cap);
+ if let Some(mut last_chunk) = chunks_borrow.pop() {
+ self.clear_last_chunk(&mut last_chunk);
+ // If `T` is ZST, code below has no effect.
+ for mut chunk in chunks_borrow.drain(..) {
+ let cap = chunk.storage.cap();
+ chunk.destroy(cap);
+ }
+ chunks_borrow.push(last_chunk);
}
}
}
unsafe {
// Determine how much was filled.
let mut chunks_borrow = self.chunks.borrow_mut();
- let mut last_chunk = chunks_borrow.pop().unwrap();
- // Drop the contents of the last chunk.
- self.clear_last_chunk(&mut last_chunk);
- // The last chunk will be dropped. Destroy all other chunks.
- for chunk in chunks_borrow.iter_mut() {
- let cap = chunk.storage.cap();
- chunk.destroy(cap);
+ if let Some(mut last_chunk) = chunks_borrow.pop() {
+ // Drop the contents of the last chunk.
+ self.clear_last_chunk(&mut last_chunk);
+ // The last chunk will be dropped. Destroy all other chunks.
+ for chunk in chunks_borrow.iter_mut() {
+ let cap = chunk.storage.cap();
+ chunk.destroy(cap);
+ }
}
// RawVec handles deallocation of `last_chunk` and `self.chunks`.
}
z: i32,
}
+ #[test]
+ pub fn test_unused() {
+ let arena: TypedArena<Point> = TypedArena::new();
+ assert!(arena.chunks.borrow().is_empty());
+ }
+
#[test]
fn test_arena_alloc_nested() {
struct Inner {
#![feature(step_by)]
#![feature(unicode)]
#![feature(unique)]
-#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]
#![cfg_attr(test, feature(rand, test))]
#![no_std]
}
/// Use the syntax described in `std::fmt` to create a value of type `String`.
-/// See `std::fmt` for more information.
+/// See [`std::fmt`][fmt] for more information.
+///
+/// [fmt]: ../std/fmt/index.html
///
/// # Examples
///
self.sort_by(|a, b| a.cmp(b))
}
- /// Sorts the slice, in place, using `key` to extract a key by which to
+ /// Sorts the slice, in place, using `f` to extract a key by which to
/// order the sort by.
///
/// This sort is stable and `O(n log n)` worst-case but allocates
pub fn push(&mut self, ch: char) {
match ch.len_utf8() {
1 => self.vec.push(ch as u8),
- _ => self.vec.extend_from_slice(ch.encode_utf8().as_slice()),
+ _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0;4]).as_bytes()),
}
}
let len = self.len();
assert!(idx <= len);
assert!(self.is_char_boundary(idx));
- let bits = ch.encode_utf8();
+ let mut bits = [0; 4];
+ let bits = ch.encode_utf8(&mut bits).as_bytes();
unsafe {
- self.insert_bytes(idx, bits.as_slice());
+ self.insert_bytes(idx, bits);
}
}
}
}
-#[stable(feature = "stringfromchars", since = "1.12.0")]
-impl<'a> From<&'a [char]> for String {
- #[inline]
- fn from(v: &'a [char]) -> String {
- let mut s = String::with_capacity(v.len());
- for c in v {
- s.push(*c);
- }
- s
- }
-}
-
-#[stable(feature = "stringfromchars", since = "1.12.0")]
-impl From<Vec<char>> for String {
- #[inline]
- fn from(v: Vec<char>) -> String {
- String::from(v.as_slice())
- }
-}
-
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Write for String {
#[inline]
/// Vec does not currently guarantee the order in which elements are dropped
/// (the order has changed in the past, and may change again).
///
-#[cfg_attr(stage0, unsafe_no_drop_flag)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Vec<T> {
buf: RawVec<T>,
/// ```
/// use std::collections::VecDeque;
///
- /// let mut vector: VecDeque<u32> = VecDeque::new();
+ /// let mut vector = VecDeque::new();
///
/// vector.push_back(0);
/// vector.push_back(1);
/// vector.push_back(2);
///
- /// assert_eq!(vector.as_slices(), (&[0u32, 1, 2] as &[u32], &[] as &[u32]));
+ /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..]));
///
/// vector.push_front(10);
/// vector.push_front(9);
///
- /// assert_eq!(vector.as_slices(), (&[9u32, 10] as &[u32], &[0u32, 1, 2] as &[u32]));
+ /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..]));
/// ```
#[inline]
#[stable(feature = "deque_extras_15", since = "1.5.0")]
/// ```
/// use std::collections::VecDeque;
///
- /// let mut vector: VecDeque<u32> = VecDeque::new();
+ /// let mut vector = VecDeque::new();
///
/// vector.push_back(0);
/// vector.push_back(1);
///
/// vector.as_mut_slices().0[0] = 42;
/// vector.as_mut_slices().1[0] = 24;
- /// assert_eq!(vector.as_slices(), (&[42u32, 10] as &[u32], &[24u32, 1] as &[u32]));
+ /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..]));
/// ```
#[inline]
#[stable(feature = "deque_extras_15", since = "1.5.0")]
#[test]
fn test_chars_decoding() {
+ let mut bytes = [0; 4];
for c in (0..0x110000).filter_map(::std::char::from_u32) {
- let bytes = c.encode_utf8();
- let s = ::std::str::from_utf8(bytes.as_slice()).unwrap();
+ let s = c.encode_utf8(&mut bytes);
if Some(c) != s.chars().next() {
panic!("character {:x}={} does not decode correctly", c as u32, c);
}
#[test]
fn test_chars_rev_decoding() {
+ let mut bytes = [0; 4];
for c in (0..0x110000).filter_map(::std::char::from_u32) {
- let bytes = c.encode_utf8();
- let s = ::std::str::from_utf8(bytes.as_slice()).unwrap();
+ let s = c.encode_utf8(&mut bytes);
if Some(c) != s.chars().rev().next() {
panic!("character {:x}={} does not decode correctly", c as u32, c);
}
}
fn main() {
- let target = env::var("TARGET").unwrap();
+ let target = env::var("TARGET").expect("TARGET was not set");
let cfg = &mut gcc::Config::new();
if target.contains("msvc") {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![cfg_attr(not(stage0), feature(compiler_builtins))]
+#![feature(compiler_builtins)]
#![no_std]
-#![cfg_attr(not(stage0), compiler_builtins)]
+#![compiler_builtins]
#![unstable(feature = "compiler_builtins_lib",
reason = "internal implementation detail of rustc right now",
issue = "0")]
use char_private::is_printable;
use convert::TryFrom;
use fmt;
+use slice;
use iter::FusedIterator;
use mem::transmute;
#[stable(feature = "core", since = "1.6.0")]
fn len_utf16(self) -> usize;
#[unstable(feature = "unicode", issue = "27784")]
- fn encode_utf8(self) -> EncodeUtf8;
+ fn encode_utf8(self, dst: &mut [u8]) -> &mut str;
#[unstable(feature = "unicode", issue = "27784")]
- fn encode_utf16(self) -> EncodeUtf16;
+ fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16];
}
#[stable(feature = "core", since = "1.6.0")]
}
#[inline]
- fn encode_utf8(self) -> EncodeUtf8 {
+ fn encode_utf8(self, dst: &mut [u8]) -> &mut str {
let code = self as u32;
- let mut buf = [0; 4];
- let pos = if code < MAX_ONE_B {
- buf[3] = code as u8;
- 3
- } else if code < MAX_TWO_B {
- buf[2] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;
- buf[3] = (code & 0x3F) as u8 | TAG_CONT;
- 2
- } else if code < MAX_THREE_B {
- buf[1] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;
- buf[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT;
- buf[3] = (code & 0x3F) as u8 | TAG_CONT;
- 1
- } else {
- buf[0] = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;
- buf[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT;
- buf[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT;
- buf[3] = (code & 0x3F) as u8 | TAG_CONT;
- 0
- };
- EncodeUtf8 { buf: buf, pos: pos }
+ unsafe {
+ let len =
+ if code < MAX_ONE_B && !dst.is_empty() {
+ *dst.get_unchecked_mut(0) = code as u8;
+ 1
+ } else if code < MAX_TWO_B && dst.len() >= 2 {
+ *dst.get_unchecked_mut(0) = (code >> 6 & 0x1F) as u8 | TAG_TWO_B;
+ *dst.get_unchecked_mut(1) = (code & 0x3F) as u8 | TAG_CONT;
+ 2
+ } else if code < MAX_THREE_B && dst.len() >= 3 {
+ *dst.get_unchecked_mut(0) = (code >> 12 & 0x0F) as u8 | TAG_THREE_B;
+ *dst.get_unchecked_mut(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT;
+ *dst.get_unchecked_mut(2) = (code & 0x3F) as u8 | TAG_CONT;
+ 3
+ } else if dst.len() >= 4 {
+ *dst.get_unchecked_mut(0) = (code >> 18 & 0x07) as u8 | TAG_FOUR_B;
+ *dst.get_unchecked_mut(1) = (code >> 12 & 0x3F) as u8 | TAG_CONT;
+ *dst.get_unchecked_mut(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT;
+ *dst.get_unchecked_mut(3) = (code & 0x3F) as u8 | TAG_CONT;
+ 4
+ } else {
+ panic!("encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}",
+ from_u32_unchecked(code).len_utf8(),
+ code,
+ dst.len())
+ };
+ transmute(slice::from_raw_parts_mut(dst.as_mut_ptr(), len))
+ }
}
#[inline]
- fn encode_utf16(self) -> EncodeUtf16 {
- let mut buf = [0; 2];
+ fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16] {
let mut code = self as u32;
- let pos = if (code & 0xFFFF) == code {
- // The BMP falls through (assuming non-surrogate, as it should)
- buf[1] = code as u16;
- 1
- } else {
- // Supplementary planes break into surrogates.
- code -= 0x1_0000;
- buf[0] = 0xD800 | ((code >> 10) as u16);
- buf[1] = 0xDC00 | ((code as u16) & 0x3FF);
- 0
- };
- EncodeUtf16 { buf: buf, pos: pos }
+ unsafe {
+ if (code & 0xFFFF) == code && !dst.is_empty() {
+ // The BMP falls through (assuming non-surrogate, as it should)
+ *dst.get_unchecked_mut(0) = code as u16;
+ slice::from_raw_parts_mut(dst.as_mut_ptr(), 1)
+ } else if dst.len() >= 2 {
+ // Supplementary planes break into surrogates.
+ code -= 0x1_0000;
+ *dst.get_unchecked_mut(0) = 0xD800 | ((code >> 10) as u16);
+ *dst.get_unchecked_mut(1) = 0xDC00 | ((code as u16) & 0x3FF);
+ slice::from_raw_parts_mut(dst.as_mut_ptr(), 2)
+ } else {
+ panic!("encode_utf16: need {} units to encode U+{:X}, but the buffer has {}",
+ from_u32_unchecked(code).len_utf16(),
+ code,
+ dst.len())
+ }
+ }
}
}
#[unstable(feature = "fused", issue = "35602")]
impl FusedIterator for EscapeDebug {}
-/// An iterator over `u8` entries represending the UTF-8 encoding of a `char`
-/// value.
-///
-/// Constructed via the `.encode_utf8()` method on `char`.
-#[unstable(feature = "unicode", issue = "27784")]
-#[derive(Debug)]
-pub struct EncodeUtf8 {
- buf: [u8; 4],
- pos: usize,
-}
-
-impl EncodeUtf8 {
- /// Returns the remaining bytes of this iterator as a slice.
- #[unstable(feature = "unicode", issue = "27784")]
- pub fn as_slice(&self) -> &[u8] {
- &self.buf[self.pos..]
- }
-}
-
-#[unstable(feature = "unicode", issue = "27784")]
-impl Iterator for EncodeUtf8 {
- type Item = u8;
-
- fn next(&mut self) -> Option<u8> {
- if self.pos == self.buf.len() {
- None
- } else {
- let ret = Some(self.buf[self.pos]);
- self.pos += 1;
- ret
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.as_slice().iter().size_hint()
- }
-}
-
-#[unstable(feature = "fused", issue = "35602")]
-impl FusedIterator for EncodeUtf8 {}
-
-/// An iterator over `u16` entries represending the UTF-16 encoding of a `char`
-/// value.
-///
-/// Constructed via the `.encode_utf16()` method on `char`.
-#[unstable(feature = "unicode", issue = "27784")]
-#[derive(Debug)]
-pub struct EncodeUtf16 {
- buf: [u16; 2],
- pos: usize,
-}
-
-impl EncodeUtf16 {
- /// Returns the remaining bytes of this iterator as a slice.
- #[unstable(feature = "unicode", issue = "27784")]
- pub fn as_slice(&self) -> &[u16] {
- &self.buf[self.pos..]
- }
-}
-
-
-#[unstable(feature = "unicode", issue = "27784")]
-impl Iterator for EncodeUtf16 {
- type Item = u16;
-
- fn next(&mut self) -> Option<u16> {
- if self.pos == self.buf.len() {
- None
- } else {
- let ret = Some(self.buf[self.pos]);
- self.pos += 1;
- ret
- }
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.as_slice().iter().size_hint()
- }
-}
-#[unstable(feature = "fused", issue = "35602")]
-impl FusedIterator for EncodeUtf16 {}
/// An iterator over an iterator of bytes of the characters the bytes represent
/// as UTF-8
/// `<DecodeUtf8 as Iterator>::next` returns this for an invalid input sequence.
#[unstable(feature = "decode_utf8", issue = "33906")]
-#[derive(PartialEq, Debug)]
+#[derive(PartialEq, Eq, Debug)]
pub struct InvalidSequence(());
#[unstable(feature = "decode_utf8", issue = "33906")]
reason = "deriving hack, should not be public",
issue = "0")]
pub struct AssertParamIsCopy<T: Copy + ?Sized> { _field: ::marker::PhantomData<T> }
-#[cfg(stage0)]
-#[doc(hidden)]
-#[inline(always)]
-#[unstable(feature = "derive_clone_copy",
- reason = "deriving hack, should not be public",
- issue = "0")]
-pub fn assert_receiver_is_clone<T: Clone + ?Sized>(_: &T) {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: ?Sized> Clone for &'a T {
/// This function will return an instance of `Error` on error.
#[stable(feature = "fmt_write_char", since = "1.1.0")]
fn write_char(&mut self, c: char) -> Result {
- self.write_str(unsafe {
- str::from_utf8_unchecked(c.encode_utf8().as_slice())
- })
+ self.write_str(c.encode_utf8(&mut [0; 4]))
}
/// Glue for usage of the `write!` macro with implementors of this trait.
/// safely be done so, so no constructors are given and the fields are private
/// to prevent modification.
///
-/// The `format_args!` macro will safely create an instance of this structure
+/// The [`format_args!`] macro will safely create an instance of this structure
/// and pass it to a function or closure, passed as the first argument. The
-/// macro validates the format string at compile-time so usage of the `write`
-/// and `format` functions can be safely performed.
+/// macro validates the format string at compile-time so usage of the [`write`]
+/// and [`format`] functions can be safely performed.
+///
+/// [`format_args!`]: ../../std/macro.format_args.html
+/// [`format`]: ../../std/fmt/fn.format.html
+/// [`write`]: ../../std/fmt/fn.write.html
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone)]
pub struct Arguments<'a> {
// Writes the sign if it exists, and then the prefix if it was requested
let write_prefix = |f: &mut Formatter| {
if let Some(c) = sign {
- f.buf.write_str(unsafe {
- str::from_utf8_unchecked(c.encode_utf8().as_slice())
- })?;
+ f.buf.write_str(c.encode_utf8(&mut [0; 4]))?;
}
if prefixed { f.buf.write_str(prefix) }
else { Ok(()) }
rt::v1::Alignment::Center => (padding / 2, (padding + 1) / 2),
};
- let fill = self.fill.encode_utf8();
- let fill = unsafe {
- str::from_utf8_unchecked(fill.as_slice())
- };
+ let mut fill = [0; 4];
+ let fill = self.fill.encode_utf8(&mut fill);
for _ in 0..pre_pad {
self.buf.write_str(fill)?;
if f.width.is_none() && f.precision.is_none() {
f.write_char(*self)
} else {
- f.pad(unsafe {
- str::from_utf8_unchecked(self.encode_utf8().as_slice())
- })
+ f.pad(self.encode_utf8(&mut [0; 4]))
}
}
}
}
/// Possible alignments that can be requested as part of a formatting directive.
-#[derive(Copy, Clone, PartialEq)]
+#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Alignment {
/// Indication that contents should be left-aligned.
Left,
/// own, or if it does not enable any significant optimizations.
pub fn assume(b: bool);
- #[cfg(not(stage0))]
/// Hints to the compiler that branch condition is likely to be true.
/// Returns the value passed to it.
///
/// Any use other than with `if` statements will probably not have an effect.
pub fn likely(b: bool) -> bool;
- #[cfg(not(stage0))]
/// Hints to the compiler that branch condition is likely to be false.
/// Returns the value passed to it.
///
/// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
/// bytes of memory starting at `dst` to `val`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ptr;
+ ///
+ /// let mut vec = vec![0; 4];
+ /// unsafe {
+ /// let vec_ptr = vec.as_mut_ptr();
+ /// ptr::write_bytes(vec_ptr, b'a', 2);
+ /// }
+ /// assert_eq!(vec, [b'a', b'a', 0, 0]);
+ /// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
($($t:ty)*) => ($(
#[stable(feature = "rust1", since = "1.0.0")]
impl ExactSizeIterator for ops::Range<$t> { }
+ )*)
+}
+macro_rules! range_incl_exact_iter_impl {
+ ($($t:ty)*) => ($(
#[unstable(feature = "inclusive_range",
reason = "recently added, follows RFC",
issue = "28237")]
}
}
-// Ranges of u64 and i64 are excluded because they cannot guarantee having
-// a length <= usize::MAX, which is required by ExactSizeIterator.
+// These macros generate `ExactSizeIterator` impls for various range types.
+// Range<{u,i}64> and RangeInclusive<{u,i}{32,64,size}> are excluded
+// because they cannot guarantee having a length <= usize::MAX, which is
+// required by ExactSizeIterator.
range_exact_iter_impl!(usize u8 u16 u32 isize i8 i16 i32);
+range_incl_exact_iter_impl!(u8 u16 i8 i16);
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: Step + Clone> DoubleEndedIterator for ops::Range<A> where
});
}
+/// Asserts that two expressions are not equal to each other.
+///
+/// On panic, this macro will print the values of the expressions with their
+/// debug representations.
+///
+/// # Examples
+///
+/// ```
+/// let a = 3;
+/// let b = 2;
+/// assert_ne!(a, b);
+/// ```
+#[macro_export]
+#[stable(feature = "assert_ne", since = "1.12.0")]
+macro_rules! assert_ne {
+ ($left:expr , $right:expr) => ({
+ match (&$left, &$right) {
+ (left_val, right_val) => {
+ if *left_val == *right_val {
+ panic!("assertion failed: `(left != right)` \
+ (left: `{:?}`, right: `{:?}`)", left_val, right_val)
+ }
+ }
+ }
+ });
+ ($left:expr , $right:expr, $($arg:tt)*) => ({
+ match (&($left), &($right)) {
+ (left_val, right_val) => {
+ if *left_val == *right_val {
+ panic!("assertion failed: `(left != right)` \
+ (left: `{:?}`, right: `{:?}`): {}", left_val, right_val,
+ format_args!($($arg)*))
+ }
+ }
+ }
+ });
+}
+
/// Ensure that a boolean expression is `true` at runtime.
///
/// This will invoke the `panic!` macro if the provided expression cannot be
($($arg:tt)*) => (if cfg!(debug_assertions) { assert_eq!($($arg)*); })
}
+/// Asserts that two expressions are not equal to each other.
+///
+/// On panic, this macro will print the values of the expressions with their
+/// debug representations.
+///
+/// Unlike `assert_ne!`, `debug_assert_ne!` statements are only enabled in non
+/// optimized builds by default. An optimized build will omit all
+/// `debug_assert_ne!` statements unless `-C debug-assertions` is passed to the
+/// compiler. This makes `debug_assert_ne!` useful for checks that are too
+/// expensive to be present in a release build but may be helpful during
+/// development.
+///
+/// # Examples
+///
+/// ```
+/// let a = 3;
+/// let b = 2;
+/// debug_assert_ne!(a, b);
+/// ```
+#[macro_export]
+#[stable(feature = "assert_ne", since = "1.12.0")]
+macro_rules! debug_assert_ne {
+ ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_ne!($($arg)*); })
+}
+
/// Helper macro for reducing boilerplate code for matching `Result` together
/// with converting downstream errors.
///
/// [`FromStr`]: ../str/trait.FromStr.html
/// [`f32`]: ../../std/primitive.f32.html
/// [`f64`]: ../../std/primitive.f64.html
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ParseFloatError {
kind: FloatErrorKind
}
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
enum FloatErrorKind {
Empty,
Invalid,
/// - Any number from `(mant - minus) * 2^exp` to `(mant + plus) * 2^exp` will
/// round to the original value. The range is inclusive only when
/// `inclusive` is true.
-#[derive(Copy, Clone, Debug, PartialEq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct Decoded {
/// The scaled mantissa.
pub mant: u64,
}
/// Decoded unsigned value.
-#[derive(Copy, Clone, Debug, PartialEq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FullDecoded {
/// Not-a-number.
Nan,
let mut base = self;
let mut acc = 1;
- let mut prev_base = self;
- let mut base_oflo = false;
- while exp > 0 {
+ while exp > 1 {
if (exp & 1) == 1 {
- if base_oflo {
- // ensure overflow occurs in the same manner it
- // would have otherwise (i.e. signal any exception
- // it would have otherwise).
- acc = acc * (prev_base * prev_base);
- } else {
- acc = acc * base;
- }
+ acc = acc * base;
}
- prev_base = base;
- let (new_base, new_base_oflo) = base.overflowing_mul(base);
- base = new_base;
- base_oflo = new_base_oflo;
exp /= 2;
+ base = base * base;
}
+
+ // Deal with the final bit of the exponent separately, since
+ // squaring the base afterwards is not necessary and may cause a
+ // needless overflow.
+ if exp == 1 {
+ acc = acc * base;
+ }
+
acc
}
/// assert_eq!(nan.classify(), FpCategory::Nan);
/// assert_eq!(sub.classify(), FpCategory::Subnormal);
/// ```
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum FpCategory {
/// "Not a Number", often obtained by dividing by zero.
/// on the primitive integer types, such as [`i8::from_str_radix()`].
///
/// [`i8::from_str_radix()`]: ../../std/primitive.i8.html#method.from_str_radix
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ParseIntError { kind: IntErrorKind }
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
enum IntErrorKind {
Empty,
InvalidDigit,
/// Moves the value `v` out of the `Option<T>` if it is `Some(v)`.
///
- /// # Panics
- ///
- /// Panics if the self value equals `None`.
- ///
- /// # Safety note
- ///
/// In general, because this function may panic, its use is discouraged.
/// Instead, prefer to use pattern matching and handle the `None`
/// case explicitly.
///
+ /// # Panics
+ ///
+ /// Panics if the self value equals `None`.
+ ///
/// # Examples
///
/// ```
}
/// An error returned when parsing a `bool` from a string fails.
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ParseBoolError { _priv: () }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::char;
+use std::{char,str};
use std::convert::TryFrom;
#[test]
#[test]
fn test_encode_utf8() {
fn check(input: char, expect: &[u8]) {
- assert_eq!(input.encode_utf8().as_slice(), expect);
- for (a, b) in input.encode_utf8().zip(expect) {
- assert_eq!(a, *b);
- }
+ let mut buf = [0; 4];
+ let ptr = buf.as_ptr();
+ let s = input.encode_utf8(&mut buf);
+ assert_eq!(s.as_ptr() as usize, ptr as usize);
+ assert!(str::from_utf8(s.as_bytes()).is_ok());
+ assert_eq!(s.as_bytes(), expect);
}
check('x', &[0x78]);
#[test]
fn test_encode_utf16() {
fn check(input: char, expect: &[u16]) {
- assert_eq!(input.encode_utf16().as_slice(), expect);
- for (a, b) in input.encode_utf16().zip(expect) {
- assert_eq!(a, *b);
- }
+ let mut buf = [0; 2];
+ let ptr = buf.as_mut_ptr();
+ let b = input.encode_utf16(&mut buf);
+ assert_eq!(b.as_mut_ptr() as usize, ptr as usize);
+ assert_eq!(b, expect);
}
check('x', &[0x0078]);
-Subproject commit d4f6a19c55a03e3f9f6fb7377911b37ed807eb6c
+Subproject commit eb708c020826a8d792a5a5275be147aabe47fe24
+++ /dev/null
-[package]
-authors = ["The Rust Project Developers"]
-name = "rbml"
-version = "0.0.0"
-
-[lib]
-name = "rbml"
-path = "lib.rs"
-crate-type = ["dylib"]
-
-[dependencies]
-log = { path = "../liblog" }
-serialize = { path = "../libserialize" }
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[inline]
-pub fn write_to_vec(vec: &mut Vec<u8>, position: &mut usize, byte: u8) {
- if *position == vec.len() {
- vec.push(byte);
- } else {
- vec[*position] = byte;
- }
-
- *position += 1;
-}
-
-pub fn write_unsigned_leb128(out: &mut Vec<u8>, start_position: usize, mut value: u64) -> usize {
- let mut position = start_position;
- loop {
- let mut byte = (value & 0x7F) as u8;
- value >>= 7;
- if value != 0 {
- byte |= 0x80;
- }
-
- write_to_vec(out, &mut position, byte);
-
- if value == 0 {
- break;
- }
- }
-
- return position - start_position;
-}
-
-pub fn read_unsigned_leb128(data: &[u8], start_position: usize) -> (u64, usize) {
- let mut result = 0;
- let mut shift = 0;
- let mut position = start_position;
- loop {
- let byte = data[position];
- position += 1;
- result |= ((byte & 0x7F) as u64) << shift;
- if (byte & 0x80) == 0 {
- break;
- }
- shift += 7;
- }
-
- (result, position - start_position)
-}
-
-
-pub fn write_signed_leb128(out: &mut Vec<u8>, start_position: usize, mut value: i64) -> usize {
- let mut position = start_position;
-
- loop {
- let mut byte = (value as u8) & 0x7f;
- value >>= 7;
- let more = !((((value == 0) && ((byte & 0x40) == 0)) ||
- ((value == -1) && ((byte & 0x40) != 0))));
- if more {
- byte |= 0x80; // Mark this byte to show that more bytes will follow.
- }
-
- write_to_vec(out, &mut position, byte);
-
- if !more {
- break;
- }
- }
-
- return position - start_position;
-}
-
-pub fn read_signed_leb128(data: &[u8], start_position: usize) -> (i64, usize) {
- let mut result = 0;
- let mut shift = 0;
- let mut position = start_position;
- let mut byte;
-
- loop {
- byte = data[position];
- position += 1;
- result |= ((byte & 0x7F) as i64) << shift;
- shift += 7;
-
- if (byte & 0x80) == 0 {
- break;
- }
- }
-
- if (shift < 64) && ((byte & 0x40) != 0) {
- // sign extend
- result |= -(1i64 << shift);
- }
-
- (result, position - start_position)
-}
-
-#[test]
-fn test_unsigned_leb128() {
- let mut stream = Vec::with_capacity(10000);
-
- for x in 0..62 {
- let pos = stream.len();
- let bytes_written = write_unsigned_leb128(&mut stream, pos, 3 << x);
- assert_eq!(stream.len(), pos + bytes_written);
- }
-
- let mut position = 0;
- for x in 0..62 {
- let expected = 3 << x;
- let (actual, bytes_read) = read_unsigned_leb128(&stream, position);
- assert_eq!(expected, actual);
- position += bytes_read;
- }
- assert_eq!(stream.len(), position);
-}
-
-#[test]
-fn test_signed_leb128() {
- let mut values = Vec::new();
-
- let mut i = -500;
- while i < 500 {
- values.push(i * 123457i64);
- i += 1;
- }
-
- let mut stream = Vec::new();
-
- for &x in &values {
- let pos = stream.len();
- let bytes_written = write_signed_leb128(&mut stream, pos, x);
- assert_eq!(stream.len(), pos + bytes_written);
- }
-
- let mut pos = 0;
- for &x in &values {
- let (value, bytes_read) = read_signed_leb128(&mut stream, pos);
- pos += bytes_read;
- assert_eq!(x, value);
- }
- assert_eq!(pos, stream.len());
-}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Really Bad Markup Language (rbml) is an internal serialization format of rustc.
-//! This is not intended to be used by users.
-//!
-//! Originally based on the Extensible Binary Markup Language
-//! (ebml; http://www.matroska.org/technical/specs/rfc/index.html),
-//! it is now a separate format tuned for the rust object metadata.
-//!
-//! # Encoding
-//!
-//! RBML document consists of the tag, length and data.
-//! The encoded data can contain multiple RBML documents concatenated.
-//!
-//! **Tags** are a hint for the following data.
-//! Tags are a number from 0x000 to 0xfff, where 0xf0 through 0xff is reserved.
-//! Tags less than 0xf0 are encoded in one literal byte.
-//! Tags greater than 0xff are encoded in two big-endian bytes,
-//! where the tag number is ORed with 0xf000. (E.g. tag 0x123 = `f1 23`)
-//!
-//! **Lengths** encode the length of the following data.
-//! It is a variable-length unsigned isize, and one of the following forms:
-//!
-//! - `80` through `fe` for lengths up to 0x7e;
-//! - `40 ff` through `7f ff` for lengths up to 0x3fff;
-//! - `20 40 00` through `3f ff ff` for lengths up to 0x1fffff;
-//! - `10 20 00 00` through `1f ff ff ff` for lengths up to 0xfffffff.
-//!
-//! The "overlong" form is allowed so that the length can be encoded
-//! without the prior knowledge of the encoded data.
-//! For example, the length 0 can be represented either by `80`, `40 00`,
-//! `20 00 00` or `10 00 00 00`.
-//! The encoder tries to minimize the length if possible.
-//! Also, some predefined tags listed below are so commonly used that
-//! their lengths are omitted ("implicit length").
-//!
-//! **Data** can be either binary bytes or zero or more nested RBML documents.
-//! Nested documents cannot overflow, and should be entirely contained
-//! within a parent document.
-//!
-//! # Predefined Tags
-//!
-//! Most RBML tags are defined by the application.
-//! (For the rust object metadata, see also `rustc::metadata::common`.)
-//! RBML itself does define a set of predefined tags however,
-//! intended for the auto-serialization implementation.
-//!
-//! Predefined tags with an implicit length:
-//!
-//! - `U8` (`00`): 1-byte unsigned integer.
-//! - `U16` (`01`): 2-byte big endian unsigned integer.
-//! - `U32` (`02`): 4-byte big endian unsigned integer.
-//! - `U64` (`03`): 8-byte big endian unsigned integer.
-//! Any of `U*` tags can be used to encode primitive unsigned integer types,
-//! as long as it is no greater than the actual size.
-//! For example, `u8` can only be represented via the `U8` tag.
-//!
-//! - `I8` (`04`): 1-byte signed integer.
-//! - `I16` (`05`): 2-byte big endian signed integer.
-//! - `I32` (`06`): 4-byte big endian signed integer.
-//! - `I64` (`07`): 8-byte big endian signed integer.
-//! Similar to `U*` tags. Always uses two's complement encoding.
-//!
-//! - `Bool` (`08`): 1-byte boolean value, `00` for false and `01` for true.
-//!
-//! - `Char` (`09`): 4-byte big endian Unicode scalar value.
-//! Surrogate pairs or out-of-bound values are invalid.
-//!
-//! - `F32` (`0a`): 4-byte big endian unsigned integer representing
-//! IEEE 754 binary32 floating-point format.
-//! - `F64` (`0b`): 8-byte big endian unsigned integer representing
-//! IEEE 754 binary64 floating-point format.
-//!
-//! - `Sub8` (`0c`): 1-byte unsigned integer for supplementary information.
-//! - `Sub32` (`0d`): 4-byte unsigned integer for supplementary information.
-//! Those two tags normally occur as the first subdocument of certain tags,
-//! namely `Enum`, `Vec` and `Map`, to provide a variant or size information.
-//! They can be used interchangeably.
-//!
-//! Predefined tags with an explicit length:
-//!
-//! - `Str` (`10`): A UTF-8-encoded string.
-//!
-//! - `Enum` (`11`): An enum.
-//! The first subdocument should be `Sub*` tags with a variant ID.
-//! Subsequent subdocuments, if any, encode variant arguments.
-//!
-//! - `Vec` (`12`): A vector (sequence).
-//! - `VecElt` (`13`): A vector element.
-//! The first subdocument should be `Sub*` tags with the number of elements.
-//! Subsequent subdocuments should be `VecElt` tag per each element.
-//!
-//! - `Map` (`14`): A map (associated array).
-//! - `MapKey` (`15`): A key part of the map entry.
-//! - `MapVal` (`16`): A value part of the map entry.
-//! The first subdocument should be `Sub*` tags with the number of entries.
-//! Subsequent subdocuments should be an alternating sequence of
-//! `MapKey` and `MapVal` tags per each entry.
-//!
-//! - `Opaque` (`17`): An opaque, custom-format tag.
-//! Used to wrap ordinary custom tags or data in the auto-serialized context.
-//! Rustc typically uses this to encode type information.
-//!
-//! First 0x20 tags are reserved by RBML; custom tags start at 0x20.
-
-#![crate_name = "rbml"]
-#![unstable(feature = "rustc_private", issue = "27812")]
-#![crate_type = "rlib"]
-#![crate_type = "dylib"]
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
- html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
- html_root_url = "https://doc.rust-lang.org/nightly/",
- html_playground_url = "https://play.rust-lang.org/",
- test(attr(deny(warnings))))]
-#![cfg_attr(not(stage0), deny(warnings))]
-
-#![feature(rustc_private)]
-#![feature(staged_api)]
-#![feature(question_mark)]
-
-#![cfg_attr(test, feature(test))]
-
-extern crate serialize;
-
-#[cfg(test)]
-extern crate serialize as rustc_serialize; // Used by RustcEncodable
-
-#[macro_use]
-extern crate log;
-
-#[cfg(test)]
-extern crate test;
-
-pub mod opaque;
-pub mod leb128;
-
-pub use self::EbmlEncoderTag::*;
-pub use self::Error::*;
-
-use std::str;
-use std::fmt;
-
-/// Common data structures
-#[derive(Clone, Copy)]
-pub struct Doc<'a> {
- pub data: &'a [u8],
- pub start: usize,
- pub end: usize,
-}
-
-impl<'doc> Doc<'doc> {
- pub fn new(data: &'doc [u8]) -> Doc<'doc> {
- Doc {
- data: data,
- start: 0,
- end: data.len(),
- }
- }
-
- pub fn get(&self, tag: usize) -> Doc<'doc> {
- reader::get_doc(*self, tag)
- }
-
- pub fn is_empty(&self) -> bool {
- self.start == self.end
- }
-
- pub fn as_str(&self) -> &'doc str {
- str::from_utf8(&self.data[self.start..self.end]).unwrap()
- }
-
- pub fn to_string(&self) -> String {
- self.as_str().to_string()
- }
-}
-
-pub struct TaggedDoc<'a> {
- tag: usize,
- pub doc: Doc<'a>,
-}
-
-#[derive(Copy, Clone, Debug)]
-pub enum EbmlEncoderTag {
- // tags 00..1f are reserved for auto-serialization.
- // first NUM_IMPLICIT_TAGS tags are implicitly sized and lengths are not encoded.
- EsU8 = 0x00, // + 1 byte
- EsU16 = 0x01, // + 2 bytes
- EsU32 = 0x02, // + 4 bytes
- EsU64 = 0x03, // + 8 bytes
- EsI8 = 0x04, // + 1 byte
- EsI16 = 0x05, // + 2 bytes
- EsI32 = 0x06, // + 4 bytes
- EsI64 = 0x07, // + 8 bytes
- EsBool = 0x08, // + 1 byte
- EsChar = 0x09, // + 4 bytes
- EsF32 = 0x0a, // + 4 bytes
- EsF64 = 0x0b, // + 8 bytes
- EsSub8 = 0x0c, // + 1 byte
- EsSub32 = 0x0d, // + 4 bytes
- // 0x0e and 0x0f are reserved
- EsStr = 0x10,
- EsEnum = 0x11, // encodes the variant id as the first EsSub*
- EsVec = 0x12, // encodes the # of elements as the first EsSub*
- EsVecElt = 0x13,
- EsMap = 0x14, // encodes the # of pairs as the first EsSub*
- EsMapKey = 0x15,
- EsMapVal = 0x16,
- EsOpaque = 0x17,
-}
-
-const NUM_TAGS: usize = 0x1000;
-const NUM_IMPLICIT_TAGS: usize = 0x0e;
-
-#[cfg_attr(rustfmt, rustfmt_skip)]
-static TAG_IMPLICIT_LEN: [i8; NUM_IMPLICIT_TAGS] = [
- 1, 2, 4, 8, // EsU*
- 1, 2, 4, 8, // ESI*
- 1, // EsBool
- 4, // EsChar
- 4, 8, // EsF*
- 1, 4, // EsSub*
-];
-
-#[derive(Debug)]
-pub enum Error {
- IntTooBig(usize),
- InvalidTag(usize),
- Expected(String),
- IoError(std::io::Error),
- ApplicationError(String),
-}
-
-impl fmt::Display for Error {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- // FIXME: this should be a more useful display form
- fmt::Debug::fmt(self, f)
- }
-}
-// --------------------------------------
-
-pub mod reader {
- use std::char;
-
- use std::isize;
- use std::mem::transmute;
-
- use serialize;
-
- use super::opaque;
- use super::{ApplicationError, EsVec, EsMap, EsEnum, EsSub8, EsSub32, EsVecElt, EsMapKey,
- EsU64, EsU32, EsU16, EsU8, EsI64, EsI32, EsI16, EsI8, EsBool, EsF64, EsF32,
- EsChar, EsStr, EsMapVal, EsOpaque, EbmlEncoderTag, Doc, TaggedDoc, Error,
- IntTooBig, InvalidTag, Expected, NUM_IMPLICIT_TAGS, TAG_IMPLICIT_LEN};
-
- pub type DecodeResult<T> = Result<T, Error>;
- // rbml reading
-
- macro_rules! try_or {
- ($e:expr, $r:expr) => (
- match $e {
- Ok(e) => e,
- Err(e) => {
- debug!("ignored error: {:?}", e);
- return $r
- }
- }
- )
- }
-
- #[derive(Copy, Clone)]
- pub struct Res {
- pub val: usize,
- pub next: usize,
- }
-
- pub fn tag_at(data: &[u8], start: usize) -> DecodeResult<Res> {
- let v = data[start] as usize;
- if v < 0xf0 {
- Ok(Res {
- val: v,
- next: start + 1,
- })
- } else if v > 0xf0 {
- Ok(Res {
- val: ((v & 0xf) << 8) | data[start + 1] as usize,
- next: start + 2,
- })
- } else {
- // every tag starting with byte 0xf0 is an overlong form, which is prohibited.
- Err(InvalidTag(v))
- }
- }
-
- #[inline(never)]
- fn vuint_at_slow(data: &[u8], start: usize) -> DecodeResult<Res> {
- let a = data[start];
- if a & 0x80 != 0 {
- return Ok(Res {
- val: (a & 0x7f) as usize,
- next: start + 1,
- });
- }
- if a & 0x40 != 0 {
- return Ok(Res {
- val: ((a & 0x3f) as usize) << 8 | (data[start + 1] as usize),
- next: start + 2,
- });
- }
- if a & 0x20 != 0 {
- return Ok(Res {
- val: ((a & 0x1f) as usize) << 16 | (data[start + 1] as usize) << 8 |
- (data[start + 2] as usize),
- next: start + 3,
- });
- }
- if a & 0x10 != 0 {
- return Ok(Res {
- val: ((a & 0x0f) as usize) << 24 | (data[start + 1] as usize) << 16 |
- (data[start + 2] as usize) << 8 |
- (data[start + 3] as usize),
- next: start + 4,
- });
- }
- Err(IntTooBig(a as usize))
- }
-
- pub fn vuint_at(data: &[u8], start: usize) -> DecodeResult<Res> {
- if data.len() - start < 4 {
- return vuint_at_slow(data, start);
- }
-
- // Lookup table for parsing EBML Element IDs as per
- // http://ebml.sourceforge.net/specs/ The Element IDs are parsed by
- // reading a big endian u32 positioned at data[start]. Using the four
- // most significant bits of the u32 we lookup in the table below how
- // the element ID should be derived from it.
- //
- // The table stores tuples (shift, mask) where shift is the number the
- // u32 should be right shifted with and mask is the value the right
- // shifted value should be masked with. If for example the most
- // significant bit is set this means it's a class A ID and the u32
- // should be right shifted with 24 and masked with 0x7f. Therefore we
- // store (24, 0x7f) at index 0x8 - 0xF (four bit numbers where the most
- // significant bit is set).
- //
- // By storing the number of shifts and masks in a table instead of
- // checking in order if the most significant bit is set, the second
- // most significant bit is set etc. we can replace up to three
- // "and+branch" with a single table lookup which gives us a measured
- // speedup of around 2x on x86_64.
- static SHIFT_MASK_TABLE: [(usize, u32); 16] = [(0, 0x0),
- (0, 0x0fffffff),
- (8, 0x1fffff),
- (8, 0x1fffff),
- (16, 0x3fff),
- (16, 0x3fff),
- (16, 0x3fff),
- (16, 0x3fff),
- (24, 0x7f),
- (24, 0x7f),
- (24, 0x7f),
- (24, 0x7f),
- (24, 0x7f),
- (24, 0x7f),
- (24, 0x7f),
- (24, 0x7f)];
-
- unsafe {
- let ptr = data.as_ptr().offset(start as isize) as *const u32;
- let val = u32::from_be(*ptr);
-
- let i = (val >> 28) as usize;
- let (shift, mask) = SHIFT_MASK_TABLE[i];
- Ok(Res {
- val: ((val >> shift) & mask) as usize,
- next: start + ((32 - shift) >> 3),
- })
- }
- }
-
- pub fn tag_len_at(data: &[u8], tag: Res) -> DecodeResult<Res> {
- if tag.val < NUM_IMPLICIT_TAGS && TAG_IMPLICIT_LEN[tag.val] >= 0 {
- Ok(Res {
- val: TAG_IMPLICIT_LEN[tag.val] as usize,
- next: tag.next,
- })
- } else {
- vuint_at(data, tag.next)
- }
- }
-
- pub fn doc_at<'a>(data: &'a [u8], start: usize) -> DecodeResult<TaggedDoc<'a>> {
- let elt_tag = tag_at(data, start)?;
- let elt_size = tag_len_at(data, elt_tag)?;
- let end = elt_size.next + elt_size.val;
- Ok(TaggedDoc {
- tag: elt_tag.val,
- doc: Doc {
- data: data,
- start: elt_size.next,
- end: end,
- },
- })
- }
-
- pub fn maybe_get_doc<'a>(d: Doc<'a>, tg: usize) -> Option<Doc<'a>> {
- let mut pos = d.start;
- while pos < d.end {
- let elt_tag = try_or!(tag_at(d.data, pos), None);
- let elt_size = try_or!(tag_len_at(d.data, elt_tag), None);
- pos = elt_size.next + elt_size.val;
- if elt_tag.val == tg {
- return Some(Doc {
- data: d.data,
- start: elt_size.next,
- end: pos,
- });
- }
- }
- None
- }
-
- pub fn get_doc<'a>(d: Doc<'a>, tg: usize) -> Doc<'a> {
- match maybe_get_doc(d, tg) {
- Some(d) => d,
- None => {
- error!("failed to find block with tag {:?}", tg);
- panic!();
- }
- }
- }
-
- pub fn docs<'a>(d: Doc<'a>) -> DocsIterator<'a> {
- DocsIterator { d: d }
- }
-
- pub struct DocsIterator<'a> {
- d: Doc<'a>,
- }
-
- impl<'a> Iterator for DocsIterator<'a> {
- type Item = (usize, Doc<'a>);
-
- fn next(&mut self) -> Option<(usize, Doc<'a>)> {
- if self.d.start >= self.d.end {
- return None;
- }
-
- let elt_tag = try_or!(tag_at(self.d.data, self.d.start), {
- self.d.start = self.d.end;
- None
- });
- let elt_size = try_or!(tag_len_at(self.d.data, elt_tag), {
- self.d.start = self.d.end;
- None
- });
-
- let end = elt_size.next + elt_size.val;
- let doc = Doc {
- data: self.d.data,
- start: elt_size.next,
- end: end,
- };
-
- self.d.start = end;
- return Some((elt_tag.val, doc));
- }
- }
-
- pub fn tagged_docs<'a>(d: Doc<'a>, tag: usize) -> TaggedDocsIterator<'a> {
- TaggedDocsIterator {
- iter: docs(d),
- tag: tag,
- }
- }
-
- pub struct TaggedDocsIterator<'a> {
- iter: DocsIterator<'a>,
- tag: usize,
- }
-
- impl<'a> Iterator for TaggedDocsIterator<'a> {
- type Item = Doc<'a>;
-
- fn next(&mut self) -> Option<Doc<'a>> {
- while let Some((tag, doc)) = self.iter.next() {
- if tag == self.tag {
- return Some(doc);
- }
- }
- None
- }
- }
-
- pub fn with_doc_data<T, F>(d: Doc, f: F) -> T
- where F: FnOnce(&[u8]) -> T
- {
- f(&d.data[d.start..d.end])
- }
-
- pub fn doc_as_u8(d: Doc) -> u8 {
- assert_eq!(d.end, d.start + 1);
- d.data[d.start]
- }
-
- pub fn doc_as_u64(d: Doc) -> u64 {
- if d.end >= 8 {
- // For performance, we read 8 big-endian bytes,
- // and mask off the junk if there is any. This
- // obviously won't work on the first 8 bytes
- // of a file - we will fall of the start
- // of the page and segfault.
-
- let mut b = [0; 8];
- b.copy_from_slice(&d.data[d.end - 8..d.end]);
- let data = unsafe { (*(b.as_ptr() as *const u64)).to_be() };
- let len = d.end - d.start;
- if len < 8 {
- data & ((1 << (len * 8)) - 1)
- } else {
- data
- }
- } else {
- let mut result = 0;
- for b in &d.data[d.start..d.end] {
- result = (result << 8) + (*b as u64);
- }
- result
- }
- }
-
- #[inline]
- pub fn doc_as_u16(d: Doc) -> u16 {
- doc_as_u64(d) as u16
- }
- #[inline]
- pub fn doc_as_u32(d: Doc) -> u32 {
- doc_as_u64(d) as u32
- }
-
- #[inline]
- pub fn doc_as_i8(d: Doc) -> i8 {
- doc_as_u8(d) as i8
- }
- #[inline]
- pub fn doc_as_i16(d: Doc) -> i16 {
- doc_as_u16(d) as i16
- }
- #[inline]
- pub fn doc_as_i32(d: Doc) -> i32 {
- doc_as_u32(d) as i32
- }
- #[inline]
- pub fn doc_as_i64(d: Doc) -> i64 {
- doc_as_u64(d) as i64
- }
-
- pub struct Decoder<'a> {
- parent: Doc<'a>,
- pos: usize,
- }
-
- impl<'doc> Decoder<'doc> {
- pub fn new(d: Doc<'doc>) -> Decoder<'doc> {
- Decoder {
- parent: d,
- pos: d.start,
- }
- }
-
- fn next_doc(&mut self, exp_tag: EbmlEncoderTag) -> DecodeResult<Doc<'doc>> {
- debug!(". next_doc(exp_tag={:?})", exp_tag);
- if self.pos >= self.parent.end {
- return Err(Expected(format!("no more documents in current node!")));
- }
- let TaggedDoc { tag: r_tag, doc: r_doc } = doc_at(self.parent.data, self.pos)?;
- debug!("self.parent={:?}-{:?} self.pos={:?} r_tag={:?} r_doc={:?}-{:?}",
- self.parent.start,
- self.parent.end,
- self.pos,
- r_tag,
- r_doc.start,
- r_doc.end);
- if r_tag != (exp_tag as usize) {
- return Err(Expected(format!("expected EBML doc with tag {:?} but found tag {:?}",
- exp_tag,
- r_tag)));
- }
- if r_doc.end > self.parent.end {
- return Err(Expected(format!("invalid EBML, child extends to {:#x}, parent to \
- {:#x}",
- r_doc.end,
- self.parent.end)));
- }
- self.pos = r_doc.end;
- Ok(r_doc)
- }
-
- fn push_doc<T, F>(&mut self, exp_tag: EbmlEncoderTag, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- let d = self.next_doc(exp_tag)?;
- let old_parent = self.parent;
- let old_pos = self.pos;
- self.parent = d;
- self.pos = d.start;
- let r = f(self)?;
- self.parent = old_parent;
- self.pos = old_pos;
- Ok(r)
- }
-
- fn _next_sub(&mut self) -> DecodeResult<usize> {
- // empty vector/map optimization
- if self.parent.is_empty() {
- return Ok(0);
- }
-
- let TaggedDoc { tag: r_tag, doc: r_doc } = doc_at(self.parent.data, self.pos)?;
- let r = if r_tag == (EsSub8 as usize) {
- doc_as_u8(r_doc) as usize
- } else if r_tag == (EsSub32 as usize) {
- doc_as_u32(r_doc) as usize
- } else {
- return Err(Expected(format!("expected EBML doc with tag {:?} or {:?} but found \
- tag {:?}",
- EsSub8,
- EsSub32,
- r_tag)));
- };
- if r_doc.end > self.parent.end {
- return Err(Expected(format!("invalid EBML, child extends to {:#x}, parent to \
- {:#x}",
- r_doc.end,
- self.parent.end)));
- }
- self.pos = r_doc.end;
- debug!("_next_sub result={:?}", r);
- Ok(r)
- }
-
- // variable-length unsigned integer with different tags.
- // `first_tag` should be a tag for u8 or i8.
- // `last_tag` should be the largest allowed integer tag with the matching signedness.
- // all tags between them should be valid, in the order of u8, u16, u32 and u64.
- fn _next_int(&mut self,
- first_tag: EbmlEncoderTag,
- last_tag: EbmlEncoderTag)
- -> DecodeResult<u64> {
- if self.pos >= self.parent.end {
- return Err(Expected(format!("no more documents in current node!")));
- }
-
- let TaggedDoc { tag: r_tag, doc: r_doc } = doc_at(self.parent.data, self.pos)?;
- let r = if first_tag as usize <= r_tag && r_tag <= last_tag as usize {
- match r_tag - first_tag as usize {
- 0 => doc_as_u8(r_doc) as u64,
- 1 => doc_as_u16(r_doc) as u64,
- 2 => doc_as_u32(r_doc) as u64,
- 3 => doc_as_u64(r_doc),
- _ => unreachable!(),
- }
- } else {
- return Err(Expected(format!("expected EBML doc with tag {:?} through {:?} but \
- found tag {:?}",
- first_tag,
- last_tag,
- r_tag)));
- };
- if r_doc.end > self.parent.end {
- return Err(Expected(format!("invalid EBML, child extends to {:#x}, parent to \
- {:#x}",
- r_doc.end,
- self.parent.end)));
- }
- self.pos = r_doc.end;
- debug!("_next_int({:?}, {:?}) result={:?}", first_tag, last_tag, r);
- Ok(r)
- }
-
- pub fn read_opaque<R, F>(&mut self, op: F) -> DecodeResult<R>
- where F: FnOnce(&mut opaque::Decoder, Doc) -> DecodeResult<R>
- {
- let doc = self.next_doc(EsOpaque)?;
-
- let result = {
- let mut opaque_decoder = opaque::Decoder::new(doc.data, doc.start);
- op(&mut opaque_decoder, doc)?
- };
-
- Ok(result)
- }
-
- pub fn position(&self) -> usize {
- self.pos
- }
-
- pub fn advance(&mut self, bytes: usize) {
- self.pos += bytes;
- }
- }
-
- impl<'doc> serialize::Decoder for Decoder<'doc> {
- type Error = Error;
- fn read_nil(&mut self) -> DecodeResult<()> {
- Ok(())
- }
-
- fn read_u64(&mut self) -> DecodeResult<u64> {
- self._next_int(EsU8, EsU64)
- }
- fn read_u32(&mut self) -> DecodeResult<u32> {
- Ok(self._next_int(EsU8, EsU32)? as u32)
- }
- fn read_u16(&mut self) -> DecodeResult<u16> {
- Ok(self._next_int(EsU8, EsU16)? as u16)
- }
- fn read_u8(&mut self) -> DecodeResult<u8> {
- Ok(doc_as_u8(self.next_doc(EsU8)?))
- }
- fn read_usize(&mut self) -> DecodeResult<usize> {
- let v = self._next_int(EsU8, EsU64)?;
- if v > (::std::usize::MAX as u64) {
- Err(IntTooBig(v as usize))
- } else {
- Ok(v as usize)
- }
- }
-
- fn read_i64(&mut self) -> DecodeResult<i64> {
- Ok(self._next_int(EsI8, EsI64)? as i64)
- }
- fn read_i32(&mut self) -> DecodeResult<i32> {
- Ok(self._next_int(EsI8, EsI32)? as i32)
- }
- fn read_i16(&mut self) -> DecodeResult<i16> {
- Ok(self._next_int(EsI8, EsI16)? as i16)
- }
- fn read_i8(&mut self) -> DecodeResult<i8> {
- Ok(doc_as_u8(self.next_doc(EsI8)?) as i8)
- }
- fn read_isize(&mut self) -> DecodeResult<isize> {
- let v = self._next_int(EsI8, EsI64)? as i64;
- if v > (isize::MAX as i64) || v < (isize::MIN as i64) {
- debug!("FIXME \\#6122: Removing this makes this function miscompile");
- Err(IntTooBig(v as usize))
- } else {
- Ok(v as isize)
- }
- }
-
- fn read_bool(&mut self) -> DecodeResult<bool> {
- Ok(doc_as_u8(self.next_doc(EsBool)?) != 0)
- }
-
- fn read_f64(&mut self) -> DecodeResult<f64> {
- let bits = doc_as_u64(self.next_doc(EsF64)?);
- Ok(unsafe { transmute(bits) })
- }
- fn read_f32(&mut self) -> DecodeResult<f32> {
- let bits = doc_as_u32(self.next_doc(EsF32)?);
- Ok(unsafe { transmute(bits) })
- }
- fn read_char(&mut self) -> DecodeResult<char> {
- Ok(char::from_u32(doc_as_u32(self.next_doc(EsChar)?)).unwrap())
- }
- fn read_str(&mut self) -> DecodeResult<String> {
- Ok(self.next_doc(EsStr)?.to_string())
- }
-
- // Compound types:
- fn read_enum<T, F>(&mut self, name: &str, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_enum({})", name);
-
- let doc = self.next_doc(EsEnum)?;
-
- let (old_parent, old_pos) = (self.parent, self.pos);
- self.parent = doc;
- self.pos = self.parent.start;
-
- let result = f(self)?;
-
- self.parent = old_parent;
- self.pos = old_pos;
- Ok(result)
- }
-
- fn read_enum_variant<T, F>(&mut self, _: &[&str], mut f: F) -> DecodeResult<T>
- where F: FnMut(&mut Decoder<'doc>, usize) -> DecodeResult<T>
- {
- debug!("read_enum_variant()");
- let idx = self._next_sub()?;
- debug!(" idx={}", idx);
-
- f(self, idx)
- }
-
- fn read_enum_variant_arg<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_enum_variant_arg(idx={})", idx);
- f(self)
- }
-
- fn read_enum_struct_variant<T, F>(&mut self, _: &[&str], mut f: F) -> DecodeResult<T>
- where F: FnMut(&mut Decoder<'doc>, usize) -> DecodeResult<T>
- {
- debug!("read_enum_struct_variant()");
- let idx = self._next_sub()?;
- debug!(" idx={}", idx);
-
- f(self, idx)
- }
-
- fn read_enum_struct_variant_field<T, F>(&mut self,
- name: &str,
- idx: usize,
- f: F)
- -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_enum_struct_variant_arg(name={}, idx={})", name, idx);
- f(self)
- }
-
- fn read_struct<T, F>(&mut self, name: &str, _: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_struct(name={})", name);
- f(self)
- }
-
- fn read_struct_field<T, F>(&mut self, name: &str, idx: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_struct_field(name={}, idx={})", name, idx);
- f(self)
- }
-
- fn read_tuple<T, F>(&mut self, tuple_len: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_tuple()");
- self.read_seq(move |d, len| {
- if len == tuple_len {
- f(d)
- } else {
- Err(Expected(format!("Expected tuple of length `{}`, found tuple of length \
- `{}`",
- tuple_len,
- len)))
- }
- })
- }
-
- fn read_tuple_arg<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_tuple_arg(idx={})", idx);
- self.read_seq_elt(idx, f)
- }
-
- fn read_tuple_struct<T, F>(&mut self, name: &str, len: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_tuple_struct(name={})", name);
- self.read_tuple(len, f)
- }
-
- fn read_tuple_struct_arg<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_tuple_struct_arg(idx={})", idx);
- self.read_tuple_arg(idx, f)
- }
-
- fn read_option<T, F>(&mut self, mut f: F) -> DecodeResult<T>
- where F: FnMut(&mut Decoder<'doc>, bool) -> DecodeResult<T>
- {
- debug!("read_option()");
- self.read_enum("Option", move |this| {
- this.read_enum_variant(&["None", "Some"], move |this, idx| {
- match idx {
- 0 => f(this, false),
- 1 => f(this, true),
- _ => Err(Expected(format!("Expected None or Some"))),
- }
- })
- })
- }
-
- fn read_seq<T, F>(&mut self, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>, usize) -> DecodeResult<T>
- {
- debug!("read_seq()");
- self.push_doc(EsVec, move |d| {
- let len = d._next_sub()?;
- debug!(" len={}", len);
- f(d, len)
- })
- }
-
- fn read_seq_elt<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_seq_elt(idx={})", idx);
- self.push_doc(EsVecElt, f)
- }
-
- fn read_map<T, F>(&mut self, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>, usize) -> DecodeResult<T>
- {
- debug!("read_map()");
- self.push_doc(EsMap, move |d| {
- let len = d._next_sub()?;
- debug!(" len={}", len);
- f(d, len)
- })
- }
-
- fn read_map_elt_key<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_map_elt_key(idx={})", idx);
- self.push_doc(EsMapKey, f)
- }
-
- fn read_map_elt_val<T, F>(&mut self, idx: usize, f: F) -> DecodeResult<T>
- where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult<T>
- {
- debug!("read_map_elt_val(idx={})", idx);
- self.push_doc(EsMapVal, f)
- }
-
- fn error(&mut self, err: &str) -> Error {
- ApplicationError(err.to_string())
- }
- }
-}
-
-pub mod writer {
- use std::mem;
- use std::io::prelude::*;
- use std::io::{self, SeekFrom, Cursor};
-
- use super::opaque;
- use super::{EsVec, EsMap, EsEnum, EsSub8, EsSub32, EsVecElt, EsMapKey, EsU64, EsU32, EsU16,
- EsU8, EsI64, EsI32, EsI16, EsI8, EsBool, EsF64, EsF32, EsChar, EsStr, EsMapVal,
- EsOpaque, NUM_IMPLICIT_TAGS, NUM_TAGS};
-
- use serialize;
-
-
- pub type EncodeResult = io::Result<()>;
-
- // rbml writing
- pub struct Encoder<'a> {
- pub writer: &'a mut Cursor<Vec<u8>>,
- size_positions: Vec<u64>,
- relax_limit: u64, // do not move encoded bytes before this position
- }
-
- fn write_tag<W: Write>(w: &mut W, n: usize) -> EncodeResult {
- if n < 0xf0 {
- w.write_all(&[n as u8])
- } else if 0x100 <= n && n < NUM_TAGS {
- w.write_all(&[0xf0 | (n >> 8) as u8, n as u8])
- } else {
- Err(io::Error::new(io::ErrorKind::Other, &format!("invalid tag: {}", n)[..]))
- }
- }
-
- fn write_sized_vuint<W: Write>(w: &mut W, n: usize, size: usize) -> EncodeResult {
- match size {
- 1 => w.write_all(&[0x80 | (n as u8)]),
- 2 => w.write_all(&[0x40 | ((n >> 8) as u8), n as u8]),
- 3 => w.write_all(&[0x20 | ((n >> 16) as u8), (n >> 8) as u8, n as u8]),
- 4 => w.write_all(&[0x10 | ((n >> 24) as u8), (n >> 16) as u8, (n >> 8) as u8, n as u8]),
- _ => Err(io::Error::new(io::ErrorKind::Other, &format!("isize too big: {}", n)[..])),
- }
- }
-
- pub fn write_vuint<W: Write>(w: &mut W, n: usize) -> EncodeResult {
- if n < 0x7f {
- return write_sized_vuint(w, n, 1);
- }
- if n < 0x4000 {
- return write_sized_vuint(w, n, 2);
- }
- if n < 0x200000 {
- return write_sized_vuint(w, n, 3);
- }
- if n < 0x10000000 {
- return write_sized_vuint(w, n, 4);
- }
- Err(io::Error::new(io::ErrorKind::Other, &format!("isize too big: {}", n)[..]))
- }
-
- impl<'a> Encoder<'a> {
- pub fn new(w: &'a mut Cursor<Vec<u8>>) -> Encoder<'a> {
- Encoder {
- writer: w,
- size_positions: vec![],
- relax_limit: 0,
- }
- }
-
- pub fn start_tag(&mut self, tag_id: usize) -> EncodeResult {
- debug!("Start tag {:?}", tag_id);
- assert!(tag_id >= NUM_IMPLICIT_TAGS);
-
- // Write the enum ID:
- write_tag(self.writer, tag_id)?;
-
- // Write a placeholder four-byte size.
- let cur_pos = self.writer.seek(SeekFrom::Current(0))?;
- self.size_positions.push(cur_pos);
- let zeroes: &[u8] = &[0, 0, 0, 0];
- self.writer.write_all(zeroes)
- }
-
- pub fn end_tag(&mut self) -> EncodeResult {
- let last_size_pos = self.size_positions.pop().unwrap();
- let cur_pos = self.writer.seek(SeekFrom::Current(0))?;
- self.writer.seek(SeekFrom::Start(last_size_pos))?;
- let size = (cur_pos - last_size_pos - 4) as usize;
-
- // relax the size encoding for small tags (bigger tags are costly to move).
- // we should never try to move the stable positions, however.
- const RELAX_MAX_SIZE: usize = 0x100;
- if size <= RELAX_MAX_SIZE && last_size_pos >= self.relax_limit {
- // we can't alter the buffer in place, so have a temporary buffer
- let mut buf = [0u8; RELAX_MAX_SIZE];
- {
- let last_size_pos = last_size_pos as usize;
- let data = &self.writer.get_ref()[last_size_pos + 4..cur_pos as usize];
- buf[..size].copy_from_slice(data);
- }
-
- // overwrite the size and data and continue
- write_vuint(self.writer, size)?;
- self.writer.write_all(&buf[..size])?;
- } else {
- // overwrite the size with an overlong encoding and skip past the data
- write_sized_vuint(self.writer, size, 4)?;
- self.writer.seek(SeekFrom::Start(cur_pos))?;
- }
-
- debug!("End tag (size = {:?})", size);
- Ok(())
- }
-
- pub fn wr_tag<F>(&mut self, tag_id: usize, blk: F) -> EncodeResult
- where F: FnOnce() -> EncodeResult
- {
- self.start_tag(tag_id)?;
- blk()?;
- self.end_tag()
- }
-
- pub fn wr_tagged_bytes(&mut self, tag_id: usize, b: &[u8]) -> EncodeResult {
- assert!(tag_id >= NUM_IMPLICIT_TAGS);
- write_tag(self.writer, tag_id)?;
- write_vuint(self.writer, b.len())?;
- self.writer.write_all(b)
- }
-
- pub fn wr_tagged_u64(&mut self, tag_id: usize, v: u64) -> EncodeResult {
- let bytes: [u8; 8] = unsafe { mem::transmute(v.to_be()) };
- // tagged integers are emitted in big-endian, with no
- // leading zeros.
- let leading_zero_bytes = v.leading_zeros() / 8;
- self.wr_tagged_bytes(tag_id, &bytes[leading_zero_bytes as usize..])
- }
-
- #[inline]
- pub fn wr_tagged_u32(&mut self, tag_id: usize, v: u32) -> EncodeResult {
- self.wr_tagged_u64(tag_id, v as u64)
- }
-
- #[inline]
- pub fn wr_tagged_u16(&mut self, tag_id: usize, v: u16) -> EncodeResult {
- self.wr_tagged_u64(tag_id, v as u64)
- }
-
- #[inline]
- pub fn wr_tagged_u8(&mut self, tag_id: usize, v: u8) -> EncodeResult {
- self.wr_tagged_bytes(tag_id, &[v])
- }
-
- #[inline]
- pub fn wr_tagged_i64(&mut self, tag_id: usize, v: i64) -> EncodeResult {
- self.wr_tagged_u64(tag_id, v as u64)
- }
-
- #[inline]
- pub fn wr_tagged_i32(&mut self, tag_id: usize, v: i32) -> EncodeResult {
- self.wr_tagged_u32(tag_id, v as u32)
- }
-
- #[inline]
- pub fn wr_tagged_i16(&mut self, tag_id: usize, v: i16) -> EncodeResult {
- self.wr_tagged_u16(tag_id, v as u16)
- }
-
- #[inline]
- pub fn wr_tagged_i8(&mut self, tag_id: usize, v: i8) -> EncodeResult {
- self.wr_tagged_bytes(tag_id, &[v as u8])
- }
-
- pub fn wr_tagged_str(&mut self, tag_id: usize, v: &str) -> EncodeResult {
- self.wr_tagged_bytes(tag_id, v.as_bytes())
- }
-
- // for auto-serialization
- fn wr_tagged_raw_bytes(&mut self, tag_id: usize, b: &[u8]) -> EncodeResult {
- write_tag(self.writer, tag_id)?;
- self.writer.write_all(b)
- }
-
- fn wr_tagged_raw_u64(&mut self, tag_id: usize, v: u64) -> EncodeResult {
- let bytes: [u8; 8] = unsafe { mem::transmute(v.to_be()) };
- self.wr_tagged_raw_bytes(tag_id, &bytes)
- }
-
- fn wr_tagged_raw_u32(&mut self, tag_id: usize, v: u32) -> EncodeResult {
- let bytes: [u8; 4] = unsafe { mem::transmute(v.to_be()) };
- self.wr_tagged_raw_bytes(tag_id, &bytes)
- }
-
- fn wr_tagged_raw_u16(&mut self, tag_id: usize, v: u16) -> EncodeResult {
- let bytes: [u8; 2] = unsafe { mem::transmute(v.to_be()) };
- self.wr_tagged_raw_bytes(tag_id, &bytes)
- }
-
- fn wr_tagged_raw_u8(&mut self, tag_id: usize, v: u8) -> EncodeResult {
- self.wr_tagged_raw_bytes(tag_id, &[v])
- }
-
- fn wr_tagged_raw_i64(&mut self, tag_id: usize, v: i64) -> EncodeResult {
- self.wr_tagged_raw_u64(tag_id, v as u64)
- }
-
- fn wr_tagged_raw_i32(&mut self, tag_id: usize, v: i32) -> EncodeResult {
- self.wr_tagged_raw_u32(tag_id, v as u32)
- }
-
- fn wr_tagged_raw_i16(&mut self, tag_id: usize, v: i16) -> EncodeResult {
- self.wr_tagged_raw_u16(tag_id, v as u16)
- }
-
- fn wr_tagged_raw_i8(&mut self, tag_id: usize, v: i8) -> EncodeResult {
- self.wr_tagged_raw_bytes(tag_id, &[v as u8])
- }
-
- pub fn wr_bytes(&mut self, b: &[u8]) -> EncodeResult {
- debug!("Write {:?} bytes", b.len());
- self.writer.write_all(b)
- }
-
- pub fn wr_str(&mut self, s: &str) -> EncodeResult {
- debug!("Write str: {:?}", s);
- self.writer.write_all(s.as_bytes())
- }
-
- /// Returns the current position while marking it stable, i.e.
- /// generated bytes so far wouldn't be affected by relaxation.
- pub fn mark_stable_position(&mut self) -> u64 {
- let pos = self.writer.seek(SeekFrom::Current(0)).unwrap();
- if self.relax_limit < pos {
- self.relax_limit = pos;
- }
- pos
- }
- }
-
- impl<'a> Encoder<'a> {
- // used internally to emit things like the vector length and so on
- fn _emit_tagged_sub(&mut self, v: usize) -> EncodeResult {
- if v as u8 as usize == v {
- self.wr_tagged_raw_u8(EsSub8 as usize, v as u8)
- } else if v as u32 as usize == v {
- self.wr_tagged_raw_u32(EsSub32 as usize, v as u32)
- } else {
- Err(io::Error::new(io::ErrorKind::Other,
- &format!("length or variant id too big: {}", v)[..]))
- }
- }
-
- pub fn emit_opaque<F>(&mut self, f: F) -> EncodeResult
- where F: FnOnce(&mut opaque::Encoder) -> EncodeResult
- {
- self.start_tag(EsOpaque as usize)?;
-
- {
- let mut opaque_encoder = opaque::Encoder::new(self.writer);
- f(&mut opaque_encoder)?;
- }
-
- self.mark_stable_position();
- self.end_tag()
- }
- }
-
- impl<'a> serialize::Encoder for Encoder<'a> {
- type Error = io::Error;
-
- fn emit_nil(&mut self) -> EncodeResult {
- Ok(())
- }
-
- fn emit_usize(&mut self, v: usize) -> EncodeResult {
- self.emit_u64(v as u64)
- }
- fn emit_u64(&mut self, v: u64) -> EncodeResult {
- if v as u32 as u64 == v {
- self.emit_u32(v as u32)
- } else {
- self.wr_tagged_raw_u64(EsU64 as usize, v)
- }
- }
- fn emit_u32(&mut self, v: u32) -> EncodeResult {
- if v as u16 as u32 == v {
- self.emit_u16(v as u16)
- } else {
- self.wr_tagged_raw_u32(EsU32 as usize, v)
- }
- }
- fn emit_u16(&mut self, v: u16) -> EncodeResult {
- if v as u8 as u16 == v {
- self.emit_u8(v as u8)
- } else {
- self.wr_tagged_raw_u16(EsU16 as usize, v)
- }
- }
- fn emit_u8(&mut self, v: u8) -> EncodeResult {
- self.wr_tagged_raw_u8(EsU8 as usize, v)
- }
-
- fn emit_isize(&mut self, v: isize) -> EncodeResult {
- self.emit_i64(v as i64)
- }
- fn emit_i64(&mut self, v: i64) -> EncodeResult {
- if v as i32 as i64 == v {
- self.emit_i32(v as i32)
- } else {
- self.wr_tagged_raw_i64(EsI64 as usize, v)
- }
- }
- fn emit_i32(&mut self, v: i32) -> EncodeResult {
- if v as i16 as i32 == v {
- self.emit_i16(v as i16)
- } else {
- self.wr_tagged_raw_i32(EsI32 as usize, v)
- }
- }
- fn emit_i16(&mut self, v: i16) -> EncodeResult {
- if v as i8 as i16 == v {
- self.emit_i8(v as i8)
- } else {
- self.wr_tagged_raw_i16(EsI16 as usize, v)
- }
- }
- fn emit_i8(&mut self, v: i8) -> EncodeResult {
- self.wr_tagged_raw_i8(EsI8 as usize, v)
- }
-
- fn emit_bool(&mut self, v: bool) -> EncodeResult {
- self.wr_tagged_raw_u8(EsBool as usize, v as u8)
- }
-
- fn emit_f64(&mut self, v: f64) -> EncodeResult {
- let bits = unsafe { mem::transmute(v) };
- self.wr_tagged_raw_u64(EsF64 as usize, bits)
- }
- fn emit_f32(&mut self, v: f32) -> EncodeResult {
- let bits = unsafe { mem::transmute(v) };
- self.wr_tagged_raw_u32(EsF32 as usize, bits)
- }
- fn emit_char(&mut self, v: char) -> EncodeResult {
- self.wr_tagged_raw_u32(EsChar as usize, v as u32)
- }
-
- fn emit_str(&mut self, v: &str) -> EncodeResult {
- self.wr_tagged_str(EsStr as usize, v)
- }
-
- fn emit_enum<F>(&mut self, _name: &str, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.start_tag(EsEnum as usize)?;
- f(self)?;
- self.end_tag()
- }
-
- fn emit_enum_variant<F>(&mut self, _: &str, v_id: usize, _: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self._emit_tagged_sub(v_id)?;
- f(self)
- }
-
- fn emit_enum_variant_arg<F>(&mut self, _: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_enum_struct_variant<F>(&mut self,
- v_name: &str,
- v_id: usize,
- cnt: usize,
- f: F)
- -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_enum_variant(v_name, v_id, cnt, f)
- }
-
- fn emit_enum_struct_variant_field<F>(&mut self, _: &str, idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_enum_variant_arg(idx, f)
- }
-
- fn emit_struct<F>(&mut self, _: &str, _len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_struct_field<F>(&mut self, _name: &str, _: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_tuple<F>(&mut self, len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_seq(len, f)
- }
- fn emit_tuple_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_seq_elt(idx, f)
- }
-
- fn emit_tuple_struct<F>(&mut self, _: &str, len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_seq(len, f)
- }
- fn emit_tuple_struct_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_seq_elt(idx, f)
- }
-
- fn emit_option<F>(&mut self, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_enum("Option", f)
- }
- fn emit_option_none(&mut self) -> EncodeResult {
- self.emit_enum_variant("None", 0, 0, |_| Ok(()))
- }
- fn emit_option_some<F>(&mut self, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
-
- self.emit_enum_variant("Some", 1, 1, f)
- }
-
- fn emit_seq<F>(&mut self, len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- if len == 0 {
- // empty vector optimization
- return self.wr_tagged_bytes(EsVec as usize, &[]);
- }
-
- self.start_tag(EsVec as usize)?;
- self._emit_tagged_sub(len)?;
- f(self)?;
- self.end_tag()
- }
-
- fn emit_seq_elt<F>(&mut self, _idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
-
- self.start_tag(EsVecElt as usize)?;
- f(self)?;
- self.end_tag()
- }
-
- fn emit_map<F>(&mut self, len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- if len == 0 {
- // empty map optimization
- return self.wr_tagged_bytes(EsMap as usize, &[]);
- }
-
- self.start_tag(EsMap as usize)?;
- self._emit_tagged_sub(len)?;
- f(self)?;
- self.end_tag()
- }
-
- fn emit_map_elt_key<F>(&mut self, _idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
-
- self.start_tag(EsMapKey as usize)?;
- f(self)?;
- self.end_tag()
- }
-
- fn emit_map_elt_val<F>(&mut self, _idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.start_tag(EsMapVal as usize)?;
- f(self)?;
- self.end_tag()
- }
- }
-}
-
-// ___________________________________________________________________________
-// Testing
-
-#[cfg(test)]
-mod tests {
- use super::{Doc, reader, writer};
-
- use serialize::{Encodable, Decodable};
-
- use std::io::Cursor;
-
- #[test]
- fn test_vuint_at() {
- let data = &[
- 0x80,
- 0xff,
- 0x40, 0x00,
- 0x7f, 0xff,
- 0x20, 0x00, 0x00,
- 0x3f, 0xff, 0xff,
- 0x10, 0x00, 0x00, 0x00,
- 0x1f, 0xff, 0xff, 0xff
- ];
-
- let mut res: reader::Res;
-
- // Class A
- res = reader::vuint_at(data, 0).unwrap();
- assert_eq!(res.val, 0);
- assert_eq!(res.next, 1);
- res = reader::vuint_at(data, res.next).unwrap();
- assert_eq!(res.val, (1 << 7) - 1);
- assert_eq!(res.next, 2);
-
- // Class B
- res = reader::vuint_at(data, res.next).unwrap();
- assert_eq!(res.val, 0);
- assert_eq!(res.next, 4);
- res = reader::vuint_at(data, res.next).unwrap();
- assert_eq!(res.val, (1 << 14) - 1);
- assert_eq!(res.next, 6);
-
- // Class C
- res = reader::vuint_at(data, res.next).unwrap();
- assert_eq!(res.val, 0);
- assert_eq!(res.next, 9);
- res = reader::vuint_at(data, res.next).unwrap();
- assert_eq!(res.val, (1 << 21) - 1);
- assert_eq!(res.next, 12);
-
- // Class D
- res = reader::vuint_at(data, res.next).unwrap();
- assert_eq!(res.val, 0);
- assert_eq!(res.next, 16);
- res = reader::vuint_at(data, res.next).unwrap();
- assert_eq!(res.val, (1 << 28) - 1);
- assert_eq!(res.next, 20);
- }
-
- #[test]
- fn test_option_int() {
- fn test_v(v: Option<isize>) {
- debug!("v == {:?}", v);
- let mut wr = Cursor::new(Vec::new());
- {
- let mut rbml_w = writer::Encoder::new(&mut wr);
- let _ = v.encode(&mut rbml_w);
- }
- let rbml_doc = Doc::new(wr.get_ref());
- let mut deser = reader::Decoder::new(rbml_doc);
- let v1 = Decodable::decode(&mut deser).unwrap();
- debug!("v1 == {:?}", v1);
- assert_eq!(v, v1);
- }
-
- test_v(Some(22));
- test_v(None);
- test_v(Some(3));
- }
-}
-
-#[cfg(test)]
-mod bench {
- #![allow(non_snake_case)]
- use test::Bencher;
- use super::reader;
-
- #[bench]
- pub fn vuint_at_A_aligned(b: &mut Bencher) {
- let data = (0..4 * 100)
- .map(|i| {
- match i % 2 {
- 0 => 0x80,
- _ => i as u8,
- }
- })
- .collect::<Vec<_>>();
- let mut sum = 0;
- b.iter(|| {
- let mut i = 0;
- while i < data.len() {
- sum += reader::vuint_at(&data, i).unwrap().val;
- i += 4;
- }
- });
- }
-
- #[bench]
- pub fn vuint_at_A_unaligned(b: &mut Bencher) {
- let data = (0..4 * 100 + 1)
- .map(|i| {
- match i % 2 {
- 1 => 0x80,
- _ => i as u8,
- }
- })
- .collect::<Vec<_>>();
- let mut sum = 0;
- b.iter(|| {
- let mut i = 1;
- while i < data.len() {
- sum += reader::vuint_at(&data, i).unwrap().val;
- i += 4;
- }
- });
- }
-
- #[bench]
- pub fn vuint_at_D_aligned(b: &mut Bencher) {
- let data = (0..4 * 100)
- .map(|i| {
- match i % 4 {
- 0 => 0x10,
- 3 => i as u8,
- _ => 0,
- }
- })
- .collect::<Vec<_>>();
- let mut sum = 0;
- b.iter(|| {
- let mut i = 0;
- while i < data.len() {
- sum += reader::vuint_at(&data, i).unwrap().val;
- i += 4;
- }
- });
- }
-
- #[bench]
- pub fn vuint_at_D_unaligned(b: &mut Bencher) {
- let data = (0..4 * 100 + 1)
- .map(|i| {
- match i % 4 {
- 1 => 0x10,
- 0 => i as u8,
- _ => 0,
- }
- })
- .collect::<Vec<_>>();
- let mut sum = 0;
- b.iter(|| {
- let mut i = 1;
- while i < data.len() {
- sum += reader::vuint_at(&data, i).unwrap().val;
- i += 4;
- }
- });
- }
-}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use Error as DecodeError;
-use writer::EncodeResult;
-use leb128::{read_signed_leb128, read_unsigned_leb128, write_signed_leb128, write_unsigned_leb128};
-use std::io::{self, Write};
-use serialize;
-
-// -----------------------------------------------------------------------------
-// Encoder
-// -----------------------------------------------------------------------------
-
-pub struct Encoder<'a> {
- pub cursor: &'a mut io::Cursor<Vec<u8>>,
-}
-
-impl<'a> Encoder<'a> {
- pub fn new(cursor: &'a mut io::Cursor<Vec<u8>>) -> Encoder<'a> {
- Encoder { cursor: cursor }
- }
-}
-
-
-macro_rules! write_uleb128 {
- ($enc:expr, $value:expr) => {{
- let pos = $enc.cursor.position() as usize;
- let bytes_written = write_unsigned_leb128($enc.cursor.get_mut(), pos, $value as u64);
- $enc.cursor.set_position((pos + bytes_written) as u64);
- Ok(())
- }}
-}
-
-macro_rules! write_sleb128 {
- ($enc:expr, $value:expr) => {{
- let pos = $enc.cursor.position() as usize;
- let bytes_written = write_signed_leb128($enc.cursor.get_mut(), pos, $value as i64);
- $enc.cursor.set_position((pos + bytes_written) as u64);
- Ok(())
- }}
-}
-
-impl<'a> serialize::Encoder for Encoder<'a> {
- type Error = io::Error;
-
- fn emit_nil(&mut self) -> EncodeResult {
- Ok(())
- }
-
- fn emit_usize(&mut self, v: usize) -> EncodeResult {
- write_uleb128!(self, v)
- }
-
- fn emit_u64(&mut self, v: u64) -> EncodeResult {
- write_uleb128!(self, v)
- }
-
- fn emit_u32(&mut self, v: u32) -> EncodeResult {
- write_uleb128!(self, v)
- }
-
- fn emit_u16(&mut self, v: u16) -> EncodeResult {
- write_uleb128!(self, v)
- }
-
- fn emit_u8(&mut self, v: u8) -> EncodeResult {
- let _ = self.cursor.write_all(&[v]);
- Ok(())
- }
-
- fn emit_isize(&mut self, v: isize) -> EncodeResult {
- write_sleb128!(self, v)
- }
-
- fn emit_i64(&mut self, v: i64) -> EncodeResult {
- write_sleb128!(self, v)
- }
-
- fn emit_i32(&mut self, v: i32) -> EncodeResult {
- write_sleb128!(self, v)
- }
-
- fn emit_i16(&mut self, v: i16) -> EncodeResult {
- write_sleb128!(self, v)
- }
-
- fn emit_i8(&mut self, v: i8) -> EncodeResult {
- let as_u8: u8 = unsafe { ::std::mem::transmute(v) };
- let _ = self.cursor.write_all(&[as_u8]);
- Ok(())
- }
-
- fn emit_bool(&mut self, v: bool) -> EncodeResult {
- self.emit_u8(if v {
- 1
- } else {
- 0
- })
- }
-
- fn emit_f64(&mut self, v: f64) -> EncodeResult {
- let as_u64: u64 = unsafe { ::std::mem::transmute(v) };
- self.emit_u64(as_u64)
- }
-
- fn emit_f32(&mut self, v: f32) -> EncodeResult {
- let as_u32: u32 = unsafe { ::std::mem::transmute(v) };
- self.emit_u32(as_u32)
- }
-
- fn emit_char(&mut self, v: char) -> EncodeResult {
- self.emit_u32(v as u32)
- }
-
- fn emit_str(&mut self, v: &str) -> EncodeResult {
- self.emit_usize(v.len())?;
- let _ = self.cursor.write_all(v.as_bytes());
- Ok(())
- }
-
- fn emit_enum<F>(&mut self, _name: &str, f: F) -> EncodeResult
- where F: FnOnce(&mut Self) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_enum_variant<F>(&mut self,
- _v_name: &str,
- v_id: usize,
- _len: usize,
- f: F)
- -> EncodeResult
- where F: FnOnce(&mut Self) -> EncodeResult
- {
- self.emit_usize(v_id)?;
- f(self)
- }
-
- fn emit_enum_variant_arg<F>(&mut self, _: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_enum_struct_variant<F>(&mut self,
- v_name: &str,
- v_id: usize,
- cnt: usize,
- f: F)
- -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_enum_variant(v_name, v_id, cnt, f)
- }
-
- fn emit_enum_struct_variant_field<F>(&mut self, _: &str, idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_enum_variant_arg(idx, f)
- }
-
- fn emit_struct<F>(&mut self, _: &str, _len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_struct_field<F>(&mut self, _name: &str, _: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_tuple<F>(&mut self, len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_seq(len, f)
- }
-
- fn emit_tuple_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_seq_elt(idx, f)
- }
-
- fn emit_tuple_struct<F>(&mut self, _: &str, len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_seq(len, f)
- }
-
- fn emit_tuple_struct_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_seq_elt(idx, f)
- }
-
- fn emit_option<F>(&mut self, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_enum("Option", f)
- }
-
- fn emit_option_none(&mut self) -> EncodeResult {
- self.emit_enum_variant("None", 0, 0, |_| Ok(()))
- }
-
- fn emit_option_some<F>(&mut self, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_enum_variant("Some", 1, 1, f)
- }
-
- fn emit_seq<F>(&mut self, len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_usize(len)?;
- f(self)
- }
-
- fn emit_seq_elt<F>(&mut self, _idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_map<F>(&mut self, len: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- self.emit_usize(len)?;
- f(self)
- }
-
- fn emit_map_elt_key<F>(&mut self, _idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-
- fn emit_map_elt_val<F>(&mut self, _idx: usize, f: F) -> EncodeResult
- where F: FnOnce(&mut Encoder<'a>) -> EncodeResult
- {
- f(self)
- }
-}
-
-impl<'a> Encoder<'a> {
- pub fn position(&self) -> usize {
- self.cursor.position() as usize
- }
-
- pub fn from_rbml<'b: 'c, 'c>(rbml: &'c mut ::writer::Encoder<'b>) -> Encoder<'c> {
- Encoder { cursor: rbml.writer }
- }
-}
-
-// -----------------------------------------------------------------------------
-// Decoder
-// -----------------------------------------------------------------------------
-
-pub struct Decoder<'a> {
- pub data: &'a [u8],
- position: usize,
-}
-
-impl<'a> Decoder<'a> {
- pub fn new(data: &'a [u8], position: usize) -> Decoder<'a> {
- Decoder {
- data: data,
- position: position,
- }
- }
-
- pub fn position(&self) -> usize {
- self.position
- }
-
- pub fn advance(&mut self, bytes: usize) {
- self.position += bytes;
- }
-}
-
-macro_rules! read_uleb128 {
- ($dec:expr, $t:ty) => ({
- let (value, bytes_read) = read_unsigned_leb128($dec.data, $dec.position);
- $dec.position += bytes_read;
- Ok(value as $t)
- })
-}
-
-macro_rules! read_sleb128 {
- ($dec:expr, $t:ty) => ({
- let (value, bytes_read) = read_signed_leb128($dec.data, $dec.position);
- $dec.position += bytes_read;
- Ok(value as $t)
- })
-}
-
-
-impl<'a> serialize::Decoder for Decoder<'a> {
- type Error = DecodeError;
-
- fn read_nil(&mut self) -> Result<(), Self::Error> {
- Ok(())
- }
-
- fn read_u64(&mut self) -> Result<u64, Self::Error> {
- read_uleb128!(self, u64)
- }
-
- fn read_u32(&mut self) -> Result<u32, Self::Error> {
- read_uleb128!(self, u32)
- }
-
- fn read_u16(&mut self) -> Result<u16, Self::Error> {
- read_uleb128!(self, u16)
- }
-
- fn read_u8(&mut self) -> Result<u8, Self::Error> {
- let value = self.data[self.position];
- self.position += 1;
- Ok(value)
- }
-
- fn read_usize(&mut self) -> Result<usize, Self::Error> {
- read_uleb128!(self, usize)
- }
-
- fn read_i64(&mut self) -> Result<i64, Self::Error> {
- read_sleb128!(self, i64)
- }
-
- fn read_i32(&mut self) -> Result<i32, Self::Error> {
- read_sleb128!(self, i32)
- }
-
- fn read_i16(&mut self) -> Result<i16, Self::Error> {
- read_sleb128!(self, i16)
- }
-
- fn read_i8(&mut self) -> Result<i8, Self::Error> {
- let as_u8 = self.data[self.position];
- self.position += 1;
- unsafe { Ok(::std::mem::transmute(as_u8)) }
- }
-
- fn read_isize(&mut self) -> Result<isize, Self::Error> {
- read_sleb128!(self, isize)
- }
-
- fn read_bool(&mut self) -> Result<bool, Self::Error> {
- let value = self.read_u8()?;
- Ok(value != 0)
- }
-
- fn read_f64(&mut self) -> Result<f64, Self::Error> {
- let bits = self.read_u64()?;
- Ok(unsafe { ::std::mem::transmute(bits) })
- }
-
- fn read_f32(&mut self) -> Result<f32, Self::Error> {
- let bits = self.read_u32()?;
- Ok(unsafe { ::std::mem::transmute(bits) })
- }
-
- fn read_char(&mut self) -> Result<char, Self::Error> {
- let bits = self.read_u32()?;
- Ok(::std::char::from_u32(bits).unwrap())
- }
-
- fn read_str(&mut self) -> Result<String, Self::Error> {
- let len = self.read_usize()?;
- let s = ::std::str::from_utf8(&self.data[self.position..self.position + len]).unwrap();
- self.position += len;
- Ok(s.to_string())
- }
-
- fn read_enum<T, F>(&mut self, _name: &str, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- f(self)
- }
-
- fn read_enum_variant<T, F>(&mut self, _: &[&str], mut f: F) -> Result<T, Self::Error>
- where F: FnMut(&mut Decoder<'a>, usize) -> Result<T, Self::Error>
- {
- let disr = self.read_usize()?;
- f(self, disr)
- }
-
- fn read_enum_variant_arg<T, F>(&mut self, _idx: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- f(self)
- }
-
- fn read_enum_struct_variant<T, F>(&mut self, _: &[&str], mut f: F) -> Result<T, Self::Error>
- where F: FnMut(&mut Decoder<'a>, usize) -> Result<T, Self::Error>
- {
- let disr = self.read_usize()?;
- f(self, disr)
- }
-
- fn read_enum_struct_variant_field<T, F>(&mut self,
- _name: &str,
- _idx: usize,
- f: F)
- -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- f(self)
- }
-
- fn read_struct<T, F>(&mut self, _name: &str, _: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- f(self)
- }
-
- fn read_struct_field<T, F>(&mut self, _name: &str, _idx: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- f(self)
- }
-
- fn read_tuple<T, F>(&mut self, tuple_len: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- self.read_seq(move |d, len| {
- if len == tuple_len {
- f(d)
- } else {
- let err = format!("Invalid tuple length. Expected {}, found {}",
- tuple_len,
- len);
- Err(DecodeError::Expected(err))
- }
- })
- }
-
- fn read_tuple_arg<T, F>(&mut self, idx: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- self.read_seq_elt(idx, f)
- }
-
- fn read_tuple_struct<T, F>(&mut self, _name: &str, len: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- self.read_tuple(len, f)
- }
-
- fn read_tuple_struct_arg<T, F>(&mut self, idx: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- self.read_tuple_arg(idx, f)
- }
-
- fn read_option<T, F>(&mut self, mut f: F) -> Result<T, Self::Error>
- where F: FnMut(&mut Decoder<'a>, bool) -> Result<T, Self::Error>
- {
- self.read_enum("Option", move |this| {
- this.read_enum_variant(&["None", "Some"], move |this, idx| {
- match idx {
- 0 => f(this, false),
- 1 => f(this, true),
- _ => {
- let msg = format!("Invalid Option index: {}", idx);
- Err(DecodeError::Expected(msg))
- }
- }
- })
- })
- }
-
- fn read_seq<T, F>(&mut self, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>, usize) -> Result<T, Self::Error>
- {
- let len = self.read_usize()?;
- f(self, len)
- }
-
- fn read_seq_elt<T, F>(&mut self, _idx: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- f(self)
- }
-
- fn read_map<T, F>(&mut self, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>, usize) -> Result<T, Self::Error>
- {
- let len = self.read_usize()?;
- f(self, len)
- }
-
- fn read_map_elt_key<T, F>(&mut self, _idx: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- f(self)
- }
-
- fn read_map_elt_val<T, F>(&mut self, _idx: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Decoder<'a>) -> Result<T, Self::Error>
- {
- f(self)
- }
-
- fn error(&mut self, err: &str) -> Self::Error {
- DecodeError::ApplicationError(err.to_string())
- }
-}
-
-
-#[cfg(test)]
-mod tests {
- use serialize::{Encodable, Decodable};
- use std::io::Cursor;
- use std::fmt::Debug;
- use super::{Encoder, Decoder};
-
- #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
- struct Struct {
- a: (),
- b: u8,
- c: u16,
- d: u32,
- e: u64,
- f: usize,
-
- g: i8,
- h: i16,
- i: i32,
- j: i64,
- k: isize,
-
- l: char,
- m: String,
- n: f32,
- o: f64,
- p: bool,
- q: Option<u32>,
- }
-
-
- fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
- let mut cursor = Cursor::new(Vec::new());
-
- for value in &values {
- let mut encoder = Encoder::new(&mut cursor);
- Encodable::encode(&value, &mut encoder).unwrap();
- }
-
- let data = cursor.into_inner();
- let mut decoder = Decoder::new(&data[..], 0);
-
- for value in values {
- let decoded = Decodable::decode(&mut decoder).unwrap();
- assert_eq!(value, decoded);
- }
- }
-
- #[test]
- fn test_unit() {
- check_round_trip(vec![(), (), (), ()]);
- }
-
- #[test]
- fn test_u8() {
- let mut vec = vec![];
- for i in ::std::u8::MIN..::std::u8::MAX {
- vec.push(i);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_u16() {
- for i in ::std::u16::MIN..::std::u16::MAX {
- check_round_trip(vec![1, 2, 3, i, i, i]);
- }
- }
-
- #[test]
- fn test_u32() {
- check_round_trip(vec![1, 2, 3, ::std::u32::MIN, 0, 1, ::std::u32::MAX, 2, 1]);
- }
-
- #[test]
- fn test_u64() {
- check_round_trip(vec![1, 2, 3, ::std::u64::MIN, 0, 1, ::std::u64::MAX, 2, 1]);
- }
-
- #[test]
- fn test_usize() {
- check_round_trip(vec![1, 2, 3, ::std::usize::MIN, 0, 1, ::std::usize::MAX, 2, 1]);
- }
-
- #[test]
- fn test_i8() {
- let mut vec = vec![];
- for i in ::std::i8::MIN..::std::i8::MAX {
- vec.push(i);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_i16() {
- for i in ::std::i16::MIN..::std::i16::MAX {
- check_round_trip(vec![-1, 2, -3, i, i, i, 2]);
- }
- }
-
- #[test]
- fn test_i32() {
- check_round_trip(vec![-1, 2, -3, ::std::i32::MIN, 0, 1, ::std::i32::MAX, 2, 1]);
- }
-
- #[test]
- fn test_i64() {
- check_round_trip(vec![-1, 2, -3, ::std::i64::MIN, 0, 1, ::std::i64::MAX, 2, 1]);
- }
-
- #[test]
- fn test_isize() {
- check_round_trip(vec![-1, 2, -3, ::std::isize::MIN, 0, 1, ::std::isize::MAX, 2, 1]);
- }
-
- #[test]
- fn test_bool() {
- check_round_trip(vec![false, true, true, false, false]);
- }
-
- #[test]
- fn test_f32() {
- let mut vec = vec![];
- for i in -100..100 {
- vec.push((i as f32) / 3.0);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_f64() {
- let mut vec = vec![];
- for i in -100..100 {
- vec.push((i as f64) / 3.0);
- }
- check_round_trip(vec);
- }
-
- #[test]
- fn test_char() {
- let vec = vec!['a', 'b', 'c', 'd', 'A', 'X', ' ', '#', 'Ö', 'Ä', 'µ', '€'];
- check_round_trip(vec);
- }
-
- #[test]
- fn test_string() {
- let vec = vec!["abcbuÖeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmeabpnvapeapmaebn".to_string(),
- "abcbuÖganeiÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µsbpmaebn".to_string(),
- "abcbuÖganeiovÄnameÜavmpßvmea€µnvapeapmaebn".to_string()];
-
- check_round_trip(vec);
- }
-
- #[test]
- fn test_option() {
- check_round_trip(vec![Some(-1i8)]);
- check_round_trip(vec![Some(-2i16)]);
- check_round_trip(vec![Some(-3i32)]);
- check_round_trip(vec![Some(-4i64)]);
- check_round_trip(vec![Some(-5isize)]);
-
- let none_i8: Option<i8> = None;
- check_round_trip(vec![none_i8]);
-
- let none_i16: Option<i16> = None;
- check_round_trip(vec![none_i16]);
-
- let none_i32: Option<i32> = None;
- check_round_trip(vec![none_i32]);
-
- let none_i64: Option<i64> = None;
- check_round_trip(vec![none_i64]);
-
- let none_isize: Option<isize> = None;
- check_round_trip(vec![none_isize]);
- }
-
- #[test]
- fn test_struct() {
- check_round_trip(vec![Struct {
- a: (),
- b: 10,
- c: 11,
- d: 12,
- e: 13,
- f: 14,
-
- g: 15,
- h: 16,
- i: 17,
- j: 18,
- k: 19,
-
- l: 'x',
- m: "abc".to_string(),
- n: 20.5,
- o: 21.5,
- p: false,
- q: None,
- }]);
-
- check_round_trip(vec![Struct {
- a: (),
- b: 101,
- c: 111,
- d: 121,
- e: 131,
- f: 141,
-
- g: -15,
- h: -16,
- i: -17,
- j: -18,
- k: -19,
-
- l: 'y',
- m: "def".to_string(),
- n: -20.5,
- o: -21.5,
- p: true,
- q: Some(1234567),
- }]);
- }
-
- #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
- enum Enum {
- Variant1,
- Variant2(usize, f32),
- Variant3 {
- a: i32,
- b: char,
- c: bool,
- },
- }
-
- #[test]
- fn test_enum() {
- check_round_trip(vec![Enum::Variant1,
- Enum::Variant2(1, 2.5),
- Enum::Variant3 {
- a: 3,
- b: 'b',
- c: false,
- },
- Enum::Variant3 {
- a: -4,
- b: 'f',
- c: true,
- }]);
- }
-
- #[test]
- fn test_sequence() {
- let mut vec = vec![];
- for i in -100i64..100i64 {
- vec.push(i * 100000);
- }
-
- check_round_trip(vec![vec]);
- }
-
- #[test]
- fn test_hash_map() {
- use std::collections::HashMap;
- let mut map = HashMap::new();
- for i in -100i64..100i64 {
- map.insert(i * 100000, i * 10000);
- }
-
- check_round_trip(vec![map]);
- }
-
- #[test]
- fn test_tuples() {
- check_round_trip(vec![('x', (), false, 0.5f32)]);
- check_round_trip(vec![(9i8, 10u16, 1.5f64)]);
- check_round_trip(vec![(-12i16, 11u8, 12usize)]);
- check_round_trip(vec![(1234567isize, 100000000000000u64, 99999999999999i64)]);
- check_round_trip(vec![(String::new(), "some string".to_string())]);
- }
-}
fmt_macros = { path = "../libfmt_macros" }
graphviz = { path = "../libgraphviz" }
log = { path = "../liblog" }
-rbml = { path = "../librbml" }
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
WorkProduct(Arc<WorkProductId>),
// Represents different phases in the compiler.
- CrateReader,
CollectLanguageItems,
CheckStaticRecursion,
ResolveLifetimes,
// table in the tcx (or elsewhere) maps to one of these
// nodes. Often we map multiple tables to the same node if there
// is no point in distinguishing them (e.g., both the type and
- // predicates for an item wind up in `ItemSignature`). Other
- // times, such as `ImplItems` vs `TraitItemDefIds`, tables which
- // might be mergable are kept distinct because the sets of def-ids
- // to which they apply are disjoint, and hence we might as well
- // have distinct labels for easier debugging.
+ // predicates for an item wind up in `ItemSignature`).
ImplOrTraitItems(D),
ItemSignature(D),
FieldTy(D),
SizedConstraint(D),
- TraitItemDefIds(D),
+ ImplOrTraitItemDefIds(D),
InherentImpls(D),
- ImplItems(D),
// The set of impls for a given trait. Ultimately, it would be
// nice to get more fine-grained here (e.g., to include a
ImplOrTraitItems,
ItemSignature,
FieldTy,
- TraitItemDefIds,
+ ImplOrTraitItemDefIds,
InherentImpls,
- ImplItems,
TraitImpls,
ReprHints,
}
match *self {
Krate => Some(Krate),
- CrateReader => Some(CrateReader),
CollectLanguageItems => Some(CollectLanguageItems),
CheckStaticRecursion => Some(CheckStaticRecursion),
ResolveLifetimes => Some(ResolveLifetimes),
ItemSignature(ref d) => op(d).map(ItemSignature),
FieldTy(ref d) => op(d).map(FieldTy),
SizedConstraint(ref d) => op(d).map(SizedConstraint),
- TraitItemDefIds(ref d) => op(d).map(TraitItemDefIds),
+ ImplOrTraitItemDefIds(ref d) => op(d).map(ImplOrTraitItemDefIds),
InherentImpls(ref d) => op(d).map(InherentImpls),
- ImplItems(ref d) => op(d).map(ImplItems),
TraitImpls(ref d) => op(d).map(TraitImpls),
TraitItems(ref d) => op(d).map(TraitItems),
ReprHints(ref d) => op(d).map(ReprHints),
#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Def {
Fn(DefId),
- SelfTy(Option<DefId> /* trait */, Option<ast::NodeId> /* impl */),
+ SelfTy(Option<DefId> /* trait */, Option<DefId> /* impl */),
Mod(DefId),
- ForeignMod(DefId),
Static(DefId, bool /* is_mutbl */),
Const(DefId),
AssociatedConst(DefId),
- Local(DefId, // def id of variable
- ast::NodeId), // node id of variable
- Variant(DefId /* enum */, DefId /* variant */),
+ Local(DefId),
+ Variant(DefId),
Enum(DefId),
TyAlias(DefId),
- AssociatedTy(DefId /* trait */, DefId),
+ AssociatedTy(DefId),
Trait(DefId),
PrimTy(hir::PrimTy),
TyParam(DefId),
Upvar(DefId, // def id of closed over local
- ast::NodeId, // node id of closed over local
usize, // index in the freevars list of the closure
ast::NodeId), // expr node that creates the closure
// within.
pub type ExportMap = NodeMap<Vec<Export>>;
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub struct Export {
pub name: ast::Name, // The name of the target.
pub def_id: DefId, // The definition of the target.
}
impl Def {
- pub fn var_id(&self) -> ast::NodeId {
- match *self {
- Def::Local(_, id) |
- Def::Upvar(_, id, ..) => {
- id
- }
-
- Def::Fn(..) | Def::Mod(..) | Def::ForeignMod(..) | Def::Static(..) |
- Def::Variant(..) | Def::Enum(..) | Def::TyAlias(..) | Def::AssociatedTy(..) |
- Def::TyParam(..) | Def::Struct(..) | Def::Union(..) | Def::Trait(..) |
- Def::Method(..) | Def::Const(..) | Def::AssociatedConst(..) |
- Def::PrimTy(..) | Def::Label(..) | Def::SelfTy(..) | Def::Err => {
- bug!("attempted .var_id() on invalid {:?}", self)
- }
- }
- }
-
pub fn def_id(&self) -> DefId {
match *self {
- Def::Fn(id) | Def::Mod(id) | Def::ForeignMod(id) | Def::Static(id, _) |
- Def::Variant(_, id) | Def::Enum(id) | Def::TyAlias(id) | Def::AssociatedTy(_, id) |
+ Def::Fn(id) | Def::Mod(id) | Def::Static(id, _) |
+ Def::Variant(id) | Def::Enum(id) | Def::TyAlias(id) | Def::AssociatedTy(id) |
Def::TyParam(id) | Def::Struct(id) | Def::Union(id) | Def::Trait(id) |
Def::Method(id) | Def::Const(id) | Def::AssociatedConst(id) |
- Def::Local(id, _) | Def::Upvar(id, ..) => {
+ Def::Local(id) | Def::Upvar(id, ..) => {
id
}
match *self {
Def::Fn(..) => "function",
Def::Mod(..) => "module",
- Def::ForeignMod(..) => "foreign module",
Def::Static(..) => "static",
Def::Variant(..) => "variant",
Def::Enum(..) => "enum",
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use middle::cstore::LOCAL_CRATE;
use ty;
-use syntax::ast::CrateNum;
+
+use rustc_data_structures::indexed_vec::Idx;
+use serialize::{self, Encoder, Decoder};
+
use std::fmt;
use std::u32;
+#[derive(Clone, Copy, Eq, Ord, PartialOrd, PartialEq, Hash, Debug)]
+pub struct CrateNum(u32);
+
+impl Idx for CrateNum {
+ fn new(value: usize) -> Self {
+ assert!(value < (u32::MAX) as usize);
+ CrateNum(value as u32)
+ }
+
+ fn index(self) -> usize {
+ self.0 as usize
+ }
+}
+
+/// Item definitions in the currently-compiled crate would have the CrateNum
+/// LOCAL_CRATE in their DefId.
+pub const LOCAL_CRATE: CrateNum = CrateNum(0);
+
+impl CrateNum {
+ pub fn new(x: usize) -> CrateNum {
+ assert!(x < (u32::MAX as usize));
+ CrateNum(x as u32)
+ }
+
+ pub fn from_u32(x: u32) -> CrateNum {
+ CrateNum(x)
+ }
+
+ pub fn as_usize(&self) -> usize {
+ self.0 as usize
+ }
+
+ pub fn as_u32(&self) -> u32 {
+ self.0
+ }
+}
+
+impl fmt::Display for CrateNum {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(&self.0, f)
+ }
+}
+
+impl serialize::UseSpecializedEncodable for CrateNum {
+ fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+ s.emit_u32(self.0)
+ }
+}
+
+impl serialize::UseSpecializedDecodable for CrateNum {
+ fn default_decode<D: Decoder>(d: &mut D) -> Result<CrateNum, D::Error> {
+ d.read_u32().map(CrateNum)
+ }
+}
+
/// A DefIndex is an index into the hir-map for a crate, identifying a
/// particular definition. It should really be considered an interned
/// shorthand for a particular DefPath.
/// A DefId identifies a particular *definition*, by combining a crate
/// index and a def index.
-#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable,
- RustcDecodable, Hash, Copy)]
+#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct DefId {
pub krate: CrateNum,
pub index: DefIndex,
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! A Folder represents an HIR->HIR fold; it accepts a HIR piece,
-//! and returns a piece of the same type.
-
-use hir::*;
-use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, Attribute, Attribute_};
-use syntax::ast::{NestedMetaItem, NestedMetaItemKind, MetaItem, MetaItemKind};
-use hir;
-use syntax_pos::Span;
-use syntax::codemap::{respan, Spanned};
-use syntax::ptr::P;
-use syntax::parse::token::keywords;
-use syntax::util::move_map::MoveMap;
-
-pub trait Folder : Sized {
- // Any additions to this trait should happen in form
- // of a call to a public `noop_*` function that only calls
- // out to the folder again, not other `noop_*` functions.
- //
- // This is a necessary API workaround to the problem of not
- // being able to call out to the super default method
- // in an overridden default method.
-
- fn fold_crate(&mut self, c: Crate) -> Crate {
- noop_fold_crate(c, self)
- }
-
- fn fold_meta_items(&mut self, meta_items: HirVec<P<MetaItem>>) -> HirVec<P<MetaItem>> {
- noop_fold_meta_items(meta_items, self)
- }
-
- fn fold_meta_list_item(&mut self, list_item: NestedMetaItem) -> NestedMetaItem {
- noop_fold_meta_list_item(list_item, self)
- }
-
- fn fold_meta_item(&mut self, meta_item: P<MetaItem>) -> P<MetaItem> {
- noop_fold_meta_item(meta_item, self)
- }
-
- fn fold_view_path(&mut self, view_path: P<ViewPath>) -> P<ViewPath> {
- noop_fold_view_path(view_path, self)
- }
-
- fn fold_foreign_item(&mut self, ni: ForeignItem) -> ForeignItem {
- noop_fold_foreign_item(ni, self)
- }
-
- fn fold_item(&mut self, i: Item) -> Item {
- noop_fold_item(i, self)
- }
-
- fn fold_item_id(&mut self, i: ItemId) -> ItemId {
- noop_fold_item_id(i, self)
- }
-
- fn fold_struct_field(&mut self, sf: StructField) -> StructField {
- noop_fold_struct_field(sf, self)
- }
-
- fn fold_item_underscore(&mut self, i: Item_) -> Item_ {
- noop_fold_item_underscore(i, self)
- }
-
- fn fold_trait_item(&mut self, i: TraitItem) -> TraitItem {
- noop_fold_trait_item(i, self)
- }
-
- fn fold_impl_item(&mut self, i: ImplItem) -> ImplItem {
- noop_fold_impl_item(i, self)
- }
-
- fn fold_fn_decl(&mut self, d: P<FnDecl>) -> P<FnDecl> {
- noop_fold_fn_decl(d, self)
- }
-
- fn fold_block(&mut self, b: P<Block>) -> P<Block> {
- noop_fold_block(b, self)
- }
-
- fn fold_stmt(&mut self, s: Stmt) -> Stmt {
- noop_fold_stmt(s, self)
- }
-
- fn fold_arm(&mut self, a: Arm) -> Arm {
- noop_fold_arm(a, self)
- }
-
- fn fold_pat(&mut self, p: P<Pat>) -> P<Pat> {
- noop_fold_pat(p, self)
- }
-
- fn fold_decl(&mut self, d: P<Decl>) -> P<Decl> {
- noop_fold_decl(d, self)
- }
-
- fn fold_expr(&mut self, e: P<Expr>) -> P<Expr> {
- e.map(|e| noop_fold_expr(e, self))
- }
-
- fn fold_ty(&mut self, t: P<Ty>) -> P<Ty> {
- noop_fold_ty(t, self)
- }
-
- fn fold_ty_binding(&mut self, t: TypeBinding) -> TypeBinding {
- noop_fold_ty_binding(t, self)
- }
-
- fn fold_mod(&mut self, m: Mod) -> Mod {
- noop_fold_mod(m, self)
- }
-
- fn fold_foreign_mod(&mut self, nm: ForeignMod) -> ForeignMod {
- noop_fold_foreign_mod(nm, self)
- }
-
- fn fold_variant(&mut self, v: Variant) -> Variant {
- noop_fold_variant(v, self)
- }
-
- fn fold_name(&mut self, n: Name) -> Name {
- noop_fold_name(n, self)
- }
-
- fn fold_usize(&mut self, i: usize) -> usize {
- noop_fold_usize(i, self)
- }
-
- fn fold_path(&mut self, p: Path) -> Path {
- noop_fold_path(p, self)
- }
-
- fn fold_path_parameters(&mut self, p: PathParameters) -> PathParameters {
- noop_fold_path_parameters(p, self)
- }
-
- fn fold_angle_bracketed_parameter_data(&mut self,
- p: AngleBracketedParameterData)
- -> AngleBracketedParameterData {
- noop_fold_angle_bracketed_parameter_data(p, self)
- }
-
- fn fold_parenthesized_parameter_data(&mut self,
- p: ParenthesizedParameterData)
- -> ParenthesizedParameterData {
- noop_fold_parenthesized_parameter_data(p, self)
- }
-
- fn fold_local(&mut self, l: P<Local>) -> P<Local> {
- noop_fold_local(l, self)
- }
-
- fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime {
- noop_fold_lifetime(l, self)
- }
-
- fn fold_lifetime_def(&mut self, l: LifetimeDef) -> LifetimeDef {
- noop_fold_lifetime_def(l, self)
- }
-
- fn fold_attribute(&mut self, at: Attribute) -> Option<Attribute> {
- noop_fold_attribute(at, self)
- }
-
- fn fold_arg(&mut self, a: Arg) -> Arg {
- noop_fold_arg(a, self)
- }
-
- fn fold_generics(&mut self, generics: Generics) -> Generics {
- noop_fold_generics(generics, self)
- }
-
- fn fold_trait_ref(&mut self, p: TraitRef) -> TraitRef {
- noop_fold_trait_ref(p, self)
- }
-
- fn fold_poly_trait_ref(&mut self, p: PolyTraitRef) -> PolyTraitRef {
- noop_fold_poly_trait_ref(p, self)
- }
-
- fn fold_variant_data(&mut self, vdata: VariantData) -> VariantData {
- noop_fold_variant_data(vdata, self)
- }
-
- fn fold_lifetimes(&mut self, lts: HirVec<Lifetime>) -> HirVec<Lifetime> {
- noop_fold_lifetimes(lts, self)
- }
-
- fn fold_lifetime_defs(&mut self, lts: HirVec<LifetimeDef>) -> HirVec<LifetimeDef> {
- noop_fold_lifetime_defs(lts, self)
- }
-
- fn fold_ty_param(&mut self, tp: TyParam) -> TyParam {
- noop_fold_ty_param(tp, self)
- }
-
- fn fold_ty_params(&mut self, tps: HirVec<TyParam>) -> HirVec<TyParam> {
- noop_fold_ty_params(tps, self)
- }
-
- fn fold_opt_lifetime(&mut self, o_lt: Option<Lifetime>) -> Option<Lifetime> {
- noop_fold_opt_lifetime(o_lt, self)
- }
-
- fn fold_opt_bounds(&mut self,
- b: Option<TyParamBounds>)
- -> Option<TyParamBounds> {
- noop_fold_opt_bounds(b, self)
- }
-
- fn fold_bounds(&mut self, b: TyParamBounds) -> TyParamBounds {
- noop_fold_bounds(b, self)
- }
-
- fn fold_ty_param_bound(&mut self, tpb: TyParamBound) -> TyParamBound {
- noop_fold_ty_param_bound(tpb, self)
- }
-
- fn fold_mt(&mut self, mt: MutTy) -> MutTy {
- noop_fold_mt(mt, self)
- }
-
- fn fold_field(&mut self, field: Field) -> Field {
- noop_fold_field(field, self)
- }
-
- fn fold_where_clause(&mut self, where_clause: WhereClause) -> WhereClause {
- noop_fold_where_clause(where_clause, self)
- }
-
- fn fold_where_predicate(&mut self, where_predicate: WherePredicate) -> WherePredicate {
- noop_fold_where_predicate(where_predicate, self)
- }
-
- /// called for the `id` on each declaration
- fn new_id(&mut self, i: NodeId) -> NodeId {
- i
- }
-
- /// called for ids that are references (e.g., ItemDef)
- fn map_id(&mut self, i: NodeId) -> NodeId {
- i
- }
-
- fn new_span(&mut self, sp: Span) -> Span {
- sp
- }
-}
-
-pub fn noop_fold_meta_items<T: Folder>(meta_items: HirVec<P<MetaItem>>,
- fld: &mut T)
- -> HirVec<P<MetaItem>> {
- meta_items.move_map(|x| fld.fold_meta_item(x))
-}
-
-pub fn noop_fold_view_path<T: Folder>(view_path: P<ViewPath>, fld: &mut T) -> P<ViewPath> {
- view_path.map(|Spanned { node, span }| {
- Spanned {
- node: match node {
- ViewPathSimple(name, path) => {
- ViewPathSimple(name, fld.fold_path(path))
- }
- ViewPathGlob(path) => {
- ViewPathGlob(fld.fold_path(path))
- }
- ViewPathList(path, path_list_idents) => {
- ViewPathList(fld.fold_path(path),
- path_list_idents.move_map(|path_list_ident| {
- Spanned {
- node: PathListItem_ {
- id: fld.new_id(path_list_ident.node.id),
- name: path_list_ident.node.name,
- rename: path_list_ident.node.rename,
- },
- span: fld.new_span(path_list_ident.span),
- }
- }))
- }
- },
- span: fld.new_span(span),
- }
- })
-}
-
-pub fn fold_attrs<T, F>(attrs: T, fld: &mut F) -> T
- where T: Into<Vec<Attribute>> + From<Vec<Attribute>>,
- F: Folder,
-{
- attrs.into().move_flat_map(|x| fld.fold_attribute(x)).into()
-}
-
-pub fn noop_fold_arm<T: Folder>(Arm { attrs, pats, guard, body }: Arm, fld: &mut T) -> Arm {
- Arm {
- attrs: fold_attrs(attrs, fld),
- pats: pats.move_map(|x| fld.fold_pat(x)),
- guard: guard.map(|x| fld.fold_expr(x)),
- body: fld.fold_expr(body),
- }
-}
-
-pub fn noop_fold_decl<T: Folder>(d: P<Decl>, fld: &mut T) -> P<Decl> {
- d.map(|Spanned { node, span }| {
- match node {
- DeclLocal(l) => Spanned {
- node: DeclLocal(fld.fold_local(l)),
- span: fld.new_span(span),
- },
- DeclItem(it) => Spanned {
- node: DeclItem(fld.fold_item_id(it)),
- span: fld.new_span(span),
- },
- }
- })
-}
-
-pub fn noop_fold_ty_binding<T: Folder>(b: TypeBinding, fld: &mut T) -> TypeBinding {
- TypeBinding {
- id: fld.new_id(b.id),
- name: b.name,
- ty: fld.fold_ty(b.ty),
- span: fld.new_span(b.span),
- }
-}
-
-pub fn noop_fold_ty<T: Folder>(t: P<Ty>, fld: &mut T) -> P<Ty> {
- t.map(|Ty { id, node, span }| {
- Ty {
- id: fld.new_id(id),
- node: match node {
- TyInfer => node,
- TyVec(ty) => TyVec(fld.fold_ty(ty)),
- TyPtr(mt) => TyPtr(fld.fold_mt(mt)),
- TyRptr(region, mt) => {
- TyRptr(fld.fold_opt_lifetime(region), fld.fold_mt(mt))
- }
- TyBareFn(f) => {
- TyBareFn(f.map(|BareFnTy { lifetimes, unsafety, abi, decl }| {
- BareFnTy {
- lifetimes: fld.fold_lifetime_defs(lifetimes),
- unsafety: unsafety,
- abi: abi,
- decl: fld.fold_fn_decl(decl),
- }
- }))
- }
- TyNever => node,
- TyTup(tys) => TyTup(tys.move_map(|ty| fld.fold_ty(ty))),
- TyPath(qself, path) => {
- let qself = qself.map(|QSelf { ty, position }| {
- QSelf {
- ty: fld.fold_ty(ty),
- position: position,
- }
- });
- TyPath(qself, fld.fold_path(path))
- }
- TyObjectSum(ty, bounds) => {
- TyObjectSum(fld.fold_ty(ty), fld.fold_bounds(bounds))
- }
- TyFixedLengthVec(ty, e) => {
- TyFixedLengthVec(fld.fold_ty(ty), fld.fold_expr(e))
- }
- TyTypeof(expr) => {
- TyTypeof(fld.fold_expr(expr))
- }
- TyPolyTraitRef(bounds) => {
- TyPolyTraitRef(bounds.move_map(|b| fld.fold_ty_param_bound(b)))
- }
- TyImplTrait(bounds) => {
- TyImplTrait(bounds.move_map(|b| fld.fold_ty_param_bound(b)))
- }
- },
- span: fld.new_span(span),
- }
- })
-}
-
-pub fn noop_fold_foreign_mod<T: Folder>(ForeignMod { abi, items }: ForeignMod,
- fld: &mut T)
- -> ForeignMod {
- ForeignMod {
- abi: abi,
- items: items.move_map(|x| fld.fold_foreign_item(x)),
- }
-}
-
-pub fn noop_fold_variant<T: Folder>(v: Variant, fld: &mut T) -> Variant {
- Spanned {
- node: Variant_ {
- name: v.node.name,
- attrs: fold_attrs(v.node.attrs, fld),
- data: fld.fold_variant_data(v.node.data),
- disr_expr: v.node.disr_expr.map(|e| fld.fold_expr(e)),
- },
- span: fld.new_span(v.span),
- }
-}
-
-pub fn noop_fold_name<T: Folder>(n: Name, _: &mut T) -> Name {
- n
-}
-
-pub fn noop_fold_usize<T: Folder>(i: usize, _: &mut T) -> usize {
- i
-}
-
-pub fn noop_fold_path<T: Folder>(Path { global, segments, span }: Path, fld: &mut T) -> Path {
- Path {
- global: global,
- segments: segments.move_map(|PathSegment { name, parameters }| {
- PathSegment {
- name: fld.fold_name(name),
- parameters: fld.fold_path_parameters(parameters),
- }
- }),
- span: fld.new_span(span),
- }
-}
-
-pub fn noop_fold_path_parameters<T: Folder>(path_parameters: PathParameters,
- fld: &mut T)
- -> PathParameters {
- match path_parameters {
- AngleBracketedParameters(data) =>
- AngleBracketedParameters(fld.fold_angle_bracketed_parameter_data(data)),
- ParenthesizedParameters(data) =>
- ParenthesizedParameters(fld.fold_parenthesized_parameter_data(data)),
- }
-}
-
-pub fn noop_fold_angle_bracketed_parameter_data<T: Folder>(data: AngleBracketedParameterData,
- fld: &mut T)
- -> AngleBracketedParameterData {
- let AngleBracketedParameterData { lifetimes, types, bindings } = data;
- AngleBracketedParameterData {
- lifetimes: fld.fold_lifetimes(lifetimes),
- types: types.move_map(|ty| fld.fold_ty(ty)),
- bindings: bindings.move_map(|b| fld.fold_ty_binding(b)),
- }
-}
-
-pub fn noop_fold_parenthesized_parameter_data<T: Folder>(data: ParenthesizedParameterData,
- fld: &mut T)
- -> ParenthesizedParameterData {
- let ParenthesizedParameterData { inputs, output, span } = data;
- ParenthesizedParameterData {
- inputs: inputs.move_map(|ty| fld.fold_ty(ty)),
- output: output.map(|ty| fld.fold_ty(ty)),
- span: fld.new_span(span),
- }
-}
-
-pub fn noop_fold_local<T: Folder>(l: P<Local>, fld: &mut T) -> P<Local> {
- l.map(|Local { id, pat, ty, init, span, attrs }| {
- Local {
- id: fld.new_id(id),
- ty: ty.map(|t| fld.fold_ty(t)),
- pat: fld.fold_pat(pat),
- init: init.map(|e| fld.fold_expr(e)),
- span: fld.new_span(span),
- attrs: fold_attrs(attrs, fld),
- }
- })
-}
-
-pub fn noop_fold_attribute<T: Folder>(at: Attribute, fld: &mut T) -> Option<Attribute> {
- let Spanned {node: Attribute_ {id, style, value, is_sugared_doc}, span} = at;
- Some(Spanned {
- node: Attribute_ {
- id: id,
- style: style,
- value: fld.fold_meta_item(value),
- is_sugared_doc: is_sugared_doc,
- },
- span: fld.new_span(span),
- })
-}
-
-pub fn noop_fold_meta_list_item<T: Folder>(li: NestedMetaItem, fld: &mut T)
- -> NestedMetaItem {
- Spanned {
- node: match li.node {
- NestedMetaItemKind::MetaItem(mi) => {
- NestedMetaItemKind::MetaItem(fld.fold_meta_item(mi))
- },
- NestedMetaItemKind::Literal(lit) => NestedMetaItemKind::Literal(lit)
- },
- span: fld.new_span(li.span)
- }
-}
-
-pub fn noop_fold_meta_item<T: Folder>(mi: P<MetaItem>, fld: &mut T) -> P<MetaItem> {
- mi.map(|Spanned { node, span }| {
- Spanned {
- node: match node {
- MetaItemKind::Word(id) => MetaItemKind::Word(id),
- MetaItemKind::List(id, mis) => {
- MetaItemKind::List(id, mis.move_map(|e| fld.fold_meta_list_item(e)))
- }
- MetaItemKind::NameValue(id, s) => MetaItemKind::NameValue(id, s),
- },
- span: fld.new_span(span),
- }
- })
-}
-
-pub fn noop_fold_arg<T: Folder>(Arg { id, pat, ty }: Arg, fld: &mut T) -> Arg {
- Arg {
- id: fld.new_id(id),
- pat: fld.fold_pat(pat),
- ty: fld.fold_ty(ty),
- }
-}
-
-pub fn noop_fold_fn_decl<T: Folder>(decl: P<FnDecl>, fld: &mut T) -> P<FnDecl> {
- decl.map(|FnDecl { inputs, output, variadic }| {
- FnDecl {
- inputs: inputs.move_map(|x| fld.fold_arg(x)),
- output: match output {
- Return(ty) => Return(fld.fold_ty(ty)),
- DefaultReturn(span) => DefaultReturn(span),
- },
- variadic: variadic,
- }
- })
-}
-
-pub fn noop_fold_ty_param_bound<T>(tpb: TyParamBound, fld: &mut T) -> TyParamBound
- where T: Folder
-{
- match tpb {
- TraitTyParamBound(ty, modifier) => TraitTyParamBound(fld.fold_poly_trait_ref(ty), modifier),
- RegionTyParamBound(lifetime) => RegionTyParamBound(fld.fold_lifetime(lifetime)),
- }
-}
-
-pub fn noop_fold_ty_param<T: Folder>(tp: TyParam, fld: &mut T) -> TyParam {
- let TyParam {id, name, bounds, default, span} = tp;
- TyParam {
- id: fld.new_id(id),
- name: name,
- bounds: fld.fold_bounds(bounds),
- default: default.map(|x| fld.fold_ty(x)),
- span: span,
- }
-}
-
-pub fn noop_fold_ty_params<T: Folder>(tps: HirVec<TyParam>,
- fld: &mut T)
- -> HirVec<TyParam> {
- tps.move_map(|tp| fld.fold_ty_param(tp))
-}
-
-pub fn noop_fold_lifetime<T: Folder>(l: Lifetime, fld: &mut T) -> Lifetime {
- Lifetime {
- id: fld.new_id(l.id),
- name: l.name,
- span: fld.new_span(l.span),
- }
-}
-
-pub fn noop_fold_lifetime_def<T: Folder>(l: LifetimeDef, fld: &mut T) -> LifetimeDef {
- LifetimeDef {
- lifetime: fld.fold_lifetime(l.lifetime),
- bounds: fld.fold_lifetimes(l.bounds),
- }
-}
-
-pub fn noop_fold_lifetimes<T: Folder>(lts: HirVec<Lifetime>, fld: &mut T) -> HirVec<Lifetime> {
- lts.move_map(|l| fld.fold_lifetime(l))
-}
-
-pub fn noop_fold_lifetime_defs<T: Folder>(lts: HirVec<LifetimeDef>,
- fld: &mut T)
- -> HirVec<LifetimeDef> {
- lts.move_map(|l| fld.fold_lifetime_def(l))
-}
-
-pub fn noop_fold_opt_lifetime<T: Folder>(o_lt: Option<Lifetime>, fld: &mut T) -> Option<Lifetime> {
- o_lt.map(|lt| fld.fold_lifetime(lt))
-}
-
-pub fn noop_fold_generics<T: Folder>(Generics {ty_params, lifetimes, where_clause, span}: Generics,
- fld: &mut T)
- -> Generics {
- Generics {
- ty_params: fld.fold_ty_params(ty_params),
- lifetimes: fld.fold_lifetime_defs(lifetimes),
- where_clause: fld.fold_where_clause(where_clause),
- span: fld.new_span(span),
- }
-}
-
-pub fn noop_fold_where_clause<T: Folder>(WhereClause { id, predicates }: WhereClause,
- fld: &mut T)
- -> WhereClause {
- WhereClause {
- id: fld.new_id(id),
- predicates: predicates.move_map(|predicate| fld.fold_where_predicate(predicate)),
- }
-}
-
-pub fn noop_fold_where_predicate<T: Folder>(pred: WherePredicate, fld: &mut T) -> WherePredicate {
- match pred {
- hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate{bound_lifetimes,
- bounded_ty,
- bounds,
- span}) => {
- hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
- bound_lifetimes: fld.fold_lifetime_defs(bound_lifetimes),
- bounded_ty: fld.fold_ty(bounded_ty),
- bounds: bounds.move_map(|x| fld.fold_ty_param_bound(x)),
- span: fld.new_span(span),
- })
- }
- hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate{lifetime,
- bounds,
- span}) => {
- hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
- span: fld.new_span(span),
- lifetime: fld.fold_lifetime(lifetime),
- bounds: bounds.move_map(|bound| fld.fold_lifetime(bound)),
- })
- }
- hir::WherePredicate::EqPredicate(hir::WhereEqPredicate{id,
- path,
- ty,
- span}) => {
- hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
- id: fld.new_id(id),
- path: fld.fold_path(path),
- ty: fld.fold_ty(ty),
- span: fld.new_span(span),
- })
- }
- }
-}
-
-pub fn noop_fold_variant_data<T: Folder>(vdata: VariantData, fld: &mut T) -> VariantData {
- match vdata {
- VariantData::Struct(fields, id) => {
- VariantData::Struct(fields.move_map(|f| fld.fold_struct_field(f)),
- fld.new_id(id))
- }
- VariantData::Tuple(fields, id) => {
- VariantData::Tuple(fields.move_map(|f| fld.fold_struct_field(f)),
- fld.new_id(id))
- }
- VariantData::Unit(id) => VariantData::Unit(fld.new_id(id)),
- }
-}
-
-pub fn noop_fold_trait_ref<T: Folder>(p: TraitRef, fld: &mut T) -> TraitRef {
- let id = fld.new_id(p.ref_id);
- let TraitRef {
- path,
- ref_id: _,
- } = p;
- hir::TraitRef {
- path: fld.fold_path(path),
- ref_id: id,
- }
-}
-
-pub fn noop_fold_poly_trait_ref<T: Folder>(p: PolyTraitRef, fld: &mut T) -> PolyTraitRef {
- hir::PolyTraitRef {
- bound_lifetimes: fld.fold_lifetime_defs(p.bound_lifetimes),
- trait_ref: fld.fold_trait_ref(p.trait_ref),
- span: fld.new_span(p.span),
- }
-}
-
-pub fn noop_fold_struct_field<T: Folder>(f: StructField, fld: &mut T) -> StructField {
- StructField {
- span: fld.new_span(f.span),
- id: fld.new_id(f.id),
- name: f.name,
- vis: f.vis,
- ty: fld.fold_ty(f.ty),
- attrs: fold_attrs(f.attrs, fld),
- }
-}
-
-pub fn noop_fold_field<T: Folder>(Field { name, expr, span }: Field, folder: &mut T) -> Field {
- Field {
- name: respan(folder.new_span(name.span), folder.fold_name(name.node)),
- expr: folder.fold_expr(expr),
- span: folder.new_span(span),
- }
-}
-
-pub fn noop_fold_mt<T: Folder>(MutTy { ty, mutbl }: MutTy, folder: &mut T) -> MutTy {
- MutTy {
- ty: folder.fold_ty(ty),
- mutbl: mutbl,
- }
-}
-
-pub fn noop_fold_opt_bounds<T: Folder>(b: Option<TyParamBounds>,
- folder: &mut T)
- -> Option<TyParamBounds> {
- b.map(|bounds| folder.fold_bounds(bounds))
-}
-
-fn noop_fold_bounds<T: Folder>(bounds: TyParamBounds, folder: &mut T) -> TyParamBounds {
- bounds.move_map(|bound| folder.fold_ty_param_bound(bound))
-}
-
-pub fn noop_fold_block<T: Folder>(b: P<Block>, folder: &mut T) -> P<Block> {
- b.map(|Block { id, stmts, expr, rules, span }| {
- Block {
- id: folder.new_id(id),
- stmts: stmts.move_map(|s| folder.fold_stmt(s)),
- expr: expr.map(|x| folder.fold_expr(x)),
- rules: rules,
- span: folder.new_span(span),
- }
- })
-}
-
-pub fn noop_fold_item_underscore<T: Folder>(i: Item_, folder: &mut T) -> Item_ {
- match i {
- ItemExternCrate(string) => ItemExternCrate(string),
- ItemUse(view_path) => {
- ItemUse(folder.fold_view_path(view_path))
- }
- ItemStatic(t, m, e) => {
- ItemStatic(folder.fold_ty(t), m, folder.fold_expr(e))
- }
- ItemConst(t, e) => {
- ItemConst(folder.fold_ty(t), folder.fold_expr(e))
- }
- ItemFn(decl, unsafety, constness, abi, generics, body) => {
- ItemFn(folder.fold_fn_decl(decl),
- unsafety,
- constness,
- abi,
- folder.fold_generics(generics),
- folder.fold_block(body))
- }
- ItemMod(m) => ItemMod(folder.fold_mod(m)),
- ItemForeignMod(nm) => ItemForeignMod(folder.fold_foreign_mod(nm)),
- ItemTy(t, generics) => {
- ItemTy(folder.fold_ty(t), folder.fold_generics(generics))
- }
- ItemEnum(enum_definition, generics) => {
- ItemEnum(hir::EnumDef {
- variants: enum_definition.variants.move_map(|x| folder.fold_variant(x)),
- },
- folder.fold_generics(generics))
- }
- ItemStruct(struct_def, generics) => {
- let struct_def = folder.fold_variant_data(struct_def);
- ItemStruct(struct_def, folder.fold_generics(generics))
- }
- ItemUnion(struct_def, generics) => {
- let struct_def = folder.fold_variant_data(struct_def);
- ItemUnion(struct_def, folder.fold_generics(generics))
- }
- ItemDefaultImpl(unsafety, ref trait_ref) => {
- ItemDefaultImpl(unsafety, folder.fold_trait_ref((*trait_ref).clone()))
- }
- ItemImpl(unsafety, polarity, generics, ifce, ty, impl_items) => {
- let new_impl_items = impl_items
- .move_map(|item| folder.fold_impl_item(item));
- let ifce = match ifce {
- None => None,
- Some(ref trait_ref) => {
- Some(folder.fold_trait_ref((*trait_ref).clone()))
- }
- };
- ItemImpl(unsafety,
- polarity,
- folder.fold_generics(generics),
- ifce,
- folder.fold_ty(ty),
- new_impl_items)
- }
- ItemTrait(unsafety, generics, bounds, items) => {
- let bounds = folder.fold_bounds(bounds);
- let items = items.move_map(|item| folder.fold_trait_item(item));
- ItemTrait(unsafety, folder.fold_generics(generics), bounds, items)
- }
- }
-}
-
-pub fn noop_fold_trait_item<T: Folder>(i: TraitItem,
- folder: &mut T)
- -> TraitItem {
- TraitItem {
- id: folder.new_id(i.id),
- name: folder.fold_name(i.name),
- attrs: fold_attrs(i.attrs, folder),
- node: match i.node {
- ConstTraitItem(ty, default) => {
- ConstTraitItem(folder.fold_ty(ty), default.map(|x| folder.fold_expr(x)))
- }
- MethodTraitItem(sig, body) => {
- MethodTraitItem(noop_fold_method_sig(sig, folder),
- body.map(|x| folder.fold_block(x)))
- }
- TypeTraitItem(bounds, default) => {
- TypeTraitItem(folder.fold_bounds(bounds),
- default.map(|x| folder.fold_ty(x)))
- }
- },
- span: folder.new_span(i.span),
- }
-}
-
-pub fn noop_fold_impl_item<T: Folder>(i: ImplItem, folder: &mut T) -> ImplItem {
- ImplItem {
- id: folder.new_id(i.id),
- name: folder.fold_name(i.name),
- attrs: fold_attrs(i.attrs, folder),
- vis: i.vis,
- defaultness: i.defaultness,
- node: match i.node {
- ImplItemKind::Const(ty, expr) => {
- ImplItemKind::Const(folder.fold_ty(ty), folder.fold_expr(expr))
- }
- ImplItemKind::Method(sig, body) => {
- ImplItemKind::Method(noop_fold_method_sig(sig, folder), folder.fold_block(body))
- }
- ImplItemKind::Type(ty) => ImplItemKind::Type(folder.fold_ty(ty)),
- },
- span: folder.new_span(i.span),
- }
-}
-
-pub fn noop_fold_mod<T: Folder>(Mod { inner, item_ids }: Mod, folder: &mut T) -> Mod {
- Mod {
- inner: folder.new_span(inner),
- item_ids: item_ids.move_map(|x| folder.fold_item_id(x)),
- }
-}
-
-pub fn noop_fold_crate<T: Folder>(Crate { module, attrs, config, span,
- exported_macros, items }: Crate,
- folder: &mut T)
- -> Crate {
- let config = folder.fold_meta_items(config);
-
- let crate_mod = folder.fold_item(hir::Item {
- name: keywords::Invalid.name(),
- attrs: attrs,
- id: DUMMY_NODE_ID,
- vis: hir::Public,
- span: span,
- node: hir::ItemMod(module),
- });
-
- let (module, attrs, span) = match crate_mod {
- hir::Item { attrs, span, node, .. } => {
- match node {
- hir::ItemMod(m) => (m, attrs, span),
- _ => panic!("fold converted a module to not a module"),
- }
- }
- };
-
- let items = items.into_iter()
- .map(|(id, item)| (id, folder.fold_item(item)))
- .collect();
-
- Crate {
- module: module,
- attrs: attrs,
- config: config,
- span: span,
- exported_macros: exported_macros,
- items: items,
- }
-}
-
-pub fn noop_fold_item_id<T: Folder>(i: ItemId, folder: &mut T) -> ItemId {
- let id = folder.map_id(i.id);
- ItemId { id: id }
-}
-
-// fold one item into one item
-pub fn noop_fold_item<T: Folder>(item: Item, folder: &mut T) -> Item {
- let Item { id, name, attrs, node, vis, span } = item;
- let id = folder.new_id(id);
- let node = folder.fold_item_underscore(node);
-
- Item {
- id: id,
- name: folder.fold_name(name),
- attrs: fold_attrs(attrs, folder),
- node: node,
- vis: vis,
- span: folder.new_span(span),
- }
-}
-
-pub fn noop_fold_foreign_item<T: Folder>(ni: ForeignItem, folder: &mut T) -> ForeignItem {
- ForeignItem {
- id: folder.new_id(ni.id),
- name: folder.fold_name(ni.name),
- attrs: fold_attrs(ni.attrs, folder),
- node: match ni.node {
- ForeignItemFn(fdec, generics) => {
- ForeignItemFn(folder.fold_fn_decl(fdec), folder.fold_generics(generics))
- }
- ForeignItemStatic(t, m) => {
- ForeignItemStatic(folder.fold_ty(t), m)
- }
- },
- vis: ni.vis,
- span: folder.new_span(ni.span),
- }
-}
-
-pub fn noop_fold_method_sig<T: Folder>(sig: MethodSig, folder: &mut T) -> MethodSig {
- MethodSig {
- generics: folder.fold_generics(sig.generics),
- abi: sig.abi,
- unsafety: sig.unsafety,
- constness: sig.constness,
- decl: folder.fold_fn_decl(sig.decl),
- }
-}
-
-pub fn noop_fold_pat<T: Folder>(p: P<Pat>, folder: &mut T) -> P<Pat> {
- p.map(|Pat { id, node, span }| {
- Pat {
- id: folder.new_id(id),
- node: match node {
- PatKind::Wild => PatKind::Wild,
- PatKind::Binding(binding_mode, pth1, sub) => {
- PatKind::Binding(binding_mode,
- Spanned {
- span: folder.new_span(pth1.span),
- node: folder.fold_name(pth1.node),
- },
- sub.map(|x| folder.fold_pat(x)))
- }
- PatKind::Lit(e) => PatKind::Lit(folder.fold_expr(e)),
- PatKind::TupleStruct(pth, pats, ddpos) => {
- PatKind::TupleStruct(folder.fold_path(pth),
- pats.move_map(|x| folder.fold_pat(x)), ddpos)
- }
- PatKind::Path(opt_qself, pth) => {
- let opt_qself = opt_qself.map(|qself| {
- QSelf { ty: folder.fold_ty(qself.ty), position: qself.position }
- });
- PatKind::Path(opt_qself, folder.fold_path(pth))
- }
- PatKind::Struct(pth, fields, etc) => {
- let pth = folder.fold_path(pth);
- let fs = fields.move_map(|f| {
- Spanned {
- span: folder.new_span(f.span),
- node: hir::FieldPat {
- name: f.node.name,
- pat: folder.fold_pat(f.node.pat),
- is_shorthand: f.node.is_shorthand,
- },
- }
- });
- PatKind::Struct(pth, fs, etc)
- }
- PatKind::Tuple(elts, ddpos) => {
- PatKind::Tuple(elts.move_map(|x| folder.fold_pat(x)), ddpos)
- }
- PatKind::Box(inner) => PatKind::Box(folder.fold_pat(inner)),
- PatKind::Ref(inner, mutbl) => PatKind::Ref(folder.fold_pat(inner), mutbl),
- PatKind::Range(e1, e2) => {
- PatKind::Range(folder.fold_expr(e1), folder.fold_expr(e2))
- }
- PatKind::Vec(before, slice, after) => {
- PatKind::Vec(before.move_map(|x| folder.fold_pat(x)),
- slice.map(|x| folder.fold_pat(x)),
- after.move_map(|x| folder.fold_pat(x)))
- }
- },
- span: folder.new_span(span),
- }
- })
-}
-
-pub fn noop_fold_expr<T: Folder>(Expr { id, node, span, attrs }: Expr, folder: &mut T) -> Expr {
- Expr {
- id: folder.new_id(id),
- node: match node {
- ExprBox(e) => {
- ExprBox(folder.fold_expr(e))
- }
- ExprVec(exprs) => {
- ExprVec(exprs.move_map(|x| folder.fold_expr(x)))
- }
- ExprRepeat(expr, count) => {
- ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count))
- }
- ExprTup(elts) => ExprTup(elts.move_map(|x| folder.fold_expr(x))),
- ExprCall(f, args) => {
- ExprCall(folder.fold_expr(f), args.move_map(|x| folder.fold_expr(x)))
- }
- ExprMethodCall(name, tps, args) => {
- ExprMethodCall(respan(folder.new_span(name.span), folder.fold_name(name.node)),
- tps.move_map(|x| folder.fold_ty(x)),
- args.move_map(|x| folder.fold_expr(x)))
- }
- ExprBinary(binop, lhs, rhs) => {
- ExprBinary(binop, folder.fold_expr(lhs), folder.fold_expr(rhs))
- }
- ExprUnary(binop, ohs) => {
- ExprUnary(binop, folder.fold_expr(ohs))
- }
- ExprLit(l) => ExprLit(l),
- ExprCast(expr, ty) => {
- ExprCast(folder.fold_expr(expr), folder.fold_ty(ty))
- }
- ExprType(expr, ty) => {
- ExprType(folder.fold_expr(expr), folder.fold_ty(ty))
- }
- ExprAddrOf(m, ohs) => ExprAddrOf(m, folder.fold_expr(ohs)),
- ExprIf(cond, tr, fl) => {
- ExprIf(folder.fold_expr(cond),
- folder.fold_block(tr),
- fl.map(|x| folder.fold_expr(x)))
- }
- ExprWhile(cond, body, opt_name) => {
- ExprWhile(folder.fold_expr(cond),
- folder.fold_block(body),
- opt_name.map(|label| {
- respan(folder.new_span(label.span), folder.fold_name(label.node))
- }))
- }
- ExprLoop(body, opt_name) => {
- ExprLoop(folder.fold_block(body),
- opt_name.map(|label| {
- respan(folder.new_span(label.span), folder.fold_name(label.node))
- }))
- }
- ExprMatch(expr, arms, source) => {
- ExprMatch(folder.fold_expr(expr),
- arms.move_map(|x| folder.fold_arm(x)),
- source)
- }
- ExprClosure(capture_clause, decl, body, fn_decl_span) => {
- ExprClosure(capture_clause,
- folder.fold_fn_decl(decl),
- folder.fold_block(body),
- folder.new_span(fn_decl_span))
- }
- ExprBlock(blk) => ExprBlock(folder.fold_block(blk)),
- ExprAssign(el, er) => {
- ExprAssign(folder.fold_expr(el), folder.fold_expr(er))
- }
- ExprAssignOp(op, el, er) => {
- ExprAssignOp(op, folder.fold_expr(el), folder.fold_expr(er))
- }
- ExprField(el, name) => {
- ExprField(folder.fold_expr(el),
- respan(folder.new_span(name.span), folder.fold_name(name.node)))
- }
- ExprTupField(el, index) => {
- ExprTupField(folder.fold_expr(el),
- respan(folder.new_span(index.span), folder.fold_usize(index.node)))
- }
- ExprIndex(el, er) => {
- ExprIndex(folder.fold_expr(el), folder.fold_expr(er))
- }
- ExprPath(qself, path) => {
- let qself = qself.map(|QSelf { ty, position }| {
- QSelf {
- ty: folder.fold_ty(ty),
- position: position,
- }
- });
- ExprPath(qself, folder.fold_path(path))
- }
- ExprBreak(opt_name) => ExprBreak(opt_name.map(|label| {
- respan(folder.new_span(label.span), folder.fold_name(label.node))
- })),
- ExprAgain(opt_name) => ExprAgain(opt_name.map(|label| {
- respan(folder.new_span(label.span), folder.fold_name(label.node))
- })),
- ExprRet(e) => ExprRet(e.map(|x| folder.fold_expr(x))),
- ExprInlineAsm(asm, outputs, inputs) => {
- ExprInlineAsm(asm,
- outputs.move_map(|x| folder.fold_expr(x)),
- inputs.move_map(|x| folder.fold_expr(x)))
- }
- ExprStruct(path, fields, maybe_expr) => {
- ExprStruct(folder.fold_path(path),
- fields.move_map(|x| folder.fold_field(x)),
- maybe_expr.map(|x| folder.fold_expr(x)))
- }
- },
- span: folder.new_span(span),
- attrs: fold_attrs(attrs, folder),
- }
-}
-
-pub fn noop_fold_stmt<T: Folder>(stmt: Stmt, folder: &mut T) -> Stmt {
- let span = folder.new_span(stmt.span);
- match stmt.node {
- StmtDecl(d, id) => {
- let id = folder.new_id(id);
- Spanned {
- node: StmtDecl(folder.fold_decl(d), id),
- span: span
- }
- }
- StmtExpr(e, id) => {
- let id = folder.new_id(id);
- Spanned {
- node: StmtExpr(folder.fold_expr(e), id),
- span: span,
- }
- }
- StmtSemi(e, id) => {
- let id = folder.new_id(id);
- Spanned {
- node: StmtSemi(folder.fold_expr(e), id),
- span: span,
- }
- }
- }
-}
impl IdRange {
pub fn max() -> IdRange {
IdRange {
- min: u32::MAX,
- max: u32::MIN,
+ min: NodeId::from_u32(u32::MAX),
+ max: NodeId::from_u32(u32::MIN),
}
}
pub fn add(&mut self, id: NodeId) {
self.min = cmp::min(self.min, id);
- self.max = cmp::max(self.max, id + 1);
+ self.max = cmp::max(self.max, NodeId::from_u32(id.as_u32() + 1));
}
}
pub struct LoweringContext<'a> {
crate_root: Option<&'static str>,
// Use to assign ids to hir nodes that do not directly correspond to an ast node
- sess: Option<&'a Session>,
+ sess: &'a Session,
// As we walk the AST we must keep track of the current 'parent' def id (in
// the form of a DefIndex) so that if we create a new node which introduces
// a definition, then we can properly create the def id.
// We must keep the set of definitions up to date as we add nodes that weren't in the AST.
// This should only return `None` during testing.
- fn definitions(&mut self) -> Option<&mut Definitions>;
-}
-
-pub struct DummyResolver;
-impl Resolver for DummyResolver {
- fn resolve_generated_global_path(&mut self, _path: &hir::Path, _is_value: bool) -> Def {
- Def::Err
- }
- fn get_resolution(&mut self, _id: NodeId) -> Option<PathResolution> {
- None
- }
- fn record_resolution(&mut self, _id: NodeId, _def: Def) {}
- fn definitions(&mut self) -> Option<&mut Definitions> {
- None
- }
+ fn definitions(&mut self) -> &mut Definitions;
}
pub fn lower_crate(sess: &Session,
} else {
Some("std")
},
- sess: Some(sess),
+ sess: sess,
parent_def: None,
resolver: resolver,
}.lower_crate(krate)
}
impl<'a> LoweringContext<'a> {
- pub fn testing_context(resolver: &'a mut Resolver) -> Self {
- LoweringContext {
- crate_root: None,
- sess: None,
- parent_def: None,
- resolver: resolver,
- }
- }
-
fn lower_crate(&mut self, c: &Crate) -> hir::Crate {
struct ItemLowerer<'lcx, 'interner: 'lcx> {
items: BTreeMap<NodeId, hir::Item>,
}
fn next_id(&self) -> NodeId {
- self.sess.map(Session::next_node_id).unwrap_or(0)
+ self.sess.next_node_id()
}
fn diagnostic(&self) -> &errors::Handler {
- self.sess.map(Session::diagnostic)
- .unwrap_or_else(|| panic!("this lowerer cannot emit diagnostics"))
+ self.sess.diagnostic()
}
fn str_to_ident(&self, s: &'static str) -> Name {
where F: FnOnce(&mut LoweringContext) -> T
{
let old_def = self.parent_def;
- self.parent_def = match self.resolver.definitions() {
- Some(defs) => Some(defs.opt_def_index(parent_id).unwrap()),
- None => old_def,
+ self.parent_def = {
+ let defs = self.resolver.definitions();
+ Some(defs.opt_def_index(parent_id).unwrap())
};
let result = f(self);
}
fn lower_ty_param(&mut self, tp: &TyParam) -> hir::TyParam {
+ let mut name = tp.ident.name;
+
+ // Don't expose `Self` (recovered "keyword used as ident" parse error).
+ // `rustc::ty` expects `Self` to be only used for a trait's `Self`.
+ // Instead, use gensym("Self") to create a distinct name that looks the same.
+ if name == token::keywords::SelfType.name() {
+ name = token::gensym("Self");
+ }
+
hir::TyParam {
id: tp.id,
- name: tp.ident.name,
+ name: name,
bounds: self.lower_bounds(&tp.bounds),
default: tp.default.as_ref().map(|x| self.lower_ty(x)),
span: tp.span,
let expr_path = hir::ExprPath(None, self.path_ident(span, id));
let expr = self.expr(span, expr_path, ThinVec::new());
- let def = self.resolver.definitions().map(|defs| {
- Def::Local(defs.local_def_id(binding), binding)
- }).unwrap_or(Def::Err);
+ let def = {
+ let defs = self.resolver.definitions();
+ Def::Local(defs.local_def_id(binding))
+ };
self.resolver.record_resolution(expr.id, def);
expr
let pat = self.pat(span, pat_ident);
let parent_def = self.parent_def;
- let def = self.resolver.definitions().map(|defs| {
+ let def = {
+ let defs = self.resolver.definitions();
let def_path_data = DefPathData::Binding(name.as_str());
let def_index = defs.create_def_with_parent(parent_def, pat.id, def_path_data);
- Def::Local(DefId::local(def_index), pat.id)
- }).unwrap_or(Def::Err);
+ Def::Local(DefId::local(def_index))
+ };
self.resolver.record_resolution(pat.id, def);
pat
pub map: Vec<MapEntry<'ast>>,
/// The parent of this node
pub parent_node: NodeId,
+ /// If true, completely ignore nested items. We set this when loading
+ /// HIR from metadata, since in that case we only want the HIR for
+ /// one specific item (and not the ones nested inside of it).
+ pub ignore_nested_items: bool
}
impl<'ast> NodeCollector<'ast> {
krate: krate,
map: vec![],
parent_node: CRATE_NODE_ID,
+ ignore_nested_items: false
};
collector.insert_entry(CRATE_NODE_ID, RootCrate);
krate: krate,
map: map,
parent_node: parent_node,
+ ignore_nested_items: true
};
assert_eq!(parent_def_path.krate, parent_def_id.krate);
fn insert_entry(&mut self, id: NodeId, entry: MapEntry<'ast>) {
debug!("ast_map: {:?} => {:?}", id, entry);
let len = self.map.len();
- if id as usize >= len {
- self.map.extend(repeat(NotPresent).take(id as usize - len + 1));
+ if id.as_usize() >= len {
+ self.map.extend(repeat(NotPresent).take(id.as_usize() - len + 1));
}
- self.map[id as usize] = entry;
+ self.map[id.as_usize()] = entry;
}
fn insert(&mut self, id: NodeId, node: Node<'ast>) {
/// their outer items.
fn visit_nested_item(&mut self, item: ItemId) {
debug!("visit_nested_item: {:?}", item);
- self.visit_item(self.krate.item(item.id))
+ if !self.ignore_nested_items {
+ self.visit_item(self.krate.item(item.id))
+ }
}
fn visit_item(&mut self, i: &'ast Item) {
use syntax::ast::*;
use syntax::visit;
-use syntax::parse::token;
+use syntax::parse::token::{self, keywords};
/// Creates def ids for nodes in the HIR.
-pub struct DefCollector<'ast> {
+pub struct DefCollector<'a> {
// If we are walking HIR (c.f., AST), we need to keep a reference to the
// crate.
- hir_crate: Option<&'ast hir::Crate>,
- definitions: &'ast mut Definitions,
+ hir_crate: Option<&'a hir::Crate>,
+ definitions: &'a mut Definitions,
parent_def: Option<DefIndex>,
+ pub visit_macro_invoc: Option<&'a mut FnMut(MacroInvocationData)>,
}
-impl<'ast> DefCollector<'ast> {
- pub fn root(definitions: &'ast mut Definitions) -> DefCollector<'ast> {
- let mut collector = DefCollector {
+pub struct MacroInvocationData {
+ pub id: NodeId,
+ pub def_index: DefIndex,
+ pub const_integer: bool,
+}
+
+impl<'a> DefCollector<'a> {
+ pub fn new(definitions: &'a mut Definitions) -> Self {
+ DefCollector {
hir_crate: None,
definitions: definitions,
parent_def: None,
- };
- let root = collector.create_def_with_parent(None, CRATE_NODE_ID, DefPathData::CrateRoot);
- assert_eq!(root, CRATE_DEF_INDEX);
- collector.parent_def = Some(root);
-
- collector.create_def_with_parent(Some(CRATE_DEF_INDEX), DUMMY_NODE_ID, DefPathData::Misc);
-
- collector
+ visit_macro_invoc: None,
+ }
}
pub fn extend(parent_node: NodeId,
parent_def_path: DefPath,
parent_def_id: DefId,
- definitions: &'ast mut Definitions)
- -> DefCollector<'ast> {
- let mut collector = DefCollector {
- hir_crate: None,
- parent_def: None,
- definitions: definitions,
- };
+ definitions: &'a mut Definitions)
+ -> Self {
+ let mut collector = DefCollector::new(definitions);
assert_eq!(parent_def_path.krate, parent_def_id.krate);
let root_path = Box::new(InlinedRootPath {
collector
}
- pub fn walk_item(&mut self, ii: &'ast InlinedItem, krate: &'ast hir::Crate) {
- self.hir_crate = Some(krate);
- ii.visit(self);
+ pub fn collect_root(&mut self) {
+ let root = self.create_def_with_parent(None, CRATE_NODE_ID, DefPathData::CrateRoot);
+ assert_eq!(root, CRATE_DEF_INDEX);
+ self.parent_def = Some(root);
+
+ self.create_def_with_parent(Some(CRATE_DEF_INDEX), DUMMY_NODE_ID, DefPathData::Misc);
}
- fn parent_def(&self) -> Option<DefIndex> {
- self.parent_def
+ pub fn walk_item(&mut self, ii: &'a InlinedItem, krate: &'a hir::Crate) {
+ self.hir_crate = Some(krate);
+ ii.visit(self);
}
fn create_def(&mut self, node_id: NodeId, data: DefPathData) -> DefIndex {
- let parent_def = self.parent_def();
+ let parent_def = self.parent_def;
debug!("create_def(node_id={:?}, data={:?}, parent_def={:?})", node_id, data, parent_def);
self.definitions.create_def_with_parent(parent_def, node_id, data)
}
self.definitions.create_def_with_parent(parent, node_id, data)
}
- fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent_def: DefIndex, f: F) {
+ pub fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent_def: DefIndex, f: F) {
let parent = self.parent_def;
self.parent_def = Some(parent_def);
f(self);
self.parent_def = parent;
}
- fn visit_ast_const_integer(&mut self, expr: &Expr) {
- // Find the node which will be used after lowering.
- if let ExprKind::Paren(ref inner) = expr.node {
- return self.visit_ast_const_integer(inner);
- }
-
- // FIXME(eddyb) Closures should have separate
- // function definition IDs and expression IDs.
- if let ExprKind::Closure(..) = expr.node {
- return;
+ pub fn visit_ast_const_integer(&mut self, expr: &Expr) {
+ match expr.node {
+ // Find the node which will be used after lowering.
+ ExprKind::Paren(ref inner) => return self.visit_ast_const_integer(inner),
+ ExprKind::Mac(..) => return self.visit_macro_invoc(expr.id, true),
+ // FIXME(eddyb) Closures should have separate
+ // function definition IDs and expression IDs.
+ ExprKind::Closure(..) => return,
+ _ => {}
}
self.create_def(expr.id, DefPathData::Initializer);
}
- fn visit_hir_const_integer(&mut self, expr: &'ast hir::Expr) {
+ fn visit_hir_const_integer(&mut self, expr: &hir::Expr) {
// FIXME(eddyb) Closures should have separate
// function definition IDs and expression IDs.
if let hir::ExprClosure(..) = expr.node {
self.create_def(expr.id, DefPathData::Initializer);
}
+
+ fn visit_macro_invoc(&mut self, id: NodeId, const_integer: bool) {
+ if let Some(ref mut visit) = self.visit_macro_invoc {
+ visit(MacroInvocationData {
+ id: id,
+ const_integer: const_integer,
+ def_index: self.parent_def.unwrap(),
+ })
+ }
+ }
}
-impl<'ast> visit::Visitor for DefCollector<'ast> {
+impl<'a> visit::Visitor for DefCollector<'a> {
fn visit_item(&mut self, i: &Item) {
debug!("visit_item: {:?}", i);
ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..) | ItemKind::Trait(..) |
ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) =>
DefPathData::TypeNs(i.ident.name.as_str()),
+ ItemKind::Mod(..) if i.ident == keywords::Invalid.ident() => {
+ return visit::walk_item(self, i);
+ }
ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_str()),
ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
DefPathData::ValueNs(i.ident.name.as_str()),
- ItemKind::Mac(..) => DefPathData::MacroDef(i.ident.name.as_str()),
+ ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder
+ ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false),
ItemKind::Use(..) => DefPathData::Misc,
};
let def = self.create_def(i.id, def_data);
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
DefPathData::ValueNs(ti.ident.name.as_str()),
TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name.as_str()),
- TraitItemKind::Macro(..) => DefPathData::MacroDef(ti.ident.name.as_str()),
+ TraitItemKind::Macro(..) => return self.visit_macro_invoc(ti.id, false),
};
let def = self.create_def(ti.id, def_data);
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
DefPathData::ValueNs(ii.ident.name.as_str()),
ImplItemKind::Type(..) => DefPathData::TypeNs(ii.ident.name.as_str()),
- ImplItemKind::Macro(..) => DefPathData::MacroDef(ii.ident.name.as_str()),
+ ImplItemKind::Macro(..) => return self.visit_macro_invoc(ii.id, false),
};
let def = self.create_def(ii.id, def_data);
fn visit_pat(&mut self, pat: &Pat) {
let parent_def = self.parent_def;
- if let PatKind::Ident(_, id, _) = pat.node {
- let def = self.create_def(pat.id, DefPathData::Binding(id.node.name.as_str()));
- self.parent_def = Some(def);
+ match pat.node {
+ PatKind::Mac(..) => return self.visit_macro_invoc(pat.id, false),
+ PatKind::Ident(_, id, _) => {
+ let def = self.create_def(pat.id, DefPathData::Binding(id.node.name.as_str()));
+ self.parent_def = Some(def);
+ }
+ _ => {}
}
visit::walk_pat(self, pat);
fn visit_expr(&mut self, expr: &Expr) {
let parent_def = self.parent_def;
- if let ExprKind::Repeat(_, ref count) = expr.node {
- self.visit_ast_const_integer(count);
- }
-
- if let ExprKind::Closure(..) = expr.node {
- let def = self.create_def(expr.id, DefPathData::ClosureExpr);
- self.parent_def = Some(def);
+ match expr.node {
+ ExprKind::Mac(..) => return self.visit_macro_invoc(expr.id, false),
+ ExprKind::Repeat(_, ref count) => self.visit_ast_const_integer(count),
+ ExprKind::Closure(..) => {
+ let def = self.create_def(expr.id, DefPathData::ClosureExpr);
+ self.parent_def = Some(def);
+ }
+ _ => {}
}
visit::walk_expr(self, expr);
}
fn visit_ty(&mut self, ty: &Ty) {
- if let TyKind::FixedLengthVec(_, ref length) = ty.node {
- self.visit_ast_const_integer(length);
- }
- if let TyKind::ImplTrait(..) = ty.node {
- self.create_def(ty.id, DefPathData::ImplTrait);
+ match ty.node {
+ TyKind::Mac(..) => return self.visit_macro_invoc(ty.id, false),
+ TyKind::FixedLengthVec(_, ref length) => self.visit_ast_const_integer(length),
+ TyKind::ImplTrait(..) => {
+ self.create_def(ty.id, DefPathData::ImplTrait);
+ }
+ _ => {}
}
visit::walk_ty(self, ty);
}
fn visit_macro_def(&mut self, macro_def: &MacroDef) {
self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
}
+
+ fn visit_stmt(&mut self, stmt: &Stmt) {
+ match stmt.node {
+ StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false),
+ _ => visit::walk_stmt(self, stmt),
+ }
+ }
}
// We walk the HIR rather than the AST when reading items from metadata.
impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> {
- /// Because we want to track parent items and so forth, enable
- /// deep walking so that we walk nested items in the context of
- /// their outer items.
- fn visit_nested_item(&mut self, item_id: hir::ItemId) {
- debug!("visit_nested_item: {:?}", item_id);
- let item = self.hir_crate.unwrap().item(item_id.id);
- self.visit_item(item)
- }
-
fn visit_item(&mut self, i: &'ast hir::Item) {
debug!("visit_item: {:?}", i);
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use middle::cstore::LOCAL_CRATE;
-use hir::def_id::{DefId, DefIndex};
-use hir::map::def_collector::DefCollector;
+use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
use rustc_data_structures::fnv::FnvHashMap;
use std::fmt::Write;
use std::hash::{Hash, Hasher, SipHasher};
-use syntax::{ast, visit};
-use syntax::parse::token::InternedString;
+use syntax::ast;
+use syntax::parse::token::{self, InternedString};
use ty::TyCtxt;
use util::nodemap::NodeMap;
pub data: Vec<DisambiguatedDefPathData>,
/// what krate root is this path relative to?
- pub krate: ast::CrateNum,
+ pub krate: CrateNum,
}
impl DefPath {
self.krate == LOCAL_CRATE
}
- pub fn make<FN>(start_krate: ast::CrateNum,
+ pub fn make<FN>(start_krate: CrateNum,
start_index: DefIndex,
mut get_key: FN) -> DefPath
where FN: FnMut(DefIndex) -> DefKey
pub fn to_string(&self, tcx: TyCtxt) -> String {
let mut s = String::with_capacity(self.data.len() * 16);
- if self.krate == LOCAL_CRATE {
- s.push_str(&tcx.crate_name(self.krate));
- } else {
- s.push_str(&tcx.sess.cstore.original_crate_name(self.krate));
- }
+ s.push_str(&tcx.original_crate_name(self.krate));
s.push_str("/");
s.push_str(&tcx.crate_disambiguator(self.krate));
}
pub fn deterministic_hash_to<H: Hasher>(&self, tcx: TyCtxt, state: &mut H) {
- tcx.crate_name(self.krate).hash(state);
+ tcx.original_crate_name(self.krate).hash(state);
tcx.crate_disambiguator(self.krate).hash(state);
self.data.hash(state);
}
}
}
- pub fn collect(&mut self, krate: &ast::Crate) {
- let mut def_collector = DefCollector::root(self);
- visit::walk_crate(&mut def_collector, krate);
- }
-
/// Get the number of definitions.
pub fn len(&self) -> usize {
self.data.len()
}
impl DefPathData {
+ pub fn get_opt_name(&self) -> Option<ast::Name> {
+ use self::DefPathData::*;
+ match *self {
+ TypeNs(ref name) |
+ ValueNs(ref name) |
+ Module(ref name) |
+ MacroDef(ref name) |
+ TypeParam(ref name) |
+ LifetimeDef(ref name) |
+ EnumVariant(ref name) |
+ Binding(ref name) |
+ Field(ref name) => Some(token::intern(name)),
+
+ Impl |
+ CrateRoot |
+ InlinedRoot(_) |
+ Misc |
+ ClosureExpr |
+ StructCtor |
+ Initializer |
+ ImplTrait => None
+ }
+ }
+
pub fn as_interned_str(&self) -> InternedString {
use self::DefPathData::*;
match *self {
pub use self::Node::*;
use self::MapEntry::*;
use self::collector::NodeCollector;
-use self::def_collector::DefCollector;
+pub use self::def_collector::{DefCollector, MacroInvocationData};
pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData,
DisambiguatedDefPathData, InlinedRootPath};
use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
use syntax::abi::Abi;
-use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, };
+use syntax::ast::{self, Name, NodeId, CRATE_NODE_ID};
use syntax::codemap::Spanned;
use syntax_pos::Span;
use hir::*;
-use hir::fold::Folder;
use hir::print as pprust;
use arena::TypedArena;
use std::cell::RefCell;
-use std::cmp;
use std::io;
use std::mem;
let mut id = id0;
if !self.is_inlined_node_id(id) {
loop {
- match map[id as usize] {
+ match map[id.as_usize()] {
EntryItem(_, item) => {
let def_id = self.local_def_id(item.id);
// NB ^~~~~~~
EntryVariant(p, _) |
EntryExpr(p, _) |
EntryStmt(p, _) |
- EntryTy(p, _) |
+ EntryTy(p, _) |
EntryLocal(p, _) |
EntryPat(p, _) |
EntryBlock(p, _) |
// reading from an inlined def-id is really a read out of
// the metadata from which we loaded the item.
loop {
- match map[id as usize] {
+ match map[id.as_usize()] {
EntryItem(p, _) |
EntryForeignItem(p, _) |
EntryTraitItem(p, _) |
}
fn find_entry(&self, id: NodeId) -> Option<MapEntry<'ast>> {
- self.map.borrow().get(id as usize).cloned()
+ self.map.borrow().get(id.as_usize()).cloned()
}
pub fn krate(&self) -> &'ast Crate {
let mut id = start_id;
loop {
let parent_node = self.get_parent_node(id);
- if parent_node == 0 {
- return Ok(0);
+ if parent_node == CRATE_NODE_ID {
+ return Ok(CRATE_NODE_ID);
}
if parent_node == id {
return Err(id);
}
}
- pub fn expect_struct(&self, id: NodeId) -> &'ast VariantData {
+ pub fn expect_variant_data(&self, id: NodeId) -> &'ast VariantData {
match self.find(id) {
Some(NodeItem(i)) => {
match i.node {
- ItemStruct(ref struct_def, _) => struct_def,
- _ => bug!("struct ID bound to non-struct")
+ ItemStruct(ref struct_def, _) |
+ ItemUnion(ref struct_def, _) => struct_def,
+ _ => {
+ bug!("struct ID bound to non-struct {}",
+ self.node_to_string(id));
+ }
}
}
- Some(NodeVariant(variant)) => {
- if variant.node.data.is_struct() {
- &variant.node.data
- } else {
- bug!("struct ID bound to enum variant that isn't struct-like")
- }
+ Some(NodeStructCtor(data)) => data,
+ Some(NodeVariant(variant)) => &variant.node.data,
+ _ => {
+ bug!("expected struct or variant, found {}",
+ self.node_to_string(id));
}
- _ => bug!("expected struct, found {}", self.node_to_string(id)),
}
}
map: self,
item_name: parts.last().unwrap(),
in_which: &parts[..parts.len() - 1],
- idx: 0,
+ idx: CRATE_NODE_ID,
}
}
fn next(&mut self) -> Option<NodeId> {
loop {
let idx = self.idx;
- if idx as usize >= self.map.entry_count() {
+ if idx.as_usize() >= self.map.entry_count() {
return None;
}
- self.idx += 1;
+ self.idx = NodeId::from_u32(self.idx.as_u32() + 1);
let name = match self.map.find_entry(idx) {
Some(EntryItem(_, n)) => n.name(),
Some(EntryForeignItem(_, n))=> n.name(),
impl Named for TraitItem { fn name(&self) -> Name { self.name } }
impl Named for ImplItem { fn name(&self) -> Name { self.name } }
-pub trait FoldOps {
- fn new_id(&self, id: NodeId) -> NodeId {
- id
- }
- fn new_def_id(&self, def_id: DefId) -> DefId {
- def_id
- }
- fn new_span(&self, span: Span) -> Span {
- span
- }
-}
-
-/// A Folder that updates IDs and Span's according to fold_ops.
-pub struct IdAndSpanUpdater<F> {
- fold_ops: F,
- min_id_assigned: NodeId,
- max_id_assigned: NodeId,
-}
-
-impl<F: FoldOps> IdAndSpanUpdater<F> {
- pub fn new(fold_ops: F) -> IdAndSpanUpdater<F> {
- IdAndSpanUpdater {
- fold_ops: fold_ops,
- min_id_assigned: ::std::u32::MAX,
- max_id_assigned: ::std::u32::MIN,
- }
- }
-
- pub fn id_range(&self) -> intravisit::IdRange {
- intravisit::IdRange {
- min: self.min_id_assigned,
- max: self.max_id_assigned + 1,
- }
- }
-}
-
-impl<F: FoldOps> Folder for IdAndSpanUpdater<F> {
- fn new_id(&mut self, id: NodeId) -> NodeId {
- let id = self.fold_ops.new_id(id);
-
- self.min_id_assigned = cmp::min(self.min_id_assigned, id);
- self.max_id_assigned = cmp::max(self.max_id_assigned, id);
-
- id
- }
-
- fn new_span(&mut self, span: Span) -> Span {
- self.fold_ops.new_span(span)
- }
-}
-
pub fn map_crate<'ast>(forest: &'ast mut Forest,
definitions: Definitions)
-> Map<'ast> {
entries, vector_length, (entries as f64 / vector_length as f64) * 100.);
}
- let local_node_id_watermark = map.len() as NodeId;
+ let local_node_id_watermark = NodeId::new(map.len());
let local_def_id_watermark = definitions.len();
Map {
/// Used for items loaded from external crate that are being inlined into this
/// crate.
-pub fn map_decoded_item<'ast, F: FoldOps>(map: &Map<'ast>,
- parent_def_path: DefPath,
- parent_def_id: DefId,
- ii: InlinedItem,
- fold_ops: F)
- -> &'ast InlinedItem {
+pub fn map_decoded_item<'ast>(map: &Map<'ast>,
+ parent_def_path: DefPath,
+ parent_def_id: DefId,
+ ii: InlinedItem,
+ ii_parent_id: NodeId)
+ -> &'ast InlinedItem {
let _ignore = map.forest.dep_graph.in_ignore();
- let mut fld = IdAndSpanUpdater::new(fold_ops);
- let ii = match ii {
- II::Item(d, i) => II::Item(fld.fold_ops.new_def_id(d),
- i.map(|i| fld.fold_item(i))),
- II::TraitItem(d, ti) => {
- II::TraitItem(fld.fold_ops.new_def_id(d),
- ti.map(|ti| fld.fold_trait_item(ti)))
- }
- II::ImplItem(d, ii) => {
- II::ImplItem(fld.fold_ops.new_def_id(d),
- ii.map(|ii| fld.fold_impl_item(ii)))
- }
- };
-
let ii = map.forest.inlined_items.alloc(ii);
- let ii_parent_id = fld.new_id(DUMMY_NODE_ID);
-
- // Assert that the ii_parent_id is the last NodeId in our reserved range
- assert!(ii_parent_id == fld.max_id_assigned);
- // Assert that we did not violate the invariant that all inlined HIR items
- // have NodeIds greater than or equal to `local_node_id_watermark`
- assert!(fld.min_id_assigned >= map.local_node_id_watermark);
let defs = &mut *map.definitions.borrow_mut();
let mut def_collector = DefCollector::extend(ii_parent_id,
pub mod check_attr;
pub mod def;
pub mod def_id;
-pub mod fold;
pub mod intravisit;
pub mod lowering;
pub mod map;
PatKind::Path(..) |
PatKind::Struct(..) => {
match dm.get(&p.id) {
- Some(&PathResolution { base_def: Def::Variant(_, id), .. }) => {
+ Some(&PathResolution { base_def: Def::Variant(id), .. }) => {
variants.push(id);
}
_ => ()
return;
}
- let requested_node: Option<ast::NodeId> = env::var("RUST_REGION_GRAPH_NODE")
- .ok()
- .and_then(|s| s.parse().ok());
+ let requested_node = env::var("RUST_REGION_GRAPH_NODE")
+ .ok().and_then(|s| s.parse().map(ast::NodeId::new).ok());
if requested_node.is_some() && requested_node != Some(subject_node) {
return;
extern crate getopts;
extern crate graphviz;
extern crate libc;
-extern crate rbml;
extern crate rustc_llvm as llvm;
extern crate rustc_back;
extern crate rustc_data_structures;
// probably get a better home if someone can find one.
use hir::def::{self, Def};
-use hir::def_id::{DefId, DefIndex};
+use hir::def_id::{CrateNum, DefId, DefIndex};
use hir::map as hir_map;
-use hir::map::definitions::DefKey;
+use hir::map::definitions::{Definitions, DefKey};
use hir::svh::Svh;
use middle::lang_items;
-use ty::{self, Ty, TyCtxt, VariantKind};
+use ty::{self, Ty, TyCtxt};
use mir::repr::Mir;
use mir::mir_map::MirMap;
use session::Session;
-use session::config::PanicStrategy;
use session::search_paths::PathKind;
-use util::nodemap::{FnvHashMap, NodeSet, DefIdMap};
-use std::rc::Rc;
+use util::nodemap::{NodeSet, DefIdMap};
use std::path::PathBuf;
+use std::rc::Rc;
use syntax::ast;
use syntax::attr;
-use syntax::ext::base::LoadedMacro;
+use syntax::ext::base::MultiItemModifier;
use syntax::ptr::P;
use syntax::parse::token::InternedString;
use syntax_pos::Span;
use rustc_back::target::Target;
use hir;
use hir::intravisit::Visitor;
+use rustc_back::PanicStrategy;
-pub use self::DefLike::{DlDef, DlField, DlImpl};
pub use self::NativeLibraryKind::{NativeStatic, NativeFramework, NativeUnknown};
// lonely orphan structs and enums looking for a better home
pub struct CrateSource {
pub dylib: Option<(PathBuf, PathKind)>,
pub rlib: Option<(PathBuf, PathKind)>,
- pub cnum: ast::CrateNum,
+ pub cnum: CrateNum,
}
-#[derive(Copy, Debug, PartialEq, Clone)]
+#[derive(Copy, Debug, PartialEq, Clone, RustcEncodable, RustcDecodable)]
pub enum LinkagePreference {
RequireDynamic,
RequireStatic,
}
-enum_from_u32! {
- #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
- pub enum NativeLibraryKind {
- NativeStatic, // native static library (.a archive)
- NativeFramework, // OSX-specific
- NativeUnknown, // default way to specify a dynamic library
- }
-}
-
-// Something that a name can resolve to.
-#[derive(Copy, Clone, Debug)]
-pub enum DefLike {
- DlDef(Def),
- DlImpl(DefId),
- DlField
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
+pub enum NativeLibraryKind {
+ NativeStatic, // native static library (.a archive)
+ NativeFramework, // OSX-specific
+ NativeUnknown, // default way to specify a dynamic library
}
/// The data we save and restore about an inlined item or method. This is not
}
/// A borrowed version of `hir::InlinedItem`.
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, Hash, Debug)]
pub enum InlinedItemRef<'a> {
Item(DefId, &'a hir::Item),
TraitItem(DefId, &'a hir::TraitItem),
ImplItem(DefId, &'a hir::ImplItem)
}
-/// Item definitions in the currently-compiled crate would have the CrateNum
-/// LOCAL_CRATE in their DefId.
-pub const LOCAL_CRATE: ast::CrateNum = 0;
-
-#[derive(Copy, Clone)]
-pub struct ChildItem {
- pub def: DefLike,
- pub name: ast::Name,
- pub vis: ty::Visibility,
-}
-
#[derive(Copy, Clone, Debug)]
pub struct ExternCrate {
/// def_id of an `extern crate` in the current crate that caused
/// can be accessed.
pub trait CrateStore<'tcx> {
// item info
+ fn describe_def(&self, def: DefId) -> Option<Def>;
fn stability(&self, def: DefId) -> Option<attr::Stability>;
fn deprecation(&self, def: DefId) -> Option<attr::Deprecation>;
fn visibility(&self, def: DefId) -> ty::Visibility;
fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
-> ty::ClosureTy<'tcx>;
fn item_variances(&self, def: DefId) -> Vec<ty::Variance>;
- fn repr_attrs(&self, def: DefId) -> Vec<attr::ReprAttr>;
fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> Ty<'tcx>;
fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap<DefId>>;
- fn item_name(&self, def: DefId) -> ast::Name;
- fn opt_item_name(&self, def: DefId) -> Option<ast::Name>;
fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> ty::GenericPredicates<'tcx>;
fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> ty::GenericPredicates<'tcx>;
fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> &'tcx ty::Generics<'tcx>;
+ -> ty::Generics<'tcx>;
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute>;
fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx>;
fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>;
- fn method_arg_names(&self, did: DefId) -> Vec<String>;
+ fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>;
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>;
// trait info
- fn implementations_of_trait(&self, def_id: DefId) -> Vec<DefId>;
- fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> Vec<Rc<ty::Method<'tcx>>>;
- fn trait_item_def_ids(&self, def: DefId)
- -> Vec<ty::ImplOrTraitItemId>;
+ fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>;
// impl info
- fn impl_items(&self, impl_def_id: DefId) -> Vec<ty::ImplOrTraitItemId>;
+ fn impl_or_trait_items(&self, def_id: DefId) -> Vec<DefId>;
fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> Option<ty::TraitRef<'tcx>>;
- fn impl_polarity(&self, def: DefId) -> Option<hir::ImplPolarity>;
+ fn impl_polarity(&self, def: DefId) -> hir::ImplPolarity;
fn custom_coerce_unsized_kind(&self, def: DefId)
-> Option<ty::adjustment::CustomCoerceUnsized>;
- fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> Vec<Rc<ty::AssociatedConst<'tcx>>>;
fn impl_parent(&self, impl_def_id: DefId) -> Option<DefId>;
// trait/impl-item info
// flags
fn is_const_fn(&self, did: DefId) -> bool;
fn is_defaulted_trait(&self, did: DefId) -> bool;
- fn is_impl(&self, did: DefId) -> bool;
fn is_default_impl(&self, impl_did: DefId) -> bool;
fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool;
fn is_foreign_item(&self, did: DefId) -> bool;
fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool;
- fn is_typedef(&self, did: DefId) -> bool;
// crate metadata
- fn dylib_dependency_formats(&self, cnum: ast::CrateNum)
- -> Vec<(ast::CrateNum, LinkagePreference)>;
- fn lang_items(&self, cnum: ast::CrateNum) -> Vec<(DefIndex, usize)>;
- fn missing_lang_items(&self, cnum: ast::CrateNum) -> Vec<lang_items::LangItem>;
- fn is_staged_api(&self, cnum: ast::CrateNum) -> bool;
- fn is_explicitly_linked(&self, cnum: ast::CrateNum) -> bool;
- fn is_allocator(&self, cnum: ast::CrateNum) -> bool;
- fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool;
- fn is_compiler_builtins(&self, cnum: ast::CrateNum) -> bool;
- fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy;
- fn extern_crate(&self, cnum: ast::CrateNum) -> Option<ExternCrate>;
- fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec<ast::Attribute>;
+ fn dylib_dependency_formats(&self, cnum: CrateNum)
+ -> Vec<(CrateNum, LinkagePreference)>;
+ fn lang_items(&self, cnum: CrateNum) -> Vec<(DefIndex, usize)>;
+ fn missing_lang_items(&self, cnum: CrateNum) -> Vec<lang_items::LangItem>;
+ fn is_staged_api(&self, cnum: CrateNum) -> bool;
+ fn is_explicitly_linked(&self, cnum: CrateNum) -> bool;
+ fn is_allocator(&self, cnum: CrateNum) -> bool;
+ fn is_panic_runtime(&self, cnum: CrateNum) -> bool;
+ fn is_compiler_builtins(&self, cnum: CrateNum) -> bool;
+ fn panic_strategy(&self, cnum: CrateNum) -> PanicStrategy;
+ fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate>;
/// The name of the crate as it is referred to in source code of the current
/// crate.
- fn crate_name(&self, cnum: ast::CrateNum) -> InternedString;
+ fn crate_name(&self, cnum: CrateNum) -> InternedString;
/// The name of the crate as it is stored in the crate's metadata.
- fn original_crate_name(&self, cnum: ast::CrateNum) -> InternedString;
- fn crate_hash(&self, cnum: ast::CrateNum) -> Svh;
- fn crate_disambiguator(&self, cnum: ast::CrateNum) -> InternedString;
- fn crate_struct_field_attrs(&self, cnum: ast::CrateNum)
- -> FnvHashMap<DefId, Vec<ast::Attribute>>;
- fn plugin_registrar_fn(&self, cnum: ast::CrateNum) -> Option<DefId>;
- fn native_libraries(&self, cnum: ast::CrateNum) -> Vec<(NativeLibraryKind, String)>;
- fn reachable_ids(&self, cnum: ast::CrateNum) -> Vec<DefId>;
- fn is_no_builtins(&self, cnum: ast::CrateNum) -> bool;
+ fn original_crate_name(&self, cnum: CrateNum) -> InternedString;
+ fn crate_hash(&self, cnum: CrateNum) -> Svh;
+ fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString;
+ fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>;
+ fn native_libraries(&self, cnum: CrateNum) -> Vec<(NativeLibraryKind, String)>;
+ fn reachable_ids(&self, cnum: CrateNum) -> Vec<DefId>;
+ fn is_no_builtins(&self, cnum: CrateNum) -> bool;
// resolve
fn def_index_for_def_key(&self,
- cnum: ast::CrateNum,
+ cnum: CrateNum,
def: DefKey)
-> Option<DefIndex>;
fn def_key(&self, def: DefId) -> hir_map::DefKey;
fn relative_def_path(&self, def: DefId) -> Option<hir_map::DefPath>;
- fn variant_kind(&self, def_id: DefId) -> Option<VariantKind>;
+ fn variant_kind(&self, def_id: DefId) -> Option<ty::VariantKind>;
fn struct_ctor_def_id(&self, struct_def_id: DefId) -> Option<DefId>;
- fn tuple_struct_definition_if_ctor(&self, did: DefId) -> Option<DefId>;
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>;
- fn item_children(&self, did: DefId) -> Vec<ChildItem>;
- fn crate_top_level_items(&self, cnum: ast::CrateNum) -> Vec<ChildItem>;
+ fn item_children(&self, did: DefId) -> Vec<def::Export>;
// misc. metadata
fn maybe_get_item_ast<'a>(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
// This is basically a 1-based range of ints, which is a little
// silly - I may fix that.
- fn crates(&self) -> Vec<ast::CrateNum>;
+ fn crates(&self) -> Vec<CrateNum>;
fn used_libraries(&self) -> Vec<(String, NativeLibraryKind)>;
fn used_link_args(&self) -> Vec<String>;
// utility functions
fn metadata_filename(&self) -> &str;
fn metadata_section_name(&self, target: &Target) -> &str;
- fn encode_type<'a>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String)
- -> Vec<u8>;
- fn used_crates(&self, prefer: LinkagePreference) -> Vec<(ast::CrateNum, Option<PathBuf>)>;
- fn used_crate_source(&self, cnum: ast::CrateNum) -> CrateSource;
- fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<ast::CrateNum>;
+ fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, Option<PathBuf>)>;
+ fn used_crate_source(&self, cnum: CrateNum) -> CrateSource;
+ fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>;
fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
reexports: &def::ExportMap,
link_meta: &LinkMeta,
reachable: &NodeSet,
- mir_map: &MirMap<'tcx>,
- krate: &hir::Crate) -> Vec<u8>;
+ mir_map: &MirMap<'tcx>) -> Vec<u8>;
fn metadata_encoding_version(&self) -> &[u8];
}
#[allow(unused_variables)]
impl<'tcx> CrateStore<'tcx> for DummyCrateStore {
// item info
+ fn describe_def(&self, def: DefId) -> Option<Def> { bug!("describe_def") }
fn stability(&self, def: DefId) -> Option<attr::Stability> { bug!("stability") }
fn deprecation(&self, def: DefId) -> Option<attr::Deprecation> { bug!("deprecation") }
fn visibility(&self, def: DefId) -> ty::Visibility { bug!("visibility") }
- fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind { bug!("closure_kind") }
+ fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind { bug!("closure_kind") }
fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
-> ty::ClosureTy<'tcx> { bug!("closure_ty") }
fn item_variances(&self, def: DefId) -> Vec<ty::Variance> { bug!("item_variances") }
- fn repr_attrs(&self, def: DefId) -> Vec<attr::ReprAttr> { bug!("repr_attrs") }
fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> Ty<'tcx> { bug!("item_type") }
fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap<DefId>> {
bug!("visible_parent_map")
}
- fn item_name(&self, def: DefId) -> ast::Name { bug!("item_name") }
- fn opt_item_name(&self, def: DefId) -> Option<ast::Name> { bug!("opt_item_name") }
fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> ty::GenericPredicates<'tcx> { bug!("item_predicates") }
fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> ty::GenericPredicates<'tcx> { bug!("item_super_predicates") }
fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> &'tcx ty::Generics<'tcx> { bug!("item_generics") }
+ -> ty::Generics<'tcx> { bug!("item_generics") }
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute> { bug!("item_attrs") }
fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx>
{ bug!("trait_def") }
fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>
{ bug!("adt_def") }
- fn method_arg_names(&self, did: DefId) -> Vec<String> { bug!("method_arg_names") }
+ fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name> { bug!("fn_arg_names") }
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId> { vec![] }
// trait info
- fn implementations_of_trait(&self, def_id: DefId) -> Vec<DefId> { vec![] }
- fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> Vec<Rc<ty::Method<'tcx>>> { bug!("provided_trait_methods") }
- fn trait_item_def_ids(&self, def: DefId)
- -> Vec<ty::ImplOrTraitItemId> { bug!("trait_item_def_ids") }
+ fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] }
fn def_index_for_def_key(&self,
- cnum: ast::CrateNum,
+ cnum: CrateNum,
def: DefKey)
-> Option<DefIndex> {
None
}
// impl info
- fn impl_items(&self, impl_def_id: DefId) -> Vec<ty::ImplOrTraitItemId>
- { bug!("impl_items") }
+ fn impl_or_trait_items(&self, def_id: DefId) -> Vec<DefId>
+ { bug!("impl_or_trait_items") }
fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> Option<ty::TraitRef<'tcx>> { bug!("impl_trait_ref") }
- fn impl_polarity(&self, def: DefId) -> Option<hir::ImplPolarity> { bug!("impl_polarity") }
+ fn impl_polarity(&self, def: DefId) -> hir::ImplPolarity { bug!("impl_polarity") }
fn custom_coerce_unsized_kind(&self, def: DefId)
-> Option<ty::adjustment::CustomCoerceUnsized>
{ bug!("custom_coerce_unsized_kind") }
- fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> Vec<Rc<ty::AssociatedConst<'tcx>>> { bug!("associated_consts") }
fn impl_parent(&self, def: DefId) -> Option<DefId> { bug!("impl_parent") }
// trait/impl-item info
// flags
fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") }
fn is_defaulted_trait(&self, did: DefId) -> bool { bug!("is_defaulted_trait") }
- fn is_impl(&self, did: DefId) -> bool { bug!("is_impl") }
fn is_default_impl(&self, impl_did: DefId) -> bool { bug!("is_default_impl") }
fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool
{ bug!("is_extern_item") }
fn is_foreign_item(&self, did: DefId) -> bool { bug!("is_foreign_item") }
fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool { false }
- fn is_typedef(&self, did: DefId) -> bool { bug!("is_typedef") }
// crate metadata
- fn dylib_dependency_formats(&self, cnum: ast::CrateNum)
- -> Vec<(ast::CrateNum, LinkagePreference)>
+ fn dylib_dependency_formats(&self, cnum: CrateNum)
+ -> Vec<(CrateNum, LinkagePreference)>
{ bug!("dylib_dependency_formats") }
- fn lang_items(&self, cnum: ast::CrateNum) -> Vec<(DefIndex, usize)>
+ fn lang_items(&self, cnum: CrateNum) -> Vec<(DefIndex, usize)>
{ bug!("lang_items") }
- fn missing_lang_items(&self, cnum: ast::CrateNum) -> Vec<lang_items::LangItem>
+ fn missing_lang_items(&self, cnum: CrateNum) -> Vec<lang_items::LangItem>
{ bug!("missing_lang_items") }
- fn is_staged_api(&self, cnum: ast::CrateNum) -> bool { bug!("is_staged_api") }
- fn is_explicitly_linked(&self, cnum: ast::CrateNum) -> bool { bug!("is_explicitly_linked") }
- fn is_allocator(&self, cnum: ast::CrateNum) -> bool { bug!("is_allocator") }
- fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool { bug!("is_panic_runtime") }
- fn is_compiler_builtins(&self, cnum: ast::CrateNum) -> bool { bug!("is_compiler_builtins") }
- fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy {
+ fn is_staged_api(&self, cnum: CrateNum) -> bool { bug!("is_staged_api") }
+ fn is_explicitly_linked(&self, cnum: CrateNum) -> bool { bug!("is_explicitly_linked") }
+ fn is_allocator(&self, cnum: CrateNum) -> bool { bug!("is_allocator") }
+ fn is_panic_runtime(&self, cnum: CrateNum) -> bool { bug!("is_panic_runtime") }
+ fn is_compiler_builtins(&self, cnum: CrateNum) -> bool { bug!("is_compiler_builtins") }
+ fn panic_strategy(&self, cnum: CrateNum) -> PanicStrategy {
bug!("panic_strategy")
}
- fn extern_crate(&self, cnum: ast::CrateNum) -> Option<ExternCrate> { bug!("extern_crate") }
- fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec<ast::Attribute>
- { bug!("crate_attrs") }
- fn crate_name(&self, cnum: ast::CrateNum) -> InternedString { bug!("crate_name") }
- fn original_crate_name(&self, cnum: ast::CrateNum) -> InternedString {
+ fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate> { bug!("extern_crate") }
+ fn crate_name(&self, cnum: CrateNum) -> InternedString { bug!("crate_name") }
+ fn original_crate_name(&self, cnum: CrateNum) -> InternedString {
bug!("original_crate_name")
}
- fn crate_hash(&self, cnum: ast::CrateNum) -> Svh { bug!("crate_hash") }
- fn crate_disambiguator(&self, cnum: ast::CrateNum)
+ fn crate_hash(&self, cnum: CrateNum) -> Svh { bug!("crate_hash") }
+ fn crate_disambiguator(&self, cnum: CrateNum)
-> InternedString { bug!("crate_disambiguator") }
- fn crate_struct_field_attrs(&self, cnum: ast::CrateNum)
- -> FnvHashMap<DefId, Vec<ast::Attribute>>
- { bug!("crate_struct_field_attrs") }
- fn plugin_registrar_fn(&self, cnum: ast::CrateNum) -> Option<DefId>
+ fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>
{ bug!("plugin_registrar_fn") }
- fn native_libraries(&self, cnum: ast::CrateNum) -> Vec<(NativeLibraryKind, String)>
+ fn native_libraries(&self, cnum: CrateNum) -> Vec<(NativeLibraryKind, String)>
{ bug!("native_libraries") }
- fn reachable_ids(&self, cnum: ast::CrateNum) -> Vec<DefId> { bug!("reachable_ids") }
- fn is_no_builtins(&self, cnum: ast::CrateNum) -> bool { bug!("is_no_builtins") }
+ fn reachable_ids(&self, cnum: CrateNum) -> Vec<DefId> { bug!("reachable_ids") }
+ fn is_no_builtins(&self, cnum: CrateNum) -> bool { bug!("is_no_builtins") }
// resolve
fn def_key(&self, def: DefId) -> hir_map::DefKey { bug!("def_key") }
fn relative_def_path(&self, def: DefId) -> Option<hir_map::DefPath> {
bug!("relative_def_path")
}
- fn variant_kind(&self, def_id: DefId) -> Option<VariantKind> { bug!("variant_kind") }
+ fn variant_kind(&self, def_id: DefId) -> Option<ty::VariantKind> { bug!("variant_kind") }
fn struct_ctor_def_id(&self, struct_def_id: DefId) -> Option<DefId>
{ bug!("struct_ctor_def_id") }
- fn tuple_struct_definition_if_ctor(&self, did: DefId) -> Option<DefId>
- { bug!("tuple_struct_definition_if_ctor") }
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name> { bug!("struct_field_names") }
- fn item_children(&self, did: DefId) -> Vec<ChildItem> { bug!("item_children") }
- fn crate_top_level_items(&self, cnum: ast::CrateNum) -> Vec<ChildItem>
- { bug!("crate_top_level_items") }
+ fn item_children(&self, did: DefId) -> Vec<def::Export> { bug!("item_children") }
// misc. metadata
fn maybe_get_item_ast<'a>(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
// This is basically a 1-based range of ints, which is a little
// silly - I may fix that.
- fn crates(&self) -> Vec<ast::CrateNum> { vec![] }
+ fn crates(&self) -> Vec<CrateNum> { vec![] }
fn used_libraries(&self) -> Vec<(String, NativeLibraryKind)> { vec![] }
fn used_link_args(&self) -> Vec<String> { vec![] }
// utility functions
fn metadata_filename(&self) -> &str { bug!("metadata_filename") }
fn metadata_section_name(&self, target: &Target) -> &str { bug!("metadata_section_name") }
- fn encode_type<'a>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String)
- -> Vec<u8> {
- bug!("encode_type")
- }
- fn used_crates(&self, prefer: LinkagePreference) -> Vec<(ast::CrateNum, Option<PathBuf>)>
+ fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, Option<PathBuf>)>
{ vec![] }
- fn used_crate_source(&self, cnum: ast::CrateNum) -> CrateSource { bug!("used_crate_source") }
- fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<ast::CrateNum> { None }
+ fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") }
+ fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum> { None }
fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
reexports: &def::ExportMap,
link_meta: &LinkMeta,
reachable: &NodeSet,
- mir_map: &MirMap<'tcx>,
- krate: &hir::Crate) -> Vec<u8> { vec![] }
+ mir_map: &MirMap<'tcx>) -> Vec<u8> { vec![] }
fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
}
-pub trait MacroLoader {
- fn load_crate(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec<LoadedMacro>;
+pub enum LoadedMacro {
+ Def(ast::MacroDef),
+ CustomDerive(String, Rc<MultiItemModifier>),
}
-/// Metadata encoding and decoding can make use of thread-local encoding and
-/// decoding contexts. These allow implementers of serialize::Encodable and
-/// Decodable to access information and datastructures that would otherwise not
-/// be available to them. For example, we can automatically translate def-id and
-/// span information during decoding because the decoding context knows which
-/// crate the data is decoded from. Or it allows to make ty::Ty decodable
-/// because the context has access to the TyCtxt that is needed for creating
-/// ty::Ty instances.
-///
-/// Note, however, that this only works for RBML-based encoding and decoding at
-/// the moment.
-pub mod tls {
- use rbml::opaque::Encoder as OpaqueEncoder;
- use rbml::opaque::Decoder as OpaqueDecoder;
- use serialize;
- use std::cell::Cell;
- use std::mem;
- use ty::{self, Ty, TyCtxt};
- use ty::subst::Substs;
- use hir::def_id::DefId;
-
- pub trait EncodingContext<'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>;
- fn encode_ty(&self, encoder: &mut OpaqueEncoder, t: Ty<'tcx>);
- fn encode_substs(&self, encoder: &mut OpaqueEncoder, substs: &Substs<'tcx>);
- }
-
- /// Marker type used for the TLS slot.
- /// The type context cannot be used directly because the TLS
- /// in libstd doesn't allow types generic over lifetimes.
- struct TlsPayload;
-
- thread_local! {
- static TLS_ENCODING: Cell<Option<*const TlsPayload>> = Cell::new(None)
- }
-
- /// Execute f after pushing the given EncodingContext onto the TLS stack.
- pub fn enter_encoding_context<'tcx, F, R>(ecx: &EncodingContext<'tcx>,
- encoder: &mut OpaqueEncoder,
- f: F) -> R
- where F: FnOnce(&EncodingContext<'tcx>, &mut OpaqueEncoder) -> R
- {
- let tls_payload = (ecx as *const _, encoder as *mut _);
- let tls_ptr = &tls_payload as *const _ as *const TlsPayload;
- TLS_ENCODING.with(|tls| {
- let prev = tls.get();
- tls.set(Some(tls_ptr));
- let ret = f(ecx, encoder);
- tls.set(prev);
- return ret
- })
- }
-
- /// Execute f with access to the thread-local encoding context and
- /// rbml encoder. This function will panic if the encoder passed in and the
- /// context encoder are not the same.
- ///
- /// Note that this method is 'practically' safe due to its checking that the
- /// encoder passed in is the same as the one in TLS, but it would still be
- /// possible to construct cases where the EncodingContext is exchanged
- /// while the same encoder is used, thus working with a wrong context.
- pub fn with_encoding_context<'tcx, E, F, R>(encoder: &mut E, f: F) -> R
- where F: FnOnce(&EncodingContext<'tcx>, &mut OpaqueEncoder) -> R,
- E: serialize::Encoder
- {
- unsafe {
- unsafe_with_encoding_context(|ecx, tls_encoder| {
- assert!(encoder as *mut _ as usize == tls_encoder as *mut _ as usize);
-
- let ecx: &EncodingContext<'tcx> = mem::transmute(ecx);
-
- f(ecx, tls_encoder)
- })
- }
- }
-
- /// Execute f with access to the thread-local encoding context and
- /// rbml encoder.
- pub unsafe fn unsafe_with_encoding_context<F, R>(f: F) -> R
- where F: FnOnce(&EncodingContext, &mut OpaqueEncoder) -> R
- {
- TLS_ENCODING.with(|tls| {
- let tls = tls.get().unwrap();
- let tls_payload = tls as *mut (&EncodingContext, &mut OpaqueEncoder);
- f((*tls_payload).0, (*tls_payload).1)
- })
- }
-
- pub trait DecodingContext<'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>;
- fn decode_ty(&self, decoder: &mut OpaqueDecoder) -> ty::Ty<'tcx>;
- fn decode_substs(&self, decoder: &mut OpaqueDecoder) -> &'tcx Substs<'tcx>;
- fn translate_def_id(&self, def_id: DefId) -> DefId;
- }
-
- thread_local! {
- static TLS_DECODING: Cell<Option<*const TlsPayload>> = Cell::new(None)
- }
-
- /// Execute f after pushing the given DecodingContext onto the TLS stack.
- pub fn enter_decoding_context<'tcx, F, R>(dcx: &DecodingContext<'tcx>,
- decoder: &mut OpaqueDecoder,
- f: F) -> R
- where F: FnOnce(&DecodingContext<'tcx>, &mut OpaqueDecoder) -> R
- {
- let tls_payload = (dcx as *const _, decoder as *mut _);
- let tls_ptr = &tls_payload as *const _ as *const TlsPayload;
- TLS_DECODING.with(|tls| {
- let prev = tls.get();
- tls.set(Some(tls_ptr));
- let ret = f(dcx, decoder);
- tls.set(prev);
- return ret
- })
- }
-
- /// Execute f with access to the thread-local decoding context and
- /// rbml decoder. This function will panic if the decoder passed in and the
- /// context decoder are not the same.
- ///
- /// Note that this method is 'practically' safe due to its checking that the
- /// decoder passed in is the same as the one in TLS, but it would still be
- /// possible to construct cases where the DecodingContext is exchanged
- /// while the same decoder is used, thus working with a wrong context.
- pub fn with_decoding_context<'decoder, 'tcx, D, F, R>(d: &'decoder mut D, f: F) -> R
- where D: serialize::Decoder,
- F: FnOnce(&DecodingContext<'tcx>,
- &mut OpaqueDecoder) -> R,
- 'tcx: 'decoder
- {
- unsafe {
- unsafe_with_decoding_context(|dcx, decoder| {
- assert!((d as *mut _ as usize) == (decoder as *mut _ as usize));
-
- let dcx: &DecodingContext<'tcx> = mem::transmute(dcx);
-
- f(dcx, decoder)
- })
- }
- }
-
- /// Execute f with access to the thread-local decoding context and
- /// rbml decoder.
- pub unsafe fn unsafe_with_decoding_context<F, R>(f: F) -> R
- where F: FnOnce(&DecodingContext, &mut OpaqueDecoder) -> R
- {
- TLS_DECODING.with(|tls| {
- let tls = tls.get().unwrap();
- let tls_payload = tls as *mut (&DecodingContext, &mut OpaqueDecoder);
- f((*tls_payload).0, (*tls_payload).1)
- })
- }
+pub trait CrateLoader {
+ fn load_macros(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec<LoadedMacro>;
+ fn process_item(&mut self, item: &ast::Item, defs: &Definitions);
+ fn postprocess(&mut self, krate: &ast::Crate);
}
ps: &mut pprust::State,
node: pprust::AnnNode) -> io::Result<()> {
let id = match node {
- pprust::NodeName(_) => 0,
+ pprust::NodeName(_) => ast::CRATE_NODE_ID,
pprust::NodeExpr(expr) => expr.id,
pprust::NodeBlock(blk) => blk.id,
- pprust::NodeItem(_) | pprust::NodeSubItem(_) => 0,
+ pprust::NodeItem(_) | pprust::NodeSubItem(_) => ast::CRATE_NODE_ID,
pprust::NodePat(pat) => pat.id
};
_ if self.ignore_non_const_paths => (),
Def::PrimTy(_) => (),
Def::SelfTy(..) => (),
- Def::Variant(enum_id, variant_id) => {
- self.check_def_id(enum_id);
+ Def::Variant(variant_id) => {
+ if let Some(enum_id) = self.tcx.parent_def_id(variant_id) {
+ self.check_def_id(enum_id);
+ }
if !self.ignore_variant_stack.contains(&variant_id) {
self.check_def_id(variant_id);
}
// This is done to handle the case where, for example, the static
// method of a private type is used, but the type itself is never
// called directly.
- let impl_items = self.tcx.impl_items.borrow();
+ let impl_items = self.tcx.impl_or_trait_item_def_ids.borrow();
if let Some(impl_list) =
self.tcx.inherent_impls.borrow().get(&self.tcx.map.local_def_id(id)) {
for impl_did in impl_list.iter() {
- for item_did in impl_items.get(impl_did).unwrap().iter() {
- if let Some(item_node_id) =
- self.tcx.map.as_local_node_id(item_did.def_id()) {
+ for &item_did in &impl_items[impl_did][..] {
+ if let Some(item_node_id) = self.tcx.map.as_local_node_id(item_did) {
if self.live_symbols.contains(&item_node_id) {
return true;
}
//! Additionally, the algorithm is geared towards finding *any* solution rather
//! than finding a number of solutions (there are normally quite a few).
-use syntax::ast;
+use hir::def_id::CrateNum;
use session;
-use session::config::{self, PanicStrategy};
+use session::config;
use middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic};
use util::nodemap::FnvHashMap;
+use rustc_back::PanicStrategy;
/// A list of dependencies for a certain crate type.
///
}
// Collect what we've got so far in the return vector.
- let last_crate = sess.cstore.crates().len() as ast::CrateNum;
+ let last_crate = sess.cstore.crates().len();
let mut ret = (1..last_crate+1).map(|cnum| {
- match formats.get(&cnum) {
+ match formats.get(&CrateNum::new(cnum)) {
Some(&RequireDynamic) => Linkage::Dynamic,
Some(&RequireStatic) => Linkage::IncludedFromDylib,
None => Linkage::NotLinked,
assert!(src.rlib.is_some());
info!("adding staticlib: {}", sess.cstore.crate_name(cnum));
add_library(sess, cnum, RequireStatic, &mut formats);
- ret[cnum as usize - 1] = Linkage::Static;
+ ret[cnum.as_usize() - 1] = Linkage::Static;
}
}
// For situations like this, we perform one last pass over the dependencies,
// making sure that everything is available in the requested format.
for (cnum, kind) in ret.iter().enumerate() {
- let cnum = (cnum + 1) as ast::CrateNum;
+ let cnum = CrateNum::new(cnum + 1);
let src = sess.cstore.used_crate_source(cnum);
match *kind {
Linkage::NotLinked |
}
fn add_library(sess: &session::Session,
- cnum: ast::CrateNum,
+ cnum: CrateNum,
link: LinkagePreference,
- m: &mut FnvHashMap<ast::CrateNum, LinkagePreference>) {
+ m: &mut FnvHashMap<CrateNum, LinkagePreference>) {
match m.get(&cnum) {
Some(&link2) => {
// If the linkages differ, then we'd have two copies of the library
// All crates are available in an rlib format, so we're just going to link
// everything in explicitly so long as it's actually required.
- let last_crate = sess.cstore.crates().len() as ast::CrateNum;
+ let last_crate = sess.cstore.crates().len();
let mut ret = (1..last_crate+1).map(|cnum| {
- if sess.cstore.is_explicitly_linked(cnum) {
+ if sess.cstore.is_explicitly_linked(CrateNum::new(cnum)) {
Linkage::Static
} else {
Linkage::NotLinked
// a required dependency) in one of the session's field. If this field is not
// set then this compilation doesn't actually need the dependency and we can
// also skip this step entirely.
-fn activate_injected_dep(injected: Option<ast::CrateNum>,
+fn activate_injected_dep(injected: Option<CrateNum>,
list: &mut DependencyList,
- replaces_injected: &Fn(ast::CrateNum) -> bool) {
+ replaces_injected: &Fn(CrateNum) -> bool) {
for (i, slot) in list.iter().enumerate() {
- let cnum = (i + 1) as ast::CrateNum;
+ let cnum = CrateNum::new(i + 1);
if !replaces_injected(cnum) {
continue
}
}
}
if let Some(injected) = injected {
- let idx = injected as usize - 1;
+ let idx = injected.as_usize() - 1;
assert_eq!(list[idx], Linkage::NotLinked);
list[idx] = Linkage::Static;
}
if let Linkage::NotLinked = *linkage {
continue
}
- let cnum = (i + 1) as ast::CrateNum;
+ let cnum = CrateNum::new(i + 1);
if sess.cstore.is_allocator(cnum) {
if let Some(prev) = allocator {
let prev_name = sess.cstore.crate_name(prev);
// only one, but we perform validation here that all the panic strategy
// compilation modes for the whole DAG are valid.
if let Some((cnum, found_strategy)) = panic_runtime {
- let desired_strategy = sess.opts.cg.panic.clone();
+ let desired_strategy = sess.panic_strategy();
// First up, validate that our selected panic runtime is indeed exactly
// our same strategy.
if desired_strategy == PanicStrategy::Abort {
continue
}
- let cnum = (i + 1) as ast::CrateNum;
+ let cnum = CrateNum::new(i + 1);
let found_strategy = sess.cstore.panic_strategy(cnum);
if desired_strategy == found_strategy {
continue
// the leaves of the pattern tree structure.
return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| {
match tcx.expect_def_or_none(pat.id) {
- Some(Def::Variant(enum_did, variant_did)) => {
+ Some(Def::Variant(variant_did)) => {
+ let enum_did = tcx.parent_def_id(variant_did).unwrap();
let downcast_cmt = if tcx.lookup_adt_def(enum_did).is_univariant() {
cmt_pat
} else {
self.tcx().with_freevars(closure_expr.id, |freevars| {
for freevar in freevars {
- let id_var = freevar.def.var_id();
+ let def_id = freevar.def.def_id();
+ let id_var = self.tcx().map.as_local_node_id(def_id).unwrap();
let upvar_id = ty::UpvarId { var_id: id_var,
closure_expr_id: closure_expr.id };
let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap();
-> mc::McResult<mc::cmt<'tcx>> {
// Create the cmt for the variable being borrowed, from the
// caller's perspective
- let var_id = upvar_def.var_id();
+ let var_id = self.tcx().map.as_local_node_id(upvar_def.def_id()).unwrap();
let var_ty = self.mc.infcx.node_ty(var_id)?;
self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def)
}
}
};
- span_err!(self.infcx.tcx.sess, span, E0512,
+ struct_span_err!(self.infcx.tcx.sess, span, E0512,
"transmute called with differently sized types: \
{} ({}) to {} ({})",
from, skeleton_string(from, sk_from),
- to, skeleton_string(to, sk_to));
+ to, skeleton_string(to, sk_to))
+ .span_label(span,
+ &format!("transmuting between {} and {}",
+ skeleton_string(from, sk_from),
+ skeleton_string(to, sk_to)))
+ .emit();
}
}
enum_from_u32! {
- #[derive(Copy, Clone, PartialEq, Eq, Hash)]
+ #[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum LangItem {
$($variant,)*
}
let mut call_caps = Vec::new();
ir.tcx.with_freevars(expr.id, |freevars| {
for fv in freevars {
- if let Def::Local(_, rv) = fv.def {
+ if let Def::Local(def_id) = fv.def {
+ let rv = ir.tcx.map.as_local_node_id(def_id).unwrap();
let fv_ln = ir.add_live_node(FreeVarNode(fv.span));
call_caps.push(CaptureInfo {ln: fv_ln,
var_nid: rv});
fn access_path(&mut self, expr: &Expr, succ: LiveNode, acc: u32)
-> LiveNode {
match self.ir.tcx.expect_def(expr.id) {
- Def::Local(_, nid) => {
+ Def::Local(def_id) => {
+ let nid = self.ir.tcx.map.as_local_node_id(def_id).unwrap();
let ln = self.live_node(expr.id, expr.span);
if acc != 0 {
self.init_from_succ(ln, succ);
fn check_lvalue(&mut self, expr: &Expr) {
match expr.node {
hir::ExprPath(..) => {
- if let Def::Local(_, nid) = self.ir.tcx.expect_def(expr.id) {
+ if let Def::Local(def_id) = self.ir.tcx.expect_def(expr.id) {
// Assignment to an immutable variable or argument: only legal
// if there is no later assignment. If this local is actually
// mutable, then check for a reassignment to flag the mutability
// as being used.
+ let nid = self.ir.tcx.map.as_local_node_id(def_id).unwrap();
let ln = self.live_node(expr.id, expr.span);
let var = self.variable(nid, expr.span);
self.warn_about_dead_assign(expr.span, expr.id, ln, var);
Ok(self.cat_rvalue_node(id, span, expr_ty))
}
- Def::Mod(_) | Def::ForeignMod(_) |
+ Def::Mod(_) |
Def::Trait(_) | Def::Enum(..) | Def::TyAlias(..) | Def::PrimTy(_) |
Def::TyParam(..) |
Def::Label(_) | Def::SelfTy(..) |
}))
}
- Def::Upvar(_, var_id, _, fn_node_id) => {
+ Def::Upvar(def_id, _, fn_node_id) => {
+ let var_id = self.tcx().map.as_local_node_id(def_id).unwrap();
let ty = self.node_ty(fn_node_id)?;
match ty.sty {
ty::TyClosure(closure_id, _) => {
}
}
- Def::Local(_, vid) => {
+ Def::Local(def_id) => {
+ let vid = self.tcx().map.as_local_node_id(def_id).unwrap();
Ok(Rc::new(cmt_ {
id: id,
span: span,
// alone) because PatKind::Struct can also refer to variants.
let cmt = match self.tcx().expect_def_or_none(pat.id) {
Some(Def::Err) => return Err(()),
- Some(Def::Variant(enum_did, variant_did))
+ Some(Def::Variant(variant_did)) => {
// univariant enums do not need downcasts
- if !self.tcx().lookup_adt_def(enum_did).is_univariant() => {
+ let enum_did = self.tcx().parent_def_id(variant_did).unwrap();
+ if !self.tcx().lookup_adt_def(enum_did).is_univariant() {
self.cat_downcast(pat, cmt.clone(), cmt.ty, variant_did)
+ } else {
+ cmt
}
+ }
_ => cmt
};
match pat.node {
PatKind::TupleStruct(_, ref subpats, ddpos) => {
let expected_len = match self.tcx().expect_def(pat.id) {
- Def::Variant(enum_def, def_id) => {
+ Def::Variant(def_id) => {
+ let enum_def = self.tcx().parent_def_id(def_id).unwrap();
self.tcx().lookup_adt_def(enum_def).variant_with_id(def_id).fields.len()
}
Def::Struct(..) => {
use hir::map as ast_map;
use session::Session;
use util::nodemap::{FnvHashMap, NodeMap, NodeSet};
-use middle::cstore::InlinedItem;
use ty;
use std::cell::RefCell;
}
return maps;
}
-
-pub fn resolve_inlined_item(sess: &Session,
- region_maps: &RegionMaps,
- item: &InlinedItem) {
- let mut visitor = RegionResolutionVisitor {
- sess: sess,
- region_maps: region_maps,
- cx: Context {
- root_id: None,
- parent: ROOT_CODE_EXTENT,
- var_parent: ROOT_CODE_EXTENT
- },
- terminating_scopes: NodeSet()
- };
- item.visit(&mut visitor);
-}
if !self.trait_ref_hack || !trait_ref.bound_lifetimes.is_empty() {
if self.trait_ref_hack {
- println!("{:?}", trait_ref.span);
span_err!(self.sess, trait_ref.span, E0316,
"nested quantification of lifetimes");
}
use hir::map as hir_map;
use session::Session;
use lint;
-use middle::cstore::LOCAL_CRATE;
use hir::def::Def;
-use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
+use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE};
use ty::{self, TyCtxt, AdtKind};
use middle::privacy::AccessLevels;
use syntax::parse::token::InternedString;
depr_map: DefIdMap<Option<DeprecationEntry>>,
/// Maps for each crate whether it is part of the staged API.
- staged_api: FnvHashMap<ast::CrateNum, bool>
+ staged_api: FnvHashMap<CrateNum, bool>
}
// A private tree-walker for producing an Index.
&feature, &r),
None => format!("use of unstable library feature '{}'", &feature)
};
- emit_feature_err(&self.tcx.sess.parse_sess.span_diagnostic,
- &feature, span, GateIssue::Library(Some(issue)), &msg);
+ emit_feature_err(&self.tcx.sess.parse_sess, &feature, span,
+ GateIssue::Library(Some(issue)), &msg);
}
}
Some(&Stability { ref level, ref feature, .. }) => {
fn is_staged_api<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> bool {
match tcx.trait_item_of_item(id) {
- Some(ty::MethodTraitItemId(trait_method_id))
- if trait_method_id != id => {
- is_staged_api(tcx, trait_method_id)
- }
+ Some(trait_method_id) if trait_method_id != id => {
+ is_staged_api(tcx, trait_method_id)
+ }
_ => {
*tcx.stability.borrow_mut().staged_api.entry(id.krate).or_insert_with(
|| tcx.sess.cstore.is_staged_api(id.krate))
//! Validity checking for weak lang items
-use session::config::{self, PanicStrategy};
+use session::config;
use session::Session;
use middle::lang_items;
+use rustc_back::PanicStrategy;
use syntax::ast;
use syntax::parse::token::InternedString;
use syntax_pos::Span;
// symbols. Other panic runtimes ensure that the relevant symbols are
// available to link things together, but they're never exercised.
let mut whitelisted = HashSet::new();
- if sess.opts.cg.panic != PanicStrategy::Unwind {
+ if sess.panic_strategy() != PanicStrategy::Unwind {
whitelisted.insert(lang_items::EhPersonalityLangItem);
whitelisted.insert(lang_items::EhUnwindResumeLangItem);
}
/// Rvalues promoted from this function, such as borrows of constants.
/// Each of them is the Mir of a constant with the fn's type parameters
- /// in scope, but no vars or args and a separate set of temps.
+ /// in scope, but a separate set of locals.
pub promoted: IndexVec<Promoted, Mir<'tcx>>,
/// Return type of the function.
pub return_ty: Ty<'tcx>,
- /// Variables: these are stack slots corresponding to user variables. They may be
- /// assigned many times.
- pub var_decls: IndexVec<Var, VarDecl<'tcx>>,
-
- /// Args: these are stack slots corresponding to the input arguments.
- pub arg_decls: IndexVec<Arg, ArgDecl<'tcx>>,
+ /// Declarations of locals.
+ ///
+ /// The first local is the return value pointer, followed by `arg_count`
+ /// locals for the function arguments, followed by any user-declared
+ /// variables and temporaries.
+ pub local_decls: IndexVec<Local, LocalDecl<'tcx>>,
- /// Temp declarations: stack slots that for temporaries created by
- /// the compiler. These are assigned once, but they are not SSA
- /// values in that it is possible to borrow them and mutate them
- /// through the resulting reference.
- pub temp_decls: IndexVec<Temp, TempDecl<'tcx>>,
+ /// Number of arguments this function takes.
+ ///
+ /// Starting at local 1, `arg_count` locals will be provided by the caller
+ /// and can be assumed to be initialized.
+ ///
+ /// If this MIR was built for a constant, this will be 0.
+ pub arg_count: usize,
/// Names and capture modes of all the closure upvars, assuming
/// the first argument is either the closure or a reference to it.
pub upvar_decls: Vec<UpvarDecl>,
+ /// Mark an argument local (which must be a tuple) as getting passed as
+ /// its individual components at the LLVM level.
+ ///
+ /// This is used for the "rust-call" ABI.
+ pub spread_arg: Option<Local>,
+
/// A span representing this MIR, for error reporting
pub span: Span,
visibility_scopes: IndexVec<VisibilityScope, VisibilityScopeData>,
promoted: IndexVec<Promoted, Mir<'tcx>>,
return_ty: Ty<'tcx>,
- var_decls: IndexVec<Var, VarDecl<'tcx>>,
- arg_decls: IndexVec<Arg, ArgDecl<'tcx>>,
- temp_decls: IndexVec<Temp, TempDecl<'tcx>>,
+ local_decls: IndexVec<Local, LocalDecl<'tcx>>,
+ arg_count: usize,
upvar_decls: Vec<UpvarDecl>,
span: Span) -> Self
{
+ // We need `arg_count` locals, and one for the return pointer
+ assert!(local_decls.len() >= arg_count + 1,
+ "expected at least {} locals, got {}", arg_count + 1, local_decls.len());
+ assert_eq!(local_decls[RETURN_POINTER].ty, return_ty);
+
Mir {
basic_blocks: basic_blocks,
visibility_scopes: visibility_scopes,
promoted: promoted,
return_ty: return_ty,
- var_decls: var_decls,
- arg_decls: arg_decls,
- temp_decls: temp_decls,
+ local_decls: local_decls,
+ arg_count: arg_count,
upvar_decls: upvar_decls,
+ spread_arg: None,
span: span,
cache: Cache::new()
}
dominators(self)
}
- /// Maps locals (Arg's, Var's, Temp's and ReturnPointer, in that order)
- /// to their index in the whole list of locals. This is useful if you
- /// want to treat all locals the same instead of repeating yourself.
- pub fn local_index(&self, lvalue: &Lvalue<'tcx>) -> Option<Local> {
- let idx = match *lvalue {
- Lvalue::Arg(arg) => arg.index(),
- Lvalue::Var(var) => {
- self.arg_decls.len() +
- var.index()
- }
- Lvalue::Temp(temp) => {
- self.arg_decls.len() +
- self.var_decls.len() +
- temp.index()
+ #[inline]
+ pub fn local_kind(&self, local: Local) -> LocalKind {
+ let index = local.0 as usize;
+ if index == 0 {
+ debug_assert!(self.local_decls[local].mutability == Mutability::Mut,
+ "return pointer should be mutable");
+
+ LocalKind::ReturnPointer
+ } else if index < self.arg_count + 1 {
+ LocalKind::Arg
+ } else if self.local_decls[local].name.is_some() {
+ LocalKind::Var
+ } else {
+ debug_assert!(self.local_decls[local].mutability == Mutability::Mut,
+ "temp should be mutable");
+
+ LocalKind::Temp
+ }
+ }
+
+ /// Returns an iterator over all temporaries.
+ #[inline]
+ pub fn temps_iter<'a>(&'a self) -> impl Iterator<Item=Local> + 'a {
+ (self.arg_count+1..self.local_decls.len()).filter_map(move |index| {
+ let local = Local::new(index);
+ if self.local_decls[local].source_info.is_none() {
+ Some(local)
+ } else {
+ None
}
- Lvalue::ReturnPointer => {
- self.arg_decls.len() +
- self.var_decls.len() +
- self.temp_decls.len()
+ })
+ }
+
+ /// Returns an iterator over all user-declared locals.
+ #[inline]
+ pub fn vars_iter<'a>(&'a self) -> impl Iterator<Item=Local> + 'a {
+ (self.arg_count+1..self.local_decls.len()).filter_map(move |index| {
+ let local = Local::new(index);
+ if self.local_decls[local].source_info.is_none() {
+ None
+ } else {
+ Some(local)
}
- Lvalue::Static(_) |
- Lvalue::Projection(_) => return None
- };
- Some(Local::new(idx))
+ })
}
- /// Counts the number of locals, such that that local_index
- /// will always return an index smaller than this count.
- pub fn count_locals(&self) -> usize {
- self.arg_decls.len() +
- self.var_decls.len() +
- self.temp_decls.len() + 1
+ /// Returns an iterator over all function arguments.
+ #[inline]
+ pub fn args_iter(&self) -> impl Iterator<Item=Local> {
+ let arg_count = self.arg_count;
+ (1..arg_count+1).map(Local::new)
+ }
+
+ /// Returns an iterator over all user-defined variables and compiler-generated temporaries (all
+ /// locals that are neither arguments nor the return pointer).
+ #[inline]
+ pub fn vars_and_temps_iter(&self) -> impl Iterator<Item=Local> {
+ let arg_count = self.arg_count;
+ let local_count = self.local_decls.len();
+ (arg_count+1..local_count).map(Local::new)
+ }
+
+ /// Changes a statement to a nop. This is both faster than deleting instructions and avoids
+ /// invalidating statement indices in `Location`s.
+ pub fn make_statement_nop(&mut self, location: Location) {
+ let block = &mut self[location.block];
+ debug_assert!(location.statement_index < block.statements.len());
+ block.statements[location.statement_index].make_nop()
}
}
///////////////////////////////////////////////////////////////////////////
// Variables and temps
-/// A "variable" is a binding declared by the user as part of the fn
-/// decl, a let, etc.
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct VarDecl<'tcx> {
- /// `let mut x` vs `let x`
- pub mutability: Mutability,
-
- /// name that user gave the variable; not that, internally,
- /// mir references variables by index
- pub name: Name,
-
- /// type inferred for this variable (`let x: ty = ...`)
- pub ty: Ty<'tcx>,
+newtype_index!(Local, "_");
- /// source information (span, scope, etc.) for the declaration
- pub source_info: SourceInfo,
-}
+pub const RETURN_POINTER: Local = Local(0);
-/// A "temp" is a temporary that we place on the stack. They are
-/// anonymous, always mutable, and have only a type.
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct TempDecl<'tcx> {
- pub ty: Ty<'tcx>,
+/// Classifies locals into categories. See `Mir::local_kind`.
+#[derive(PartialEq, Eq, Debug)]
+pub enum LocalKind {
+ /// User-declared variable binding
+ Var,
+ /// Compiler-introduced temporary
+ Temp,
+ /// Function argument
+ Arg,
+ /// Location of function's return value
+ ReturnPointer,
}
-/// A "arg" is one of the function's formal arguments. These are
-/// anonymous and distinct from the bindings that the user declares.
+/// A MIR local.
///
-/// For example, in this function:
-///
-/// ```
-/// fn foo((x, y): (i32, u32)) { ... }
-/// ```
-///
-/// there is only one argument, of type `(i32, u32)`, but two bindings
-/// (`x` and `y`).
+/// This can be a binding declared by the user, a temporary inserted by the compiler, a function
+/// argument, or the return pointer.
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct ArgDecl<'tcx> {
+pub struct LocalDecl<'tcx> {
+ /// `let mut x` vs `let x`.
+ ///
+ /// Temporaries and the return pointer are always mutable.
+ pub mutability: Mutability,
+
+ /// Type of this local.
pub ty: Ty<'tcx>,
- /// If true, this argument is a tuple after monomorphization,
- /// and has to be collected from multiple actual arguments.
- pub spread: bool,
+ /// Name of the local, used in debuginfo and pretty-printing.
+ ///
+ /// Note that function arguments can also have this set to `Some(_)`
+ /// to generate better debuginfo.
+ pub name: Option<Name>,
- /// Either keywords::Invalid or the name of a single-binding
- /// pattern associated with this argument. Useful for debuginfo.
- pub debug_name: Name
+ /// For user-declared variables, stores their source information.
+ ///
+ /// For temporaries, this is `None`.
+ ///
+ /// This is the primary way to differentiate between user-declared
+ /// variables and compiler-generated temporaries.
+ pub source_info: Option<SourceInfo>,
+}
+
+impl<'tcx> LocalDecl<'tcx> {
+ /// Create a new `LocalDecl` for a temporary.
+ #[inline]
+ pub fn new_temp(ty: Ty<'tcx>) -> Self {
+ LocalDecl {
+ mutability: Mutability::Mut,
+ ty: ty,
+ name: None,
+ source_info: None,
+ }
+ }
+
+ /// Builds a `LocalDecl` for the return pointer.
+ ///
+ /// This must be inserted into the `local_decls` list as the first local.
+ #[inline]
+ pub fn new_return_pointer(return_ty: Ty) -> LocalDecl {
+ LocalDecl {
+ mutability: Mutability::Mut,
+ ty: return_ty,
+ source_info: None,
+ name: None, // FIXME maybe we do want some name here?
+ }
+ }
}
/// A closure capture, with its name and mode.
/// continue. Emitted by build::scope::diverge_cleanup.
Resume,
- /// Indicates a normal return. The ReturnPointer lvalue should
+ /// Indicates a normal return. The return pointer lvalue should
/// have been filled in by now. This should occur at most once.
Return,
pub kind: StatementKind<'tcx>,
}
+impl<'tcx> Statement<'tcx> {
+ /// Changes a statement to a nop. This is both faster than deleting instructions and avoids
+ /// invalidating statement indices in `Location`s.
+ pub fn make_nop(&mut self) {
+ self.kind = StatementKind::Nop
+ }
+}
+
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum StatementKind<'tcx> {
/// Write the RHS Rvalue to the LHS Lvalue.
/// End the current live range for the storage of the local.
StorageDead(Lvalue<'tcx>),
+
+ /// No-op. Useful for deleting instructions without affecting statement indices.
+ Nop,
}
impl<'tcx> Debug for Statement<'tcx> {
SetDiscriminant{lvalue: ref lv, variant_index: index} => {
write!(fmt, "discriminant({:?}) = {:?}", lv, index)
}
+ Nop => write!(fmt, "nop"),
}
}
}
///////////////////////////////////////////////////////////////////////////
// Lvalues
-newtype_index!(Var, "var");
-newtype_index!(Temp, "tmp");
-newtype_index!(Arg, "arg");
-newtype_index!(Local, "local");
-
/// A path to a value; something that can be evaluated without
/// changing or disturbing program state.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum Lvalue<'tcx> {
- /// local variable declared by the user
- Var(Var),
-
- /// temporary introduced during lowering into MIR
- Temp(Temp),
-
- /// formal parameter of the function; note that these are NOT the
- /// bindings that the user declares, which are vars
- Arg(Arg),
+ /// local variable
+ Local(Local),
/// static or static mut variable
Static(DefId),
- /// the return pointer of the fn
- ReturnPointer,
-
/// projection out of an lvalue (access a field, deref a pointer, etc)
Projection(Box<LvalueProjection<'tcx>>),
}
use self::Lvalue::*;
match *self {
- Var(id) => write!(fmt, "{:?}", id),
- Arg(id) => write!(fmt, "{:?}", id),
- Temp(id) => write!(fmt, "{:?}", id),
+ Local(id) => write!(fmt, "{:?}", id),
Static(def_id) =>
write!(fmt, "{}", ty::tls::with(|tcx| tcx.item_path_str(def_id))),
- ReturnPointer =>
- write!(fmt, "return"),
Projection(ref data) =>
match data.elem {
ProjectionElem::Downcast(ref adt_def, index) =>
tcx.with_freevars(node_id, |freevars| {
for (freevar, lv) in freevars.iter().zip(lvs) {
- let var_name = tcx.local_var_name_str(freevar.def.var_id());
+ let def_id = freevar.def.def_id();
+ let var_id = tcx.map.as_local_node_id(def_id).unwrap();
+ let var_name = tcx.local_var_name_str(var_id);
struct_fmt.field(&var_name, lv);
}
});
write!(fmt, "{:?}[{}]", self.block, self.statement_index)
}
}
+
+impl Location {
+ pub fn dominates(&self, other: &Location, dominators: &Dominators<BasicBlock>) -> bool {
+ if self.block == other.block {
+ self.statement_index <= other.statement_index
+ } else {
+ dominators.is_dominated_by(other.block, self.block)
+ }
+ }
+}
-> LvalueTy<'tcx>
{
match *elem {
- ProjectionElem::Deref =>
+ ProjectionElem::Deref => {
+ let ty = self.to_ty(tcx)
+ .builtin_deref(true, ty::LvaluePreference::NoPreference)
+ .unwrap_or_else(|| {
+ bug!("deref projection of non-dereferencable ty {:?}", self)
+ })
+ .ty;
LvalueTy::Ty {
- ty: self.to_ty(tcx).builtin_deref(true, ty::LvaluePreference::NoPreference)
- .unwrap()
- .ty
- },
+ ty: ty,
+ }
+ }
ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. } =>
LvalueTy::Ty {
ty: self.to_ty(tcx).builtin_index().unwrap()
impl<'tcx> Lvalue<'tcx> {
pub fn ty<'a, 'gcx>(&self, mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> LvalueTy<'tcx> {
- match self {
- &Lvalue::Var(index) =>
- LvalueTy::Ty { ty: mir.var_decls[index].ty },
- &Lvalue::Temp(index) =>
- LvalueTy::Ty { ty: mir.temp_decls[index].ty },
- &Lvalue::Arg(index) =>
- LvalueTy::Ty { ty: mir.arg_decls[index].ty },
- &Lvalue::Static(def_id) =>
+ match *self {
+ Lvalue::Local(index) =>
+ LvalueTy::Ty { ty: mir.local_decls[index].ty },
+ Lvalue::Static(def_id) =>
LvalueTy::Ty { ty: tcx.lookup_item_type(def_id).ty },
- &Lvalue::ReturnPointer =>
- LvalueTy::Ty { ty: mir.return_ty },
- &Lvalue::Projection(ref proj) =>
+ Lvalue::Projection(ref proj) =>
proj.base.ty(mir, tcx).projection_ty(tcx, &proj.elem),
}
}
fn visit_lvalue(&mut self,
lvalue: & $($mutability)* Lvalue<'tcx>,
- context: LvalueContext,
+ context: LvalueContext<'tcx>,
location: Location) {
self.super_lvalue(lvalue, context, location);
}
self.super_typed_const_val(val, location);
}
- fn visit_var_decl(&mut self,
- var_decl: & $($mutability)* VarDecl<'tcx>) {
- self.super_var_decl(var_decl);
- }
-
- fn visit_temp_decl(&mut self,
- temp_decl: & $($mutability)* TempDecl<'tcx>) {
- self.super_temp_decl(temp_decl);
- }
-
- fn visit_arg_decl(&mut self,
- arg_decl: & $($mutability)* ArgDecl<'tcx>) {
- self.super_arg_decl(arg_decl);
+ fn visit_local_decl(&mut self,
+ local_decl: & $($mutability)* LocalDecl<'tcx>) {
+ self.super_local_decl(local_decl);
}
fn visit_visibility_scope(&mut self,
self.visit_ty(&$($mutability)* mir.return_ty);
- for var_decl in &$($mutability)* mir.var_decls {
- self.visit_var_decl(var_decl);
- }
-
- for arg_decl in &$($mutability)* mir.arg_decls {
- self.visit_arg_decl(arg_decl);
- }
-
- for temp_decl in &$($mutability)* mir.temp_decls {
- self.visit_temp_decl(temp_decl);
+ for local_decl in &$($mutability)* mir.local_decls {
+ self.visit_local_decl(local_decl);
}
self.visit_span(&$($mutability)* mir.span);
StatementKind::StorageDead(ref $($mutability)* lvalue) => {
self.visit_lvalue(lvalue, LvalueContext::StorageDead, location);
}
+ StatementKind::Nop => {}
}
}
fn super_lvalue(&mut self,
lvalue: & $($mutability)* Lvalue<'tcx>,
- context: LvalueContext,
+ context: LvalueContext<'tcx>,
location: Location) {
match *lvalue {
- Lvalue::Var(_) |
- Lvalue::Temp(_) |
- Lvalue::Arg(_) |
- Lvalue::ReturnPointer => {
+ Lvalue::Local(_) => {
}
Lvalue::Static(ref $($mutability)* def_id) => {
self.visit_def_id(def_id, location);
ref $($mutability)* base,
ref $($mutability)* elem,
} = *proj;
- self.visit_lvalue(base, LvalueContext::Projection, location);
+ let context = if context.is_mutating_use() {
+ LvalueContext::Projection(Mutability::Mut)
+ } else {
+ LvalueContext::Projection(Mutability::Not)
+ };
+ self.visit_lvalue(base, context, location);
self.visit_projection_elem(elem, context, location);
}
}
}
- fn super_var_decl(&mut self,
- var_decl: & $($mutability)* VarDecl<'tcx>) {
- let VarDecl {
+ fn super_local_decl(&mut self,
+ local_decl: & $($mutability)* LocalDecl<'tcx>) {
+ let LocalDecl {
mutability: _,
- name: _,
ref $($mutability)* ty,
+ name: _,
ref $($mutability)* source_info,
- } = *var_decl;
-
- self.visit_ty(ty);
- self.visit_source_info(source_info);
- }
-
- fn super_temp_decl(&mut self,
- temp_decl: & $($mutability)* TempDecl<'tcx>) {
- let TempDecl {
- ref $($mutability)* ty,
- } = *temp_decl;
-
- self.visit_ty(ty);
- }
-
- fn super_arg_decl(&mut self,
- arg_decl: & $($mutability)* ArgDecl<'tcx>) {
- let ArgDecl {
- ref $($mutability)* ty,
- spread: _,
- debug_name: _
- } = *arg_decl;
+ } = *local_decl;
self.visit_ty(ty);
+ if let Some(ref $($mutability)* info) = *source_info {
+ self.visit_source_info(info);
+ }
}
fn super_visibility_scope(&mut self,
fn super_const_usize(&mut self, _substs: & $($mutability)* ConstUsize) {
}
+
+ // Convenience methods
+
+ fn visit_location(&mut self, mir: & $($mutability)* Mir<'tcx>, location: Location) {
+ let basic_block = & $($mutability)* mir[location.block];
+ if basic_block.statements.len() == location.statement_index {
+ if let Some(ref $($mutability)* terminator) = basic_block.terminator {
+ self.visit_terminator(location.block, terminator, location)
+ }
+ } else {
+ let statement = & $($mutability)*
+ basic_block.statements[location.statement_index];
+ self.visit_statement(location.block, statement, location)
+ }
+ }
}
}
}
// Being borrowed
Borrow { region: &'tcx Region, kind: BorrowKind },
- // Used as base for another lvalue, e.g. `x` in `x.y`
- Projection,
+ // Used as base for another lvalue, e.g. `x` in `x.y`.
+ //
+ // The `Mutability` argument specifies whether the projection is being performed in order to
+ // (potentially) mutate the lvalue. For example, the projection `x.y` is marked as a mutation
+ // in these cases:
+ //
+ // x.y = ...;
+ // f(&mut x.y);
+ //
+ // But not in these cases:
+ //
+ // z = x.y;
+ // f(&x.y);
+ Projection(Mutability),
// Consumed as part of an operand
Consume,
StorageLive,
StorageDead,
}
+
+impl<'tcx> LvalueContext<'tcx> {
+ /// Returns true if this lvalue context represents a drop.
+ pub fn is_drop(&self) -> bool {
+ match *self {
+ LvalueContext::Drop => true,
+ _ => false,
+ }
+ }
+
+ /// Returns true if this lvalue context represents a storage live or storage dead marker.
+ pub fn is_storage_marker(&self) -> bool {
+ match *self {
+ LvalueContext::StorageLive | LvalueContext::StorageDead => true,
+ _ => false,
+ }
+ }
+
+ /// Returns true if this lvalue context represents a storage live marker.
+ pub fn is_storage_live_marker(&self) -> bool {
+ match *self {
+ LvalueContext::StorageLive => true,
+ _ => false,
+ }
+ }
+
+ /// Returns true if this lvalue context represents a storage dead marker.
+ pub fn is_storage_dead_marker(&self) -> bool {
+ match *self {
+ LvalueContext::StorageDead => true,
+ _ => false,
+ }
+ }
+
+ /// Returns true if this lvalue context represents a use that potentially changes the value.
+ pub fn is_mutating_use(&self) -> bool {
+ match *self {
+ LvalueContext::Store | LvalueContext::Call |
+ LvalueContext::Borrow { kind: BorrowKind::Mut, .. } |
+ LvalueContext::Projection(Mutability::Mut) |
+ LvalueContext::Drop => true,
+ LvalueContext::Inspect |
+ LvalueContext::Borrow { kind: BorrowKind::Shared, .. } |
+ LvalueContext::Borrow { kind: BorrowKind::Unique, .. } |
+ LvalueContext::Projection(Mutability::Not) | LvalueContext::Consume |
+ LvalueContext::StorageLive | LvalueContext::StorageDead => false,
+ }
+ }
+
+ /// Returns true if this lvalue context represents a use that does not change the value.
+ pub fn is_nonmutating_use(&self) -> bool {
+ match *self {
+ LvalueContext::Inspect | LvalueContext::Borrow { kind: BorrowKind::Shared, .. } |
+ LvalueContext::Borrow { kind: BorrowKind::Unique, .. } |
+ LvalueContext::Projection(Mutability::Not) | LvalueContext::Consume => true,
+ LvalueContext::Borrow { kind: BorrowKind::Mut, .. } | LvalueContext::Store |
+ LvalueContext::Call | LvalueContext::Projection(Mutability::Mut) |
+ LvalueContext::Drop | LvalueContext::StorageLive | LvalueContext::StorageDead => false,
+ }
+ }
+
+ pub fn is_use(&self) -> bool {
+ self.is_mutating_use() || self.is_nonmutating_use()
+ }
+}
+
use session::{early_error, early_warn, Session};
use session::search_paths::SearchPaths;
+use rustc_back::PanicStrategy;
use rustc_back::target::Target;
use lint;
use middle::cstore;
use std::collections::btree_map::Keys as BTreeMapKeysIter;
use std::collections::btree_map::Values as BTreeMapValuesIter;
-use std::env;
use std::fmt;
use std::hash::{Hasher, SipHasher};
use std::iter::FromIterator;
}
}
-#[derive(Clone, PartialEq, Hash)]
-pub enum PanicStrategy {
- Unwind,
- Abort,
-}
-
-impl PanicStrategy {
- pub fn desc(&self) -> &str {
- match *self {
- PanicStrategy::Unwind => "unwind",
- PanicStrategy::Abort => "abort",
- }
- }
-}
-
/// Declare a macro that will define all CodegenOptions/DebuggingOptions fields and parsers all
/// at once. The goal of this macro is to define an interface that can be
/// programmatically used by the option parser in order to initialize the struct
pub const parse_opt_bool: Option<&'static str> =
Some("one of: `y`, `yes`, `on`, `n`, `no`, or `off`");
pub const parse_string: Option<&'static str> = Some("a string");
+ pub const parse_string_push: Option<&'static str> = Some("a string");
pub const parse_opt_string: Option<&'static str> = Some("a string");
pub const parse_list: Option<&'static str> = Some("a space-separated list of strings");
pub const parse_opt_list: Option<&'static str> = Some("a space-separated list of strings");
#[allow(dead_code)]
mod $mod_set {
- use super::{$struct_name, Passes, SomePasses, AllPasses, PanicStrategy};
+ use super::{$struct_name, Passes, SomePasses, AllPasses};
+ use rustc_back::PanicStrategy;
$(
pub fn $opt(cg: &mut $struct_name, v: Option<&str>) -> bool {
}
}
+ fn parse_string_push(slot: &mut Vec<String>, v: Option<&str>) -> bool {
+ match v {
+ Some(s) => { slot.push(s.to_string()); true },
+ None => false,
+ }
+ }
+
fn parse_list(slot: &mut Vec<String>, v: Option<&str>)
-> bool {
match v {
}
}
- fn parse_panic_strategy(slot: &mut PanicStrategy, v: Option<&str>) -> bool {
+ fn parse_panic_strategy(slot: &mut Option<PanicStrategy>, v: Option<&str>) -> bool {
match v {
- Some("unwind") => *slot = PanicStrategy::Unwind,
- Some("abort") => *slot = PanicStrategy::Abort,
+ Some("unwind") => *slot = Some(PanicStrategy::Unwind),
+ Some("abort") => *slot = Some(PanicStrategy::Abort),
_ => return false
}
true
"tool to assemble archives with"),
linker: Option<String> = (None, parse_opt_string, [UNTRACKED],
"system linker to link outputs with"),
+ link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
+ "a single extra argument to pass to the linker (can be used several times)"),
link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
"extra arguments to pass to the linker (space separated)"),
link_dead_code: bool = (false, parse_bool, [UNTRACKED],
no_vectorize_slp: bool = (false, parse_bool, [TRACKED],
"don't run LLVM's SLP vectorization pass"),
soft_float: bool = (false, parse_bool, [TRACKED],
- "generate software floating point library calls"),
+ "use soft float ABI (*eabihf targets only)"),
prefer_dynamic: bool = (false, parse_bool, [TRACKED],
"prefer dynamic linking to static linking"),
no_integrated_as: bool = (false, parse_bool, [TRACKED],
"explicitly enable the cfg(debug_assertions) directive"),
inline_threshold: Option<usize> = (None, parse_opt_uint, [TRACKED],
"set the inlining threshold for"),
- panic: PanicStrategy = (PanicStrategy::Unwind, parse_panic_strategy,
+ panic: Option<PanicStrategy> = (None, parse_panic_strategy,
[TRACKED], "panic strategy to compile crate with"),
}
crate_name: crate_name,
alt_std_name: None,
libs: libs,
- unstable_features: get_unstable_features_setting(),
+ unstable_features: UnstableFeatures::from_environment(),
debug_assertions: debug_assertions,
},
cfg)
}
-pub fn get_unstable_features_setting() -> UnstableFeatures {
- // Whether this is a feature-staged build, i.e. on the beta or stable channel
- let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some();
- // The secret key needed to get through the rustc build itself by
- // subverting the unstable features lints
- let bootstrap_secret_key = option_env!("CFG_BOOTSTRAP_KEY");
- // The matching key to the above, only known by the build system
- let bootstrap_provided_key = env::var("RUSTC_BOOTSTRAP_KEY").ok();
- match (disable_unstable_features, bootstrap_secret_key, bootstrap_provided_key) {
- (_, Some(ref s), Some(ref p)) if s == p => UnstableFeatures::Cheat,
- (true, ..) => UnstableFeatures::Disallow,
- (false, ..) => UnstableFeatures::Allow
- }
-}
-
pub fn parse_crate_types_from_list(list_list: Vec<String>) -> Result<Vec<CrateType>, String> {
let mut crate_types: Vec<CrateType> = Vec::new();
for unparsed_crate_type in &list_list {
pub mod nightly_options {
use getopts;
use syntax::feature_gate::UnstableFeatures;
- use super::{ErrorOutputType, OptionStability, RustcOptGroup, get_unstable_features_setting};
+ use super::{ErrorOutputType, OptionStability, RustcOptGroup};
use session::{early_error, early_warn};
pub fn is_unstable_enabled(matches: &getopts::Matches) -> bool {
}
pub fn is_nightly_build() -> bool {
- match get_unstable_features_setting() {
- UnstableFeatures::Allow | UnstableFeatures::Cheat => true,
- _ => false,
- }
+ UnstableFeatures::from_environment().is_nightly_build()
}
pub fn check_nightly_options(matches: &getopts::Matches, flags: &[RustcOptGroup]) {
let has_z_unstable_option = matches.opt_strs("Z").iter().any(|x| *x == "unstable-options");
- let really_allows_unstable_options = match get_unstable_features_setting() {
- UnstableFeatures::Disallow => false,
- _ => true,
- };
+ let really_allows_unstable_options = UnstableFeatures::from_environment()
+ .is_nightly_build();
for opt in flags.iter() {
if opt.stability == OptionStability::Stable {
use std::collections::BTreeMap;
use std::hash::{Hash, SipHasher};
use std::path::PathBuf;
- use super::{Passes, PanicStrategy, CrateType, OptLevel, DebugInfoLevel,
+ use super::{Passes, CrateType, OptLevel, DebugInfoLevel,
OutputTypes, Externs, ErrorOutputType};
use syntax::feature_gate::UnstableFeatures;
+ use rustc_back::PanicStrategy;
pub trait DepTrackingHash {
fn hash(&self, &mut SipHasher, ErrorOutputType);
impl_dep_tracking_hash_via_hash!(Option<bool>);
impl_dep_tracking_hash_via_hash!(Option<usize>);
impl_dep_tracking_hash_via_hash!(Option<String>);
+ impl_dep_tracking_hash_via_hash!(Option<PanicStrategy>);
impl_dep_tracking_hash_via_hash!(Option<lint::Level>);
impl_dep_tracking_hash_via_hash!(Option<PathBuf>);
impl_dep_tracking_hash_via_hash!(CrateType);
use std::iter::FromIterator;
use std::path::PathBuf;
use std::rc::Rc;
- use super::{OutputType, OutputTypes, Externs, PanicStrategy};
+ use super::{OutputType, OutputTypes, Externs};
+ use rustc_back::PanicStrategy;
use syntax::{ast, attr};
use syntax::parse::token::InternedString;
use syntax::codemap::dummy_spanned;
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
opts = reference.clone();
- opts.cg.panic = PanicStrategy::Abort;
+ opts.cg.panic = Some(PanicStrategy::Abort);
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
}
// except according to those terms.
use dep_graph::DepGraph;
-use hir::def_id::DefIndex;
+use hir::def_id::{CrateNum, DefIndex};
use hir::svh::Svh;
use lint;
use middle::cstore::CrateStore;
use middle::dependency_format;
use session::search_paths::PathKind;
-use session::config::{DebugInfoLevel, PanicStrategy};
+use session::config::DebugInfoLevel;
use ty::tls;
use util::nodemap::{NodeMap, FnvHashMap};
use util::common::duration_to_secs_str;
use syntax::feature_gate::AttributeType;
use syntax_pos::{Span, MultiSpan};
+use rustc_back::PanicStrategy;
use rustc_back::target::Target;
use rustc_data_structures::flock;
use llvm;
use std::collections::HashMap;
use std::env;
use std::ffi::CString;
+use std::io::Write;
use std::rc::Rc;
use std::fmt;
use std::time::Duration;
/// The metadata::creader module may inject an allocator/panic_runtime
/// dependency if it didn't already find one, and this tracks what was
/// injected.
- pub injected_allocator: Cell<Option<ast::CrateNum>>,
- pub injected_panic_runtime: Cell<Option<ast::CrateNum>>,
+ pub injected_allocator: Cell<Option<CrateNum>>,
+ pub injected_panic_runtime: Cell<Option<CrateNum>>,
/// Map from imported macro spans (which consist of
/// the localized span for the macro body) to the
}
lints.insert(id, vec!((lint_id, sp, msg)));
}
- pub fn reserve_node_ids(&self, count: ast::NodeId) -> ast::NodeId {
+ pub fn reserve_node_ids(&self, count: usize) -> ast::NodeId {
let id = self.next_node_id.get();
- match id.checked_add(count) {
- Some(next) => self.next_node_id.set(next),
+ match id.as_usize().checked_add(count) {
+ Some(next) => {
+ self.next_node_id.set(ast::NodeId::new(next));
+ }
None => bug!("Input too large, ran out of node ids!")
}
pub fn lto(&self) -> bool {
self.opts.cg.lto
}
+ /// Returns the panic strategy for this compile session. If the user explicitly selected one
+ /// using '-C panic', use that, otherwise use the panic strategy defined by the target.
+ pub fn panic_strategy(&self) -> PanicStrategy {
+ self.opts.cg.panic.unwrap_or(self.target.target.options.panic_strategy)
+ }
pub fn no_landing_pads(&self) -> bool {
- self.opts.debugging_opts.no_landing_pads ||
- self.opts.cg.panic == PanicStrategy::Abort
+ self.opts.debugging_opts.no_landing_pads || self.panic_strategy() == PanicStrategy::Abort
}
pub fn unstable_options(&self) -> bool {
self.opts.debugging_opts.unstable_options
local_crate_source_file,
registry,
cstore,
- Rc::new(codemap::CodeMap::new()))
+ Rc::new(codemap::CodeMap::new()),
+ None)
}
pub fn build_session_with_codemap(sopts: config::Options,
local_crate_source_file: Option<PathBuf>,
registry: errors::registry::Registry,
cstore: Rc<for<'a> CrateStore<'a>>,
- codemap: Rc<codemap::CodeMap>)
+ codemap: Rc<codemap::CodeMap>,
+ emitter_dest: Option<Box<Write + Send>>)
-> Session {
// FIXME: This is not general enough to make the warning lint completely override
// normal diagnostic warnings, since the warning lint can also be denied and changed
.unwrap_or(true);
let treat_err_as_bug = sopts.debugging_opts.treat_err_as_bug;
- let emitter: Box<Emitter> = match sopts.error_format {
- config::ErrorOutputType::HumanReadable(color_config) => {
+ let emitter: Box<Emitter> = match (sopts.error_format, emitter_dest) {
+ (config::ErrorOutputType::HumanReadable(color_config), None) => {
Box::new(EmitterWriter::stderr(color_config,
Some(codemap.clone())))
}
- config::ErrorOutputType::Json => {
+ (config::ErrorOutputType::HumanReadable(_), Some(dst)) => {
+ Box::new(EmitterWriter::new(dst,
+ Some(codemap.clone())))
+ }
+ (config::ErrorOutputType::Json, None) => {
Box::new(JsonEmitter::stderr(Some(registry), codemap.clone()))
}
+ (config::ErrorOutputType::Json, Some(dst)) => {
+ Box::new(JsonEmitter::new(dst, Some(registry), codemap.clone()))
+ }
};
let diagnostic_handler =
crate_disambiguator: RefCell::new(token::intern("").as_str()),
features: RefCell::new(feature_gate::Features::new()),
recursion_limit: Cell::new(64),
- next_node_id: Cell::new(1),
+ next_node_id: Cell::new(NodeId::new(1)),
injected_allocator: Cell::new(None),
injected_panic_runtime: Cell::new(None),
imported_macro_spans: RefCell::new(HashMap::new()),
use super::{SelectionContext, Obligation, ObligationCause};
-use middle::cstore::LOCAL_CRATE;
-use hir::def_id::DefId;
+use hir::def_id::{DefId, LOCAL_CRATE};
use ty::{self, Ty, TyCtxt};
use infer::{InferCtxt, TypeOrigin};
use syntax_pos::DUMMY_SP;
pub use self::select::{MethodMatchResult, MethodMatched, MethodAmbiguous, MethodDidNotMatch};
pub use self::select::{MethodMatchedData}; // intentionally don't export variants
pub use self::specialize::{OverlapError, specialization_graph, specializes, translate_substs};
-pub use self::specialize::{SpecializesCache};
+pub use self::specialize::{SpecializesCache, find_method};
pub use self::util::elaborate_predicates;
pub use self::util::supertraits;
pub use self::util::Supertraits;
Ok(resolved_value)
}
+/// Normalizes the predicates and checks whether they hold. If this
+/// returns false, then either normalize encountered an error or one
+/// of the predicates did not hold. Used when creating vtables to
+/// check for unsatisfiable methods.
+pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ predicates: Vec<ty::Predicate<'tcx>>)
+ -> bool
+{
+ debug!("normalize_and_test_predicates(predicates={:?})",
+ predicates);
+
+ tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {
+ let mut selcx = SelectionContext::new(&infcx);
+ let mut fulfill_cx = FulfillmentContext::new();
+ let cause = ObligationCause::dummy();
+ let Normalized { value: predicates, obligations } =
+ normalize(&mut selcx, cause.clone(), &predicates);
+ for obligation in obligations {
+ fulfill_cx.register_predicate_obligation(&infcx, obligation);
+ }
+ for predicate in predicates {
+ let obligation = Obligation::new(cause.clone(), predicate);
+ fulfill_cx.register_predicate_obligation(&infcx, obligation);
+ }
+
+ fulfill_cx.select_all_or_error(&infcx).is_ok()
+ })
+}
+
+/// Given a trait `trait_ref`, iterates the vtable entries
+/// that come from `trait_ref`, including its supertraits.
+#[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait.
+pub fn get_vtable_methods<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ trait_ref: ty::PolyTraitRef<'tcx>)
+ -> impl Iterator<Item=Option<(DefId, &'tcx Substs<'tcx>)>> + 'a
+{
+ debug!("get_vtable_methods({:?})", trait_ref);
+
+ supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
+ tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id());
+
+ let trait_item_def_ids = tcx.impl_or_trait_items(trait_ref.def_id());
+ let trait_methods = (0..trait_item_def_ids.len()).filter_map(move |i| {
+ match tcx.impl_or_trait_item(trait_item_def_ids[i]) {
+ ty::MethodTraitItem(m) => Some(m),
+ _ => None
+ }
+ });
+
+ // Now list each method's DefId and Substs (for within its trait).
+ // If the method can never be called from this object, produce None.
+ trait_methods.map(move |trait_method| {
+ debug!("get_vtable_methods: trait_method={:?}", trait_method);
+
+ // Some methods cannot be called on an object; skip those.
+ if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) {
+ debug!("get_vtable_methods: not vtable safe");
+ return None;
+ }
+
+ // the method may have some early-bound lifetimes, add
+ // regions for those
+ let substs = Substs::for_item(tcx, trait_method.def_id,
+ |_, _| tcx.mk_region(ty::ReErased),
+ |def, _| trait_ref.substs().type_for_def(def));
+
+ // It's possible that the method relies on where clauses that
+ // do not hold for this particular set of type parameters.
+ // Note that this method could then never be called, so we
+ // do not want to try and trans it, in that case (see #23435).
+ let predicates = trait_method.predicates.instantiate_own(tcx, substs);
+ if !normalize_and_test_predicates(tcx, predicates.predicates) {
+ debug!("get_vtable_methods: predicates do not hold");
+ return None;
+ }
+
+ Some((trait_method.def_id, substs))
+ })
+ })
+}
+
impl<'tcx,O> Obligation<'tcx,O> {
pub fn new(cause: ObligationCause<'tcx>,
trait_ref: O)
}
pub fn dummy() -> ObligationCause<'tcx> {
- ObligationCause { span: DUMMY_SP, body_id: 0, code: MiscObligation }
+ ObligationCause { span: DUMMY_SP, body_id: ast::CRATE_NODE_ID, code: MiscObligation }
}
}
fn filter_negative_impls(&self, candidate: SelectionCandidate<'tcx>)
-> SelectionResult<'tcx, SelectionCandidate<'tcx>> {
if let ImplCandidate(def_id) = candidate {
- if self.tcx().trait_impl_polarity(def_id) == Some(hir::ImplPolarity::Negative) {
+ if self.tcx().trait_impl_polarity(def_id) == hir::ImplPolarity::Negative {
return Err(Unimplemented)
}
}
use middle::region;
use ty::subst::{Subst, Substs};
use traits::{self, Reveal, ObligationCause, Normalized};
-use ty::{self, TyCtxt};
+use ty::{self, TyCtxt, TypeFoldable};
use syntax_pos::DUMMY_SP;
+use syntax::ast;
+
pub mod specialization_graph;
/// Information pertinent to an overlapping impl error.
source_substs.rebase_onto(infcx.tcx, source_impl, target_substs)
}
+/// Given a selected impl described by `impl_data`, returns the
+/// definition and substitutions for the method with the name `name`,
+/// and trait method substitutions `substs`, in that impl, a less
+/// specialized impl, or the trait default, whichever applies.
+pub fn find_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ name: ast::Name,
+ substs: &'tcx Substs<'tcx>,
+ impl_data: &super::VtableImplData<'tcx, ()>)
+ -> (DefId, &'tcx Substs<'tcx>)
+{
+ assert!(!substs.needs_infer());
+
+ let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();
+ let trait_def = tcx.lookup_trait_def(trait_def_id);
+
+ match trait_def.ancestors(impl_data.impl_def_id).fn_defs(tcx, name).next() {
+ Some(node_item) => {
+ let substs = tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {
+ let substs = substs.rebase_onto(tcx, trait_def_id, impl_data.substs);
+ let substs = translate_substs(&infcx, impl_data.impl_def_id,
+ substs, node_item.node);
+ tcx.lift(&substs).unwrap_or_else(|| {
+ bug!("find_method: translate_substs \
+ returned {:?} which contains inference types/regions",
+ substs);
+ })
+ });
+ (node_item.item.def_id, substs)
+ }
+ None => {
+ bug!("method {:?} not found in {:?}", name, impl_data.impl_def_id)
+ }
+ }
+}
+
/// Is impl1 a specialization of impl2?
///
/// Specialization is determined by the sets of types to which the impls apply;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::cell;
use std::rc::Rc;
use super::{OverlapError, specializes};
/// Iterate over the items defined directly by the given (impl or trait) node.
pub fn items(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> NodeItems<'a, 'gcx> {
- match *self {
- Node::Impl(impl_def_id) => {
- NodeItems::Impl {
- tcx: tcx.global_tcx(),
- items: cell::Ref::map(tcx.impl_items.borrow(),
- |impl_items| &impl_items[&impl_def_id]),
- idx: 0,
- }
- }
- Node::Trait(trait_def_id) => {
- NodeItems::Trait {
- items: tcx.trait_items(trait_def_id).clone(),
- idx: 0,
- }
- }
+ NodeItems {
+ tcx: tcx.global_tcx(),
+ items: tcx.impl_or_trait_items(self.def_id()),
+ idx: 0,
}
}
}
/// An iterator over the items defined within a trait or impl.
-pub enum NodeItems<'a, 'tcx: 'a> {
- Impl {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- items: cell::Ref<'a, Vec<ty::ImplOrTraitItemId>>,
- idx: usize,
- },
- Trait {
- items: Rc<Vec<ImplOrTraitItem<'tcx>>>,
- idx: usize,
- },
+pub struct NodeItems<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ items: Rc<Vec<DefId>>,
+ idx: usize
}
impl<'a, 'tcx> Iterator for NodeItems<'a, 'tcx> {
type Item = ImplOrTraitItem<'tcx>;
fn next(&mut self) -> Option<ImplOrTraitItem<'tcx>> {
- match *self {
- NodeItems::Impl { tcx, ref items, ref mut idx } => {
- let items_table = tcx.impl_or_trait_items.borrow();
- if *idx < items.len() {
- let item_def_id = items[*idx].def_id();
- let item = items_table[&item_def_id].clone();
- *idx += 1;
- Some(item)
- } else {
- None
- }
- }
- NodeItems::Trait { ref items, ref mut idx } => {
- if *idx < items.len() {
- let item = items[*idx].clone();
- *idx += 1;
- Some(item)
- } else {
- None
- }
- }
+ if self.idx < self.items.len() {
+ let item_def_id = self.items[self.idx];
+ let items_table = self.tcx.impl_or_trait_items.borrow();
+ let item = items_table[&item_def_id].clone();
+ self.idx += 1;
+ Some(item)
+ } else {
+ None
}
}
}
use hir;
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub enum AutoAdjustment<'tcx> {
AdjustNeverToAny(Ty<'tcx>), // go from ! to any type
AdjustReifyFnPointer, // go from a fn-item type to a fn-pointer type
/// unsize: Some(Box<[i32]>),
/// }
/// ```
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub struct AutoDerefRef<'tcx> {
/// Step 1. Apply a number of dereferences, producing an lvalue.
pub autoderefs: usize,
}
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)]
pub enum AutoRef<'tcx> {
/// Convert from T to &T.
AutoPtr(&'tcx ty::Region, hir::Mutability),
use dep_graph::{DepGraph, DepTrackingMap};
use session::Session;
use middle;
-use middle::cstore::LOCAL_CRATE;
use hir::TraitMap;
use hir::def::DefMap;
-use hir::def_id::{DefId, DefIndex};
+use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
use hir::map as ast_map;
-use hir::map::{DefKey, DefPath, DefPathData, DisambiguatedDefPathData};
+use hir::map::{DefKey, DefPathData, DisambiguatedDefPathData};
use middle::free_region::FreeRegionMap;
use middle::region::RegionMaps;
use middle::resolve_lifetime;
use ty::subst::Substs;
use traits;
use ty::{self, TraitRef, Ty, TypeAndMut};
-use ty::{TyS, TypeVariants};
+use ty::{TyS, TypeVariants, Slice};
use ty::{AdtKind, AdtDef, ClosureSubsts, Region};
use hir::FreevarMap;
use ty::{BareFnTy, InferTy, ParamTy, ProjectionTy, TraitObject};
/// Specifically use a speedy hash algorithm for these hash sets,
/// they're accessed quite often.
type_: RefCell<FnvHashSet<Interned<'tcx, TyS<'tcx>>>>,
- type_list: RefCell<FnvHashSet<Interned<'tcx, [Ty<'tcx>]>>>,
+ type_list: RefCell<FnvHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
substs: RefCell<FnvHashSet<Interned<'tcx, Substs<'tcx>>>>,
bare_fn: RefCell<FnvHashSet<Interned<'tcx, BareFnTy<'tcx>>>>,
region: RefCell<FnvHashSet<Interned<'tcx, Region>>>,
/// Maps from a trait item to the trait item "descriptor"
pub impl_or_trait_items: RefCell<DepTrackingMap<maps::ImplOrTraitItems<'tcx>>>,
- /// Maps from a trait def-id to a list of the def-ids of its trait items
- pub trait_item_def_ids: RefCell<DepTrackingMap<maps::TraitItemDefIds<'tcx>>>,
+ /// Maps from an impl/trait def-id to a list of the def-ids of its items
+ pub impl_or_trait_item_def_ids: RefCell<DepTrackingMap<maps::ImplOrTraitItemDefIds<'tcx>>>,
/// A cache for the trait_items() routine; note that the routine
/// itself pushes the `TraitItems` dependency node.
/// Methods in these implementations don't need to be exported.
pub inherent_impls: RefCell<DepTrackingMap<maps::InherentImpls<'tcx>>>,
- /// Maps a DefId of an impl to a list of its items.
- /// Note that this contains all of the impls that we know about,
- /// including ones in other crates. It's not clear that this is the best
- /// way to do it.
- pub impl_items: RefCell<DepTrackingMap<maps::ImplItems<'tcx>>>,
-
/// Set of used unsafe nodes (functions or blocks). Unsafe nodes not
/// present in this set can be warned about.
pub used_unsafe: RefCell<NodeSet>,
/// Cache for layouts computed from types.
pub layout_cache: RefCell<FnvHashMap<Ty<'tcx>, &'tcx Layout>>,
+ /// Used to prevent layout from recursing too deeply.
+ pub layout_depth: Cell<usize>,
+
/// Map from function to the `#[derive]` mode that it's defining. Only used
/// by `rustc-macro` crates.
pub derive_macros: RefCell<NodeMap<token::InternedString>>,
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- pub fn crate_name(self, cnum: ast::CrateNum) -> token::InternedString {
+ pub fn crate_name(self, cnum: CrateNum) -> token::InternedString {
if cnum == LOCAL_CRATE {
self.crate_name.clone()
} else {
}
}
- pub fn crate_disambiguator(self, cnum: ast::CrateNum) -> token::InternedString {
+ pub fn original_crate_name(self, cnum: CrateNum) -> token::InternedString {
+ if cnum == LOCAL_CRATE {
+ self.crate_name.clone()
+ } else {
+ self.sess.cstore.original_crate_name(cnum)
+ }
+ }
+
+ pub fn crate_disambiguator(self, cnum: CrateNum) -> token::InternedString {
if cnum == LOCAL_CRATE {
self.sess.local_crate_disambiguator()
} else {
/// relative to `krate`.
///
/// Returns `None` if there is no `DefIndex` with that key.
- pub fn def_index_for_def_key(self, krate: ast::CrateNum, key: DefKey)
+ pub fn def_index_for_def_key(self, krate: CrateNum, key: DefKey)
-> Option<DefIndex> {
if krate == LOCAL_CRATE {
self.map.def_index_for_def_key(key)
}
}
- pub fn retrace_path(self, path: &DefPath) -> Option<DefId> {
- debug!("retrace_path(path={:?}, krate={:?})", path, self.crate_name(path.krate));
+ pub fn retrace_path(self,
+ krate: CrateNum,
+ path_data: &[DisambiguatedDefPathData])
+ -> Option<DefId> {
+ debug!("retrace_path(path={:?}, krate={:?})", path_data, self.crate_name(krate));
let root_key = DefKey {
parent: None,
},
};
- let root_index = self.def_index_for_def_key(path.krate, root_key)
+ let root_index = self.def_index_for_def_key(krate, root_key)
.expect("no root key?");
debug!("retrace_path: root_index={:?}", root_index);
let mut index = root_index;
- for data in &path.data {
+ for data in path_data {
let key = DefKey { parent: Some(index), disambiguated_data: data.clone() };
debug!("retrace_path: key={:?}", key);
- match self.def_index_for_def_key(path.krate, key) {
+ match self.def_index_for_def_key(krate, key) {
Some(i) => index = i,
None => return None,
}
}
- Some(DefId { krate: path.krate, index: index })
+ Some(DefId { krate: krate, index: index })
}
pub fn type_parameter_def(self,
rcache: RefCell::new(FnvHashMap()),
tc_cache: RefCell::new(FnvHashMap()),
impl_or_trait_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
- trait_item_def_ids: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
+ impl_or_trait_item_def_ids: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
trait_items_cache: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
ty_param_defs: RefCell::new(NodeMap()),
normalized_cache: RefCell::new(FnvHashMap()),
lang_items: lang_items,
inherent_impls: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
- impl_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
used_unsafe: RefCell::new(NodeSet()),
used_mut_nodes: RefCell::new(NodeSet()),
used_trait_imports: RefCell::new(NodeSet()),
crate_name: token::intern_and_get_ident(crate_name),
data_layout: data_layout,
layout_cache: RefCell::new(FnvHashMap()),
+ layout_depth: Cell::new(0),
derive_macros: RefCell::new(NodeMap()),
}, f)
}
}
}
-impl<'a, 'tcx> Lift<'tcx> for &'a [Ty<'a>] {
- type Lifted = &'tcx [Ty<'tcx>];
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx [Ty<'tcx>]> {
- if let Some(&Interned(list)) = tcx.interners.type_list.borrow().get(*self) {
+impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
+ type Lifted = &'tcx Slice<Ty<'tcx>>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
+ -> Option<&'tcx Slice<Ty<'tcx>>> {
+ if let Some(&Interned(list)) = tcx.interners.type_list.borrow().get(&self[..]) {
if *self as *const _ == list as *const _ {
return Some(list);
}
}
}
-impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, [Ty<'tcx>]> {
+// NB: An Interned<Slice<T>> compares and hashes as its elements.
+impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
+ fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
+ self.0[..] == other.0[..]
+ }
+}
+
+impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}
+
+impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
+ fn hash<H: Hasher>(&self, s: &mut H) {
+ self.0[..].hash(s)
+ }
+}
+
+impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
- self.0
+ &self.0[..]
}
}
}
}
-macro_rules! items { ($($item:item)+) => ($($item)+) }
-macro_rules! impl_interners {
- ($lt_tcx:tt, $($name:ident: $method:ident($alloc:ty, $needs_infer:expr)-> $ty:ty),+) => {
- items!($(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
- fn eq(&self, other: &Self) -> bool {
- self.0 == other.0
- }
- }
-
- impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
-
- impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
- fn hash<H: Hasher>(&self, s: &mut H) {
- self.0.hash(s)
- }
- }
-
+macro_rules! intern_method {
+ ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
+ $alloc_to_key:expr,
+ $alloc_to_ret:expr,
+ $needs_infer:expr) -> $ty:ty) => {
impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
- if let Some(i) = self.interners.$name.borrow().get::<$ty>(&v) {
- return i.0;
- }
- if !self.is_global() {
- if let Some(i) = self.global_interners.$name.borrow().get::<$ty>(&v) {
+ {
+ let key = ($alloc_to_key)(&v);
+ if let Some(i) = self.interners.$name.borrow().get(key) {
return i.0;
}
+ if !self.is_global() {
+ if let Some(i) = self.global_interners.$name.borrow().get(key) {
+ return i.0;
+ }
+ }
}
// HACK(eddyb) Depend on flags being accurate to
let v = unsafe {
mem::transmute(v)
};
- let i = self.global_interners.arenas.$name.alloc(v);
+ let i = ($alloc_to_ret)(self.global_interners.arenas.$name.alloc(v));
self.global_interners.$name.borrow_mut().insert(Interned(i));
return i;
}
}
}
- let i = self.interners.arenas.$name.alloc(v);
+ let i = ($alloc_to_ret)(self.interners.arenas.$name.alloc(v));
self.interners.$name.borrow_mut().insert(Interned(i));
i
}
- })+);
+ }
+ }
+}
+
+macro_rules! direct_interners {
+ ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
+ $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
+ fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0
+ }
+ }
+
+ impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
+
+ impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
+ fn hash<H: Hasher>(&self, s: &mut H) {
+ self.0.hash(s)
+ }
+ }
+
+ intern_method!($lt_tcx, $name: $method($ty, |x| x, |x| x, $needs_infer) -> $ty);)+
}
}
x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
}
-impl_interners!('tcx,
- type_list: mk_type_list(Vec<Ty<'tcx>>, keep_local) -> [Ty<'tcx>],
- substs: mk_substs(Substs<'tcx>, |substs: &Substs| {
+direct_interners!('tcx,
+ substs: mk_substs(|substs: &Substs| {
substs.params().iter().any(keep_local)
}) -> Substs<'tcx>,
- bare_fn: mk_bare_fn(BareFnTy<'tcx>, |fty: &BareFnTy| {
+ bare_fn: mk_bare_fn(|fty: &BareFnTy| {
keep_local(&fty.sig)
}) -> BareFnTy<'tcx>,
- region: mk_region(Region, |r| {
+ region: mk_region(|r| {
match r {
&ty::ReVar(_) | &ty::ReSkolemized(..) => true,
_ => false
}) -> Region
);
+intern_method!('tcx,
+ type_list: mk_type_list(Vec<Ty<'tcx>>, Deref::deref, |xs: &[Ty]| -> &Slice<Ty> {
+ unsafe { mem::transmute(xs) }
+ }, keep_local) -> Slice<Ty<'tcx>>
+);
+
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
/// Create an unsafe fn ty based on a safe fn ty.
pub fn safe_to_unsafe_fn_ty(self, bare_fn: &BareFnTy<'tcx>) -> Ty<'tcx> {
pub fn trait_items(self, trait_did: DefId) -> Rc<Vec<ty::ImplOrTraitItem<'gcx>>> {
self.trait_items_cache.memoize(trait_did, || {
- let def_ids = self.trait_item_def_ids(trait_did);
+ let def_ids = self.impl_or_trait_items(trait_did);
Rc::new(def_ids.iter()
- .map(|d| self.impl_or_trait_item(d.def_id()))
+ .map(|&def_id| self.impl_or_trait_item(def_id))
.collect())
})
}
/// Obtain the representation annotation for a struct definition.
pub fn lookup_repr_hints(self, did: DefId) -> Rc<Vec<attr::ReprAttr>> {
self.repr_hint_cache.memoize(did, || {
- Rc::new(if did.is_local() {
- self.get_attrs(did).iter().flat_map(|meta| {
- attr::find_repr_attrs(self.sess.diagnostic(), meta).into_iter()
- }).collect()
- } else {
- self.sess.cstore.repr_attrs(did)
- })
+ Rc::new(self.get_attrs(did).iter().flat_map(|meta| {
+ attr::find_repr_attrs(self.sess.diagnostic(), meta).into_iter()
+ }).collect())
})
}
}
// except according to those terms.
use hir::map::DefPathData;
-use middle::cstore::LOCAL_CRATE;
-use hir::def_id::{DefId, CRATE_DEF_INDEX};
+use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use ty::{self, Ty, TyCtxt};
use syntax::ast;
use syntax::parse::token;
/// Returns the "path" to a particular crate. This can proceed in
/// various ways, depending on the `root_mode` of the `buffer`.
/// (See `RootMode` enum for more details.)
- pub fn push_krate_path<T>(self, buffer: &mut T, cnum: ast::CrateNum)
+ pub fn push_krate_path<T>(self, buffer: &mut T, cnum: CrateNum)
where T: ItemPathBuffer
{
match *buffer.root_mode() {
RootMode::Absolute => {
// In absolute mode, just write the crate name
// unconditionally.
- if cnum == LOCAL_CRATE {
- buffer.push(&self.crate_name(cnum));
- } else {
- buffer.push(&self.sess.cstore.original_crate_name(cnum));
- }
+ buffer.push(&self.original_crate_name(cnum));
}
}
}
}
}
- cur_path.push(self.sess.cstore.opt_item_name(cur_def).unwrap_or_else(||
+ cur_path.push(self.sess.cstore.def_key(cur_def)
+ .disambiguated_data.data.get_opt_name().unwrap_or_else(||
token::intern("<unnamed>")));
match visible_parent_map.get(&cur_def) {
Some(&def) => cur_def = def,
/// Returns the def-id of `def_id`'s parent in the def tree. If
/// this returns `None`, then `def_id` represents a crate root or
/// inlined root.
- fn parent_def_id(&self, def_id: DefId) -> Option<DefId> {
+ pub fn parent_def_id(self, def_id: DefId) -> Option<DefId> {
let key = self.def_key(def_id);
key.parent.map(|index| DefId { krate: def_id.krate, index: index })
}
}
impl Integer {
+ pub fn size(&self) -> Size {
+ match *self {
+ I1 => Size::from_bits(1),
+ I8 => Size::from_bytes(1),
+ I16 => Size::from_bytes(2),
+ I32 => Size::from_bytes(4),
+ I64 => Size::from_bytes(8),
+ }
+ }
+
+ pub fn align(&self, dl: &TargetDataLayout)-> Align {
+ match *self {
+ I1 => dl.i1_align,
+ I8 => dl.i8_align,
+ I16 => dl.i16_align,
+ I32 => dl.i32_align,
+ I64 => dl.i64_align,
+ }
+ }
+
+ pub fn to_ty<'a, 'tcx>(&self, tcx: &ty::TyCtxt<'a, 'tcx, 'tcx>,
+ signed: bool) -> Ty<'tcx> {
+ match (*self, signed) {
+ (I1, false) => tcx.types.u8,
+ (I8, false) => tcx.types.u8,
+ (I16, false) => tcx.types.u16,
+ (I32, false) => tcx.types.u32,
+ (I64, false) => tcx.types.u64,
+ (I1, true) => tcx.types.i8,
+ (I8, true) => tcx.types.i8,
+ (I16, true) => tcx.types.i16,
+ (I32, true) => tcx.types.i32,
+ (I64, true) => tcx.types.i64,
+ }
+ }
+
/// Find the smallest Integer type which can represent the signed value.
pub fn fit_signed(x: i64) -> Integer {
match x {
}
}
+ /// Find the smallest integer with the given alignment.
+ pub fn for_abi_align(dl: &TargetDataLayout, align: Align) -> Option<Integer> {
+ let wanted = align.abi();
+ for &candidate in &[I8, I16, I32, I64] {
+ let ty = Int(candidate);
+ if wanted == ty.align(dl).abi() && wanted == ty.size(dl).bytes() {
+ return Some(candidate);
+ }
+ }
+ None
+ }
+
/// Get the Integer type from an attr::IntType.
pub fn from_attr(dl: &TargetDataLayout, ity: attr::IntType) -> Integer {
match ity {
/// signed discriminant range and #[repr] attribute.
/// N.B.: u64 values above i64::MAX will be treated as signed, but
/// that shouldn't affect anything, other than maybe debuginfo.
- pub fn repr_discr(tcx: TyCtxt, hint: attr::ReprAttr, min: i64, max: i64)
+ pub fn repr_discr(tcx: TyCtxt, ty: Ty, hint: attr::ReprAttr, min: i64, max: i64)
-> (Integer, bool) {
// Theoretically, negative values could be larger in unsigned representation
// than the unsigned representation of the signed minimum. However, if there
let signed_fit = cmp::max(Integer::fit_signed(min), Integer::fit_signed(max));
let at_least = match hint {
- attr::ReprInt(span, ity) => {
+ attr::ReprInt(ity) => {
let discr = Integer::from_attr(&tcx.data_layout, ity);
let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
if discr < fit {
- span_bug!(span, "representation hint insufficient for discriminant range")
+ bug!("Integer::repr_discr: `#[repr]` hint too small for \
+ discriminant range of enum `{}", ty)
}
return (discr, ity.is_signed());
}
}
attr::ReprAny => I8,
attr::ReprPacked => {
- bug!("Integer::repr_discr: found #[repr(packed)] on an enum");
+ bug!("Integer::repr_discr: found #[repr(packed)] on enum `{}", ty);
}
attr::ReprSimd => {
- bug!("Integer::repr_discr: found #[repr(simd)] on an enum");
+ bug!("Integer::repr_discr: found #[repr(simd)] on enum `{}", ty);
}
};
}
Ok(None)
}
+
+ pub fn offset_of_field(&self, index: usize) -> Size {
+ assert!(index < self.offset_after_field.len());
+ if index == 0 {
+ Size::from_bytes(0)
+ } else {
+ self.offset_after_field[index-1]
+ }
+ }
}
/// An untagged union.
Univariant { variant: unit, non_zero: false }
}
- // Tuples.
+ // Tuples and closures.
ty::TyClosure(_, ty::ClosureSubsts { upvar_tys: tys, .. }) |
ty::TyTuple(tys) => {
let mut st = Struct::new(dl, false);
if x > max { max = x; }
}
- let (discr, signed) = Integer::repr_discr(tcx, hint, min, max);
+ let (discr, signed) = Integer::repr_discr(tcx, ty, hint, min, max);
return success(CEnum {
discr: discr,
signed: signed,
});
}
- if def.variants.len() == 1 {
+ if !def.is_enum() || def.variants.len() == 1 && hint == attr::ReprAny {
// Struct, or union, or univariant enum equivalent to a struct.
// (Typechecking will reject discriminant-sizing attrs.)
- assert!(!def.is_enum() || hint == attr::ReprAny);
+
let fields = def.variants[0].fields.iter().map(|field| {
field.ty(tcx, substs).layout(infcx)
});
// The general case.
let discr_max = (variants.len() - 1) as i64;
assert!(discr_max >= 0);
- let (min_ity, _) = Integer::repr_discr(tcx, hint, 0, discr_max);
+ let (min_ity, _) = Integer::repr_discr(tcx, ty, hint, 0, discr_max);
let mut align = dl.aggregate_align;
let mut size = Size::from_bytes(0);
// won't be so conservative.
// Use the initial field alignment
- let wanted = start_align.abi();
- let mut ity = min_ity;
- for &candidate in &[I16, I32, I64] {
- let ty = Int(candidate);
- if wanted == ty.align(dl).abi() && wanted == ty.size(dl).bytes() {
- ity = candidate;
- break;
- }
- }
-
- // FIXME(eddyb) conservative only to avoid diverging from trans::adt.
- if align.abi() != start_align.abi() {
- ity = min_ity;
- }
+ let mut ity = Integer::for_abi_align(dl, start_align).unwrap_or(min_ity);
// If the alignment is not larger than the chosen discriminant size,
// don't use the alignment as the final size.
dep_map_ty! { Generics: ItemSignature(DefId) -> &'tcx ty::Generics<'tcx> }
dep_map_ty! { Predicates: ItemSignature(DefId) -> ty::GenericPredicates<'tcx> }
dep_map_ty! { SuperPredicates: ItemSignature(DefId) -> ty::GenericPredicates<'tcx> }
-dep_map_ty! { TraitItemDefIds: TraitItemDefIds(DefId) -> Rc<Vec<ty::ImplOrTraitItemId>> }
+dep_map_ty! { ImplOrTraitItemDefIds: ImplOrTraitItemDefIds(DefId) -> Rc<Vec<DefId>> }
dep_map_ty! { ImplTraitRefs: ItemSignature(DefId) -> Option<ty::TraitRef<'tcx>> }
dep_map_ty! { TraitDefs: ItemSignature(DefId) -> &'tcx ty::TraitDef<'tcx> }
dep_map_ty! { AdtDefs: ItemSignature(DefId) -> ty::AdtDefMaster<'tcx> }
dep_map_ty! { ItemVariances: ItemSignature(DefId) -> Rc<Vec<ty::Variance>> }
dep_map_ty! { InherentImpls: InherentImpls(DefId) -> Vec<DefId> }
-dep_map_ty! { ImplItems: ImplItems(DefId) -> Vec<ty::ImplOrTraitItemId> }
dep_map_ty! { TraitItems: TraitItems(DefId) -> Rc<Vec<ty::ImplOrTraitItem<'tcx>>> }
dep_map_ty! { ReprHints: ReprHints(DefId) -> Rc<Vec<attr::ReprAttr>> }
dep_map_ty! { InlinedClosures: Hir(DefId) -> ast::NodeId }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-pub use self::ImplOrTraitItemId::*;
pub use self::Variance::*;
pub use self::DtorKind::*;
pub use self::ImplOrTraitItemContainer::*;
use dep_graph::{self, DepNode};
use hir::map as ast_map;
use middle;
-use middle::cstore::{self, LOCAL_CRATE};
use hir::def::{Def, PathResolution, ExportMap};
-use hir::def_id::DefId;
+use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::region::{CodeExtent, ROOT_CODE_EXTENT};
use traits;
use util::nodemap::NodeSet;
use util::nodemap::FnvHashMap;
-use serialize::{Encodable, Encoder, Decodable, Decoder};
+use serialize::{self, Encodable, Encoder};
use std::borrow::Cow;
use std::cell::Cell;
use std::hash::{Hash, Hasher};
use std::iter;
+use std::ops::Deref;
use std::rc::Rc;
use std::slice;
use std::vec::IntoIter;
-use syntax::ast::{self, CrateNum, Name, NodeId};
+use syntax::ast::{self, Name, NodeId};
use syntax::attr;
-use syntax::parse::token::InternedString;
+use syntax::parse::token::{self, InternedString};
use syntax_pos::{DUMMY_SP, Span};
use rustc_const_math::ConstInt;
use hir;
-use hir::{ItemImpl, ItemTrait, PatKind};
use hir::intravisit::Visitor;
pub use self::sty::{Binder, DebruijnIndex};
}
impl<'tcx> ImplOrTraitItem<'tcx> {
- fn id(&self) -> ImplOrTraitItemId {
- match *self {
- ConstTraitItem(ref associated_const) => {
- ConstTraitItemId(associated_const.def_id)
- }
- MethodTraitItem(ref method) => MethodTraitItemId(method.def_id),
- TypeTraitItem(ref associated_type) => {
- TypeTraitItemId(associated_type.def_id)
- }
- }
- }
-
pub fn def(&self) -> Def {
match *self {
ConstTraitItem(ref associated_const) => Def::AssociatedConst(associated_const.def_id),
MethodTraitItem(ref method) => Def::Method(method.def_id),
- TypeTraitItem(ref ty) => Def::AssociatedTy(ty.container.id(), ty.def_id),
+ TypeTraitItem(ref ty) => Def::AssociatedTy(ty.def_id),
}
}
}
}
-#[derive(Clone, Copy, Debug)]
-pub enum ImplOrTraitItemId {
- ConstTraitItemId(DefId),
- MethodTraitItemId(DefId),
- TypeTraitItemId(DefId),
-}
-
-impl ImplOrTraitItemId {
- pub fn def_id(&self) -> DefId {
- match *self {
- ConstTraitItemId(def_id) => def_id,
- MethodTraitItemId(def_id) => def_id,
- TypeTraitItemId(def_id) => def_id,
- }
- }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, Copy)]
+#[derive(Clone, Debug, PartialEq, Eq, Copy, RustcEncodable, RustcDecodable)]
pub enum Visibility {
/// Visible everywhere (including in other crates).
Public,
pub explicit_self: ExplicitSelfCategory<'tcx>,
pub vis: Visibility,
pub defaultness: hir::Defaultness,
+ pub has_body: bool,
pub def_id: DefId,
pub container: ImplOrTraitItemContainer,
}
impl<'tcx> Method<'tcx> {
- pub fn new(name: Name,
- generics: &'tcx ty::Generics<'tcx>,
- predicates: GenericPredicates<'tcx>,
- fty: &'tcx BareFnTy<'tcx>,
- explicit_self: ExplicitSelfCategory<'tcx>,
- vis: Visibility,
- defaultness: hir::Defaultness,
- def_id: DefId,
- container: ImplOrTraitItemContainer)
- -> Method<'tcx> {
- Method {
- name: name,
- generics: generics,
- predicates: predicates,
- fty: fty,
- explicit_self: explicit_self,
- vis: vis,
- defaultness: defaultness,
- def_id: def_id,
- container: container,
- }
- }
-
pub fn container_id(&self) -> DefId {
match self.container {
TraitContainer(id) => id,
Bivariant, // T<A> <: T<B> -- e.g., unused type parameter
}
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, Copy, Debug, RustcDecodable, RustcEncodable)]
pub struct MethodCallee<'tcx> {
/// Impl method ID, for inherent methods, or trait method ID, otherwise.
pub def_id: DefId,
pub type Ty<'tcx> = &'tcx TyS<'tcx>;
-impl<'tcx> Encodable for Ty<'tcx> {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- cstore::tls::with_encoding_context(s, |ecx, rbml_w| {
- ecx.encode_ty(rbml_w, *self);
- Ok(())
- })
+impl<'tcx> serialize::UseSpecializedEncodable for Ty<'tcx> {}
+impl<'tcx> serialize::UseSpecializedDecodable for Ty<'tcx> {}
+
+/// A wrapper for slices with the additional invariant
+/// that the slice is interned and no other slice with
+/// the same contents can exist in the same context.
+/// This means we can use pointer + length for both
+/// equality comparisons and hashing.
+#[derive(Debug, RustcEncodable)]
+pub struct Slice<T>([T]);
+
+impl<T> PartialEq for Slice<T> {
+ #[inline]
+ fn eq(&self, other: &Slice<T>) -> bool {
+ (&self.0 as *const [T]) == (&other.0 as *const [T])
}
}
+impl<T> Eq for Slice<T> {}
-impl<'tcx> Decodable for Ty<'tcx> {
- fn decode<D: Decoder>(d: &mut D) -> Result<Ty<'tcx>, D::Error> {
- cstore::tls::with_decoding_context(d, |dcx, rbml_r| {
- Ok(dcx.decode_ty(rbml_r))
- })
+impl<T> Hash for Slice<T> {
+ fn hash<H: Hasher>(&self, s: &mut H) {
+ (self.as_ptr(), self.len()).hash(s)
}
}
+impl<T> Deref for Slice<T> {
+ type Target = [T];
+ fn deref(&self) -> &[T] {
+ &self.0
+ }
+}
+
+impl<'a, T> IntoIterator for &'a Slice<T> {
+ type Item = &'a T;
+ type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
+ fn into_iter(self) -> Self::IntoIter {
+ self[..].iter()
+ }
+}
+
+impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Slice<Ty<'tcx>> {}
/// Upvars do not get their own node-id. Instead, we use the pair of
/// the original var id (that is, the root variable that is referenced
/// Information describing the capture of an upvar. This is computed
/// during `typeck`, specifically by `regionck`.
-#[derive(PartialEq, Clone, Debug, Copy)]
+#[derive(PartialEq, Clone, Debug, Copy, RustcEncodable, RustcDecodable)]
pub enum UpvarCapture<'tcx> {
/// Upvar is captured by value. This is always true when the
/// closure is labeled `move`, but can also be true in other cases
ByRef(UpvarBorrow<'tcx>),
}
-#[derive(PartialEq, Clone, Copy)]
+#[derive(PartialEq, Clone, Copy, RustcEncodable, RustcDecodable)]
pub struct UpvarBorrow<'tcx> {
/// The kind of borrow: by-ref upvars have access to shared
/// immutable borrows, which are not part of the normal language
/// from `T:'a` annotations appearing in the type definition. If
/// this is `None`, then the default is inherited from the
/// surrounding context. See RFC #599 for details.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub enum ObjectLifetimeDefault<'tcx> {
/// Require an explicit annotation. Occurs when multiple
/// `T:'a` constraints are found.
Specific(&'tcx Region),
}
-#[derive(Clone)]
+#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct TypeParameterDef<'tcx> {
pub name: Name,
pub def_id: DefId,
pub object_lifetime_default: ObjectLifetimeDefault<'tcx>,
}
-#[derive(Clone)]
+#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct RegionParameterDef<'tcx> {
pub name: Name,
pub def_id: DefId,
/// Information about the formal type/lifetime parameters associated
/// with an item or method. Analogous to hir::Generics.
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct Generics<'tcx> {
pub parent: Option<DefId>,
pub parent_regions: u32,
pub predicates: Vec<Predicate<'tcx>>,
}
+impl<'tcx> serialize::UseSpecializedEncodable for GenericPredicates<'tcx> {}
+impl<'tcx> serialize::UseSpecializedDecodable for GenericPredicates<'tcx> {}
+
impl<'a, 'gcx, 'tcx> GenericPredicates<'tcx> {
pub fn instantiate(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &Substs<'tcx>)
-> InstantiatedPredicates<'tcx> {
}
}
-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum Predicate<'tcx> {
/// Corresponds to `where Foo : Bar<A,B,C>`. `Foo` here would be
/// the `Self` type of the trait reference and `A`, `B`, and `C`
}
}
-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct TraitPredicate<'tcx> {
pub trait_ref: TraitRef<'tcx>
}
}
}
-#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct EquatePredicate<'tcx>(pub Ty<'tcx>, pub Ty<'tcx>); // `0 == 1`
pub type PolyEquatePredicate<'tcx> = ty::Binder<EquatePredicate<'tcx>>;
-#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct OutlivesPredicate<A,B>(pub A, pub B); // `A : B`
pub type PolyOutlivesPredicate<A,B> = ty::Binder<OutlivesPredicate<A,B>>;
pub type PolyRegionOutlivesPredicate<'tcx> = PolyOutlivesPredicate<&'tcx ty::Region,
/// equality between arbitrary types. Processing an instance of Form
/// #2 eventually yields one of these `ProjectionPredicate`
/// instances to normalize the LHS.
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct ProjectionPredicate<'tcx> {
pub projection_ty: ProjectionTy<'tcx>,
pub ty: Ty<'tcx>,
}
}
-impl<'tcx> Encodable for AdtDef<'tcx> {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+impl<'tcx> serialize::UseSpecializedEncodable for AdtDef<'tcx> {
+ fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
self.did.encode(s)
}
}
-impl<'tcx> Decodable for AdtDef<'tcx> {
- fn decode<D: Decoder>(d: &mut D) -> Result<AdtDef<'tcx>, D::Error> {
- let def_id: DefId = Decodable::decode(d)?;
-
- cstore::tls::with_decoding_context(d, |dcx, _| {
- let def_id = dcx.translate_def_id(def_id);
- Ok(dcx.tcx().lookup_adt_def(def_id))
- })
- }
-}
-
+impl<'tcx> serialize::UseSpecializedDecodable for AdtDef<'tcx> {}
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum AdtKind { Struct, Union, Enum }
pub fn variant_of_def(&self, def: Def) -> &VariantDefData<'gcx, 'container> {
match def {
- Def::Variant(_, vid) => self.variant_with_id(vid),
+ Def::Variant(vid) => self.variant_with_id(vid),
Def::Struct(..) | Def::Union(..) |
Def::TyAlias(..) | Def::AssociatedTy(..) => self.struct_variant(),
_ => bug!("unexpected def {:?} in variant_of_def", def)
/// Records the substitutions used to translate the polytype for an
/// item into the monotype of an item reference.
-#[derive(Clone)]
+#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct ItemSubsts<'tcx> {
pub substs: &'tcx Substs<'tcx>,
}
match self.map.find(id) {
Some(ast_map::NodeLocal(pat)) => {
match pat.node {
- PatKind::Binding(_, ref path1, _) => path1.node.as_str(),
+ hir::PatKind::Binding(_, ref path1, _) => path1.node.as_str(),
_ => {
bug!("Variable id {} maps to {:?}, not local", id, pat);
},
}
pub fn provided_trait_methods(self, id: DefId) -> Vec<Rc<Method<'gcx>>> {
- if let Some(id) = self.map.as_local_node_id(id) {
- if let ItemTrait(.., ref ms) = self.map.expect_item(id).node {
- ms.iter().filter_map(|ti| {
- if let hir::MethodTraitItem(_, Some(_)) = ti.node {
- match self.impl_or_trait_item(self.map.local_def_id(ti.id)) {
- MethodTraitItem(m) => Some(m),
- _ => {
- bug!("provided_trait_methods(): \
- non-method item found from \
- looking up provided method?!")
- }
- }
- } else {
- None
- }
- }).collect()
- } else {
- bug!("provided_trait_methods: `{:?}` is not a trait", id)
+ self.impl_or_trait_items(id).iter().filter_map(|&def_id| {
+ match self.impl_or_trait_item(def_id) {
+ MethodTraitItem(ref m) if m.has_body => Some(m.clone()),
+ _ => None
}
- } else {
- self.sess.cstore.provided_trait_methods(self.global_tcx(), id)
- }
+ }).collect()
}
- pub fn associated_consts(self, id: DefId) -> Vec<Rc<AssociatedConst<'gcx>>> {
+ pub fn trait_impl_polarity(self, id: DefId) -> hir::ImplPolarity {
if let Some(id) = self.map.as_local_node_id(id) {
match self.map.expect_item(id).node {
- ItemTrait(.., ref tis) => {
- tis.iter().filter_map(|ti| {
- if let hir::ConstTraitItem(..) = ti.node {
- match self.impl_or_trait_item(self.map.local_def_id(ti.id)) {
- ConstTraitItem(ac) => Some(ac),
- _ => {
- bug!("associated_consts(): \
- non-const item found from \
- looking up a constant?!")
- }
- }
- } else {
- None
- }
- }).collect()
- }
- ItemImpl(.., ref iis) => {
- iis.iter().filter_map(|ii| {
- if let hir::ImplItemKind::Const(..) = ii.node {
- match self.impl_or_trait_item(self.map.local_def_id(ii.id)) {
- ConstTraitItem(ac) => Some(ac),
- _ => {
- bug!("associated_consts(): \
- non-const item found from \
- looking up a constant?!")
- }
- }
- } else {
- None
- }
- }).collect()
- }
- _ => {
- bug!("associated_consts: `{:?}` is not a trait or impl", id)
- }
- }
- } else {
- self.sess.cstore.associated_consts(self.global_tcx(), id)
- }
- }
-
- pub fn trait_impl_polarity(self, id: DefId) -> Option<hir::ImplPolarity> {
- if let Some(id) = self.map.as_local_node_id(id) {
- match self.map.find(id) {
- Some(ast_map::NodeItem(item)) => {
- match item.node {
- hir::ItemImpl(_, polarity, ..) => Some(polarity),
- _ => None
- }
- }
- _ => None
+ hir::ItemImpl(_, polarity, ..) => polarity,
+ ref item => bug!("trait_impl_polarity: {:?} not an impl", item)
}
} else {
self.sess.cstore.impl_polarity(id)
.expect("missing ImplOrTraitItem in metadata"))
}
- pub fn trait_item_def_ids(self, id: DefId) -> Rc<Vec<ImplOrTraitItemId>> {
+ pub fn impl_or_trait_items(self, id: DefId) -> Rc<Vec<DefId>> {
lookup_locally_or_in_crate_store(
- "trait_item_def_ids", id, &self.trait_item_def_ids,
- || Rc::new(self.sess.cstore.trait_item_def_ids(id)))
+ "impl_or_trait_items", id, &self.impl_or_trait_item_def_ids,
+ || Rc::new(self.sess.cstore.impl_or_trait_items(id)))
}
/// Returns the trait-ref corresponding to a given impl, or None if it is
|| self.sess.cstore.impl_trait_ref(self.global_tcx(), id))
}
- /// Returns whether this DefId refers to an impl
- pub fn is_impl(self, id: DefId) -> bool {
- if let Some(id) = self.map.as_local_node_id(id) {
- if let Some(ast_map::NodeItem(
- &hir::Item { node: hir::ItemImpl(..), .. })) = self.map.find(id) {
- true
- } else {
- false
- }
- } else {
- self.sess.cstore.is_impl(id)
- }
- }
-
/// Returns a path resolution for node id if it exists, panics otherwise.
pub fn expect_resolution(self, id: NodeId) -> PathResolution {
*self.def_map.borrow().get(&id).expect("no def-map entry for node id")
// or variant or their constructors, panics otherwise.
pub fn expect_variant_def(self, def: Def) -> VariantDef<'tcx> {
match def {
- Def::Variant(enum_did, did) => {
+ Def::Variant(did) => {
+ let enum_did = self.parent_def_id(did).unwrap();
self.lookup_adt_def(enum_did).variant_with_id(did)
}
Def::Struct(did) | Def::Union(did) => {
pub fn item_name(self, id: DefId) -> ast::Name {
if let Some(id) = self.map.as_local_node_id(id) {
self.map.name(id)
+ } else if id.index == CRATE_DEF_INDEX {
+ token::intern(&self.sess.cstore.original_crate_name(id.krate))
} else {
- self.sess.cstore.item_name(id)
+ let def_key = self.sess.cstore.def_key(id);
+ // The name of a StructCtor is that of its struct parent.
+ if let ast_map::DefPathData::StructCtor = def_key.disambiguated_data.data {
+ self.item_name(DefId {
+ krate: id.krate,
+ index: def_key.parent.unwrap()
+ })
+ } else {
+ def_key.disambiguated_data.data.get_opt_name().unwrap_or_else(|| {
+ bug!("item_name: no name for {:?}", self.def_path(id));
+ })
+ }
}
}
pub fn lookup_generics(self, did: DefId) -> &'gcx Generics<'gcx> {
lookup_locally_or_in_crate_store(
"generics", did, &self.generics,
- || self.sess.cstore.item_generics(self.global_tcx(), did))
+ || self.alloc_generics(self.sess.cstore.item_generics(self.global_tcx(), did)))
}
/// Given the did of an item, returns its full set of predicates.
debug!("populate_implementations_for_primitive_if_necessary: searching for {:?}",
primitive_def_id);
- let impl_items = self.sess.cstore.impl_items(primitive_def_id);
+ let impl_items = self.sess.cstore.impl_or_trait_items(primitive_def_id);
// Store the implementation info.
- self.impl_items.borrow_mut().insert(primitive_def_id, impl_items);
+ self.impl_or_trait_item_def_ids.borrow_mut().insert(primitive_def_id, Rc::new(impl_items));
self.populated_external_primitive_impls.borrow_mut().insert(primitive_def_id);
}
let inherent_impls = self.sess.cstore.inherent_implementations_for_type(type_id);
for &impl_def_id in &inherent_impls {
// Store the implementation info.
- let impl_items = self.sess.cstore.impl_items(impl_def_id);
- self.impl_items.borrow_mut().insert(impl_def_id, impl_items);
+ let impl_items = self.sess.cstore.impl_or_trait_items(impl_def_id);
+ self.impl_or_trait_item_def_ids.borrow_mut().insert(impl_def_id, Rc::new(impl_items));
}
self.inherent_impls.borrow_mut().insert(type_id, inherent_impls);
self.record_trait_has_default_impl(trait_id);
}
- for impl_def_id in self.sess.cstore.implementations_of_trait(trait_id) {
- let impl_items = self.sess.cstore.impl_items(impl_def_id);
+ for impl_def_id in self.sess.cstore.implementations_of_trait(Some(trait_id)) {
+ let impl_items = self.sess.cstore.impl_or_trait_items(impl_def_id);
let trait_ref = self.impl_trait_ref(impl_def_id).unwrap();
// Record the trait->implementation mapping.
- if let Some(parent) = self.sess.cstore.impl_parent(impl_def_id) {
- def.record_remote_impl(self, impl_def_id, trait_ref, parent);
- } else {
- def.record_remote_impl(self, impl_def_id, trait_ref, trait_id);
- }
+ let parent = self.sess.cstore.impl_parent(impl_def_id).unwrap_or(trait_id);
+ def.record_remote_impl(self, impl_def_id, trait_ref, parent);
// For any methods that use a default implementation, add them to
// the map. This is a bit unfortunate.
- for impl_item_def_id in &impl_items {
- let method_def_id = impl_item_def_id.def_id();
+ for &impl_item_def_id in &impl_items {
// load impl items eagerly for convenience
// FIXME: we may want to load these lazily
- self.impl_or_trait_item(method_def_id);
+ self.impl_or_trait_item(impl_item_def_id);
}
// Store the implementation info.
- self.impl_items.borrow_mut().insert(impl_def_id, impl_items);
+ self.impl_or_trait_item_def_ids.borrow_mut().insert(impl_def_id, Rc::new(impl_items));
}
def.flags.set(def.flags.get() | TraitFlags::IMPLS_VALID);
/// is already that of the original trait method, then the return value is
/// the same).
/// Otherwise, return `None`.
- pub fn trait_item_of_item(self, def_id: DefId) -> Option<ImplOrTraitItemId> {
+ pub fn trait_item_of_item(self, def_id: DefId) -> Option<DefId> {
let impl_or_trait_item = match self.impl_or_trait_items.borrow().get(&def_id) {
Some(m) => m.clone(),
None => return None,
};
match impl_or_trait_item.container() {
- TraitContainer(_) => Some(impl_or_trait_item.id()),
+ TraitContainer(_) => Some(impl_or_trait_item.def_id()),
ImplContainer(def_id) => {
self.trait_id_of_impl(def_id).and_then(|trait_did| {
let name = impl_or_trait_item.name();
self.trait_items(trait_did).iter()
.find(|item| item.name() == name)
- .map(|item| item.id())
+ .map(|item| item.def_id())
})
}
}
/// See `ParameterEnvironment` struct def'n for details.
/// If you were using `free_id: NodeId`, you might try `self.region_maps.item_extent(free_id)`
- /// for the `free_id_outlive` parameter. (But note that that is not always quite right.)
+ /// for the `free_id_outlive` parameter. (But note that this is not always quite right.)
pub fn construct_parameter_environment(self,
span: Span,
def_id: DefId,
}
/// The category of explicit self.
-#[derive(Clone, Copy, Eq, PartialEq, Debug)]
+#[derive(Clone, Copy, Eq, PartialEq, Debug, RustcEncodable, RustcDecodable)]
pub enum ExplicitSelfCategory<'tcx> {
Static,
ByValue,
use std::rc::Rc;
use syntax::abi;
-use syntax::ptr::P;
use hir;
}
}
-impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for P<[T]> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- self.iter().map(|t| t.fold_with(folder)).collect()
- }
-
- fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
- self.iter().any(|t| t.visit_with(visitor))
- }
-}
-
impl<'tcx> TypeFoldable<'tcx> for ty::TraitObject<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
ty::TraitObject {
}
}
-impl<'tcx> TypeFoldable<'tcx> for &'tcx [Ty<'tcx>] {
+impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Slice<Ty<'tcx>> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
let tys = self.iter().map(|t| t.fold_with(folder)).collect();
folder.tcx().mk_type_list(tys)
//! This module contains TypeVariants and its major components
-use middle::cstore;
use hir::def_id::DefId;
use middle::region;
use ty::subst::Substs;
-use ty::{self, AdtDef, ToPredicate, TypeFlags, Ty, TyCtxt, TyS, TypeFoldable};
+use ty::{self, AdtDef, ToPredicate, TypeFlags, Ty, TyCtxt, TypeFoldable};
+use ty::{Slice, TyS};
use util::common::ErrorReported;
use collections::enum_set::{self, EnumSet, CLike};
use std::fmt;
-use std::mem;
use std::ops;
use syntax::abi;
use syntax::ast::{self, Name};
use syntax::parse::token::{keywords, InternedString};
-use serialize::{Decodable, Decoder, Encodable, Encoder};
+use serialize;
use hir;
use self::InferTy::*;
use self::TypeVariants::*;
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct TypeAndMut<'tcx> {
pub ty: Ty<'tcx>,
pub mutbl: hir::Mutability,
// NB: If you change this, you'll probably want to change the corresponding
// AST structure in libsyntax/ast.rs as well.
-#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub enum TypeVariants<'tcx> {
/// The primitive boolean type. Written as `bool`.
TyBool,
TyNever,
/// A tuple type. For example, `(i32, bool)`.
- TyTuple(&'tcx [Ty<'tcx>]),
+ TyTuple(&'tcx Slice<Ty<'tcx>>),
/// The projection of an associated type. For example,
/// `<T as Trait<..>>::N`.
/// closure C wind up influencing the decisions we ought to make for
/// closure C (which would then require fixed point iteration to
/// handle). Plus it fixes an ICE. :P
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ClosureSubsts<'tcx> {
/// Lifetime and type parameters from the enclosing function.
/// These are separated out because trans wants to pass them around
/// The types of the upvars. The list parallels the freevars and
/// `upvar_borrows` lists. These are kept distinct so that we can
/// easily index into them.
- pub upvar_tys: &'tcx [Ty<'tcx>]
+ pub upvar_tys: &'tcx Slice<Ty<'tcx>>
}
-impl<'tcx> Encodable for ClosureSubsts<'tcx> {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- (self.func_substs, self.upvar_tys).encode(s)
- }
-}
-
-impl<'tcx> Decodable for ClosureSubsts<'tcx> {
- fn decode<D: Decoder>(d: &mut D) -> Result<ClosureSubsts<'tcx>, D::Error> {
- let (func_substs, upvar_tys) = Decodable::decode(d)?;
- cstore::tls::with_decoding_context(d, |dcx, _| {
- Ok(ClosureSubsts {
- func_substs: func_substs,
- upvar_tys: dcx.tcx().mk_type_list(upvar_tys)
- })
- })
- }
-}
-
-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct TraitObject<'tcx> {
pub principal: PolyExistentialTraitRef<'tcx>,
pub region_bound: &'tcx ty::Region,
/// Note that a `TraitRef` introduces a level of region binding, to
/// account for higher-ranked trait bounds like `T : for<'a> Foo<&'a
/// U>` or higher-ranked object types.
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct TraitRef<'tcx> {
pub def_id: DefId,
pub substs: &'tcx Substs<'tcx>,
///
/// The substitutions don't include the erased `Self`, only trait
/// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above).
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct ExistentialTraitRef<'tcx> {
pub def_id: DefId,
pub substs: &'tcx Substs<'tcx>,
/// erase, or otherwise "discharge" these bound regions, we change the
/// type from `Binder<T>` to just `T` (see
/// e.g. `liberate_late_bound_regions`).
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Binder<T>(pub T);
impl<T> Binder<T> {
/// Represents the projection of an associated type. In explicit UFCS
/// form this would be written `<T as Trait<..>>::N`.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ProjectionTy<'tcx> {
/// The trait reference `T as Trait<..>`.
pub trait_ref: ty::TraitRef<'tcx>,
pub item_name: Name,
}
-#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct BareFnTy<'tcx> {
pub unsafety: hir::Unsafety,
pub abi: abi::Abi,
pub sig: PolyFnSig<'tcx>,
}
-#[derive(Clone, PartialEq, Eq, Hash)]
+impl<'tcx> serialize::UseSpecializedDecodable for &'tcx BareFnTy<'tcx> {}
+
+#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct ClosureTy<'tcx> {
pub unsafety: hir::Unsafety,
pub abi: abi::Abi,
/// - `inputs` is the list of arguments and their modes.
/// - `output` is the return type.
/// - `variadic` indicates whether this is a variadic function. (only true for foreign fns)
-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct FnSig<'tcx> {
pub inputs: Vec<Ty<'tcx>>,
pub output: Ty<'tcx>,
}
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct ParamTy {
pub idx: u32,
pub name: Name,
ReErased,
}
-impl<'tcx> Decodable for &'tcx Region {
- fn decode<D: Decoder>(d: &mut D) -> Result<&'tcx Region, D::Error> {
- let r = Decodable::decode(d)?;
- cstore::tls::with_decoding_context(d, |dcx, _| {
- Ok(dcx.tcx().mk_region(r))
- })
- }
-}
+impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Region {}
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub struct EarlyBoundRegion {
pub name: Name,
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct TyVid {
pub index: u32,
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct IntVid {
pub index: u32
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct FloatVid {
pub index: u32
}
pub index: u32
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum InferTy {
TyVar(TyVid),
IntVar(IntVid),
}
/// A `ProjectionPredicate` for an `ExistentialTraitRef`.
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ExistentialProjection<'tcx> {
pub trait_ref: ExistentialTraitRef<'tcx>,
pub item_name: Name,
}
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct BuiltinBounds(EnumSet<BuiltinBound>);
impl<'a, 'gcx, 'tcx> BuiltinBounds {
#[derive(Clone, RustcEncodable, PartialEq, Eq, RustcDecodable, Hash,
Debug, Copy)]
-#[repr(usize)]
pub enum BuiltinBound {
- Send,
- Sized,
- Copy,
- Sync,
+ Send = 0,
+ Sized = 1,
+ Copy = 2,
+ Sync = 3,
}
impl CLike for BuiltinBound {
*self as usize
}
fn from_usize(v: usize) -> BuiltinBound {
- unsafe { mem::transmute(v) }
+ match v {
+ 0 => BuiltinBound::Send,
+ 1 => BuiltinBound::Sized,
+ 2 => BuiltinBound::Copy,
+ 3 => BuiltinBound::Sync,
+ _ => bug!("{} is not a valid BuiltinBound", v)
+ }
}
}
// Type substitutions.
-use middle::cstore;
use hir::def_id::DefId;
use ty::{self, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
-use serialize::{Encodable, Encoder, Decodable, Decoder};
+use serialize::{self, Encodable, Encoder, Decodable, Decoder};
use syntax_pos::{Span, DUMMY_SP};
use core::nonzero::NonZero;
}
}
+impl<'tcx> Encodable for Kind<'tcx> {
+ fn encode<E: Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
+ e.emit_enum("Kind", |e| {
+ if let Some(ty) = self.as_type() {
+ e.emit_enum_variant("Ty", TYPE_TAG, 1, |e| {
+ e.emit_enum_variant_arg(0, |e| ty.encode(e))
+ })
+ } else if let Some(r) = self.as_region() {
+ e.emit_enum_variant("Region", REGION_TAG, 1, |e| {
+ e.emit_enum_variant_arg(0, |e| r.encode(e))
+ })
+ } else {
+ bug!()
+ }
+ })
+ }
+}
+
+impl<'tcx> Decodable for Kind<'tcx> {
+ fn decode<D: Decoder>(d: &mut D) -> Result<Kind<'tcx>, D::Error> {
+ d.read_enum("Kind", |d| {
+ d.read_enum_variant(&["Ty", "Region"], |d, tag| {
+ match tag {
+ TYPE_TAG => Ty::decode(d).map(Kind::from),
+ REGION_TAG => <&ty::Region>::decode(d).map(Kind::from),
+ _ => Err(d.error("invalid Kind tag"))
+ }
+ })
+ })
+ }
+}
+
/// A substitution mapping type/region parameters to new values.
-#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+#[derive(Clone, PartialEq, Eq, Debug, Hash, RustcEncodable, RustcDecodable)]
pub struct Substs<'tcx> {
params: Vec<Kind<'tcx>>
}
}
}
-impl<'tcx> Encodable for &'tcx Substs<'tcx> {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- cstore::tls::with_encoding_context(s, |ecx, rbml_w| {
- ecx.encode_substs(rbml_w, self);
- Ok(())
- })
- }
-}
-
-impl<'tcx> Decodable for &'tcx Substs<'tcx> {
- fn decode<D: Decoder>(d: &mut D) -> Result<&'tcx Substs<'tcx>, D::Error> {
- let substs = cstore::tls::with_decoding_context(d, |dcx, rbml_r| {
- dcx.decode_substs(rbml_r)
- });
-
- Ok(substs)
- }
-}
-
+impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Substs<'tcx> {}
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
use ty::fast_reject;
use ty::{Ty, TyCtxt, TraitRef};
use std::cell::{Cell, RefCell};
-use syntax::ast::Name;
use hir;
use util::nodemap::FnvHashMap;
pub trait_ref: ty::TraitRef<'tcx>,
- /// A list of the associated types defined in this trait. Useful
- /// for resolving `X::Foo` type markers.
- pub associated_type_names: Vec<Name>,
-
// Impls of a trait. To allow for quicker lookup, the impls are indexed by a
// simplified version of their `Self` type: impls with a simplifiable `Self`
// are stored in `nonblanket_impls` keyed by it, while all other impls are
paren_sugar: bool,
generics: &'tcx ty::Generics<'tcx>,
trait_ref: ty::TraitRef<'tcx>,
- associated_type_names: Vec<Name>,
def_path_hash: u64)
-> TraitDef<'tcx> {
TraitDef {
unsafety: unsafety,
generics: generics,
trait_ref: trait_ref,
- associated_type_names: associated_type_names,
nonblanket_impls: RefCell::new(FnvHashMap()),
blanket_impls: RefCell::new(vec![]),
flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS),
pub fn enum_repr_type(self, opt_hint: Option<&attr::ReprAttr>) -> attr::IntType {
match opt_hint {
// Feed in the given type
- Some(&attr::ReprInt(_, int_t)) => int_t,
+ Some(&attr::ReprInt(int_t)) => int_t,
// ... but provide sensible default if none provided
//
// NB. Historically `fn enum_variants` generate i64 here, while
/// Creates a hash of the type `Ty` which will be the same no matter what crate
/// context it's calculated within. This is used by the `type_id` intrinsic.
pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
- let mut hasher = TypeIdHasher {
- tcx: self,
- state: SipHasher::new()
- };
+ let mut hasher = TypeIdHasher::new(self, SipHasher::new());
hasher.visit_ty(ty);
- hasher.state.finish()
+ hasher.finish()
}
/// Returns true if this ADT is a dtorck type.
}
}
-struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+pub struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a, H> {
tcx: TyCtxt<'a, 'gcx, 'tcx>,
- state: SipHasher
+ state: H
}
-impl<'a, 'gcx, 'tcx> TypeIdHasher<'a, 'gcx, 'tcx> {
- fn hash<T: Hash>(&mut self, x: T) {
+impl<'a, 'gcx, 'tcx, H: Hasher> TypeIdHasher<'a, 'gcx, 'tcx, H> {
+ pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, state: H) -> Self {
+ TypeIdHasher {
+ tcx: tcx,
+ state: state
+ }
+ }
+
+ pub fn hash<T: Hash>(&mut self, x: T) {
x.hash(&mut self.state);
}
+ pub fn finish(self) -> u64 {
+ self.state.finish()
+ }
+
fn hash_discriminant_u8<T>(&mut self, x: &T) {
let v = unsafe {
intrinsics::discriminant_value(x)
}
}
-impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx> {
+impl<'a, 'gcx, 'tcx, H: Hasher> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx, H> {
fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool {
// Distinguish between the Ty variants uniformly.
self.hash_discriminant_u8(&ty.sty);
TyInt(i) => self.hash(i),
TyUint(u) => self.hash(u),
TyFloat(f) => self.hash(f),
- TyAdt(d, _) => self.def_id(d.did),
- TyArray(_, n) => self.hash(n),
+ TyArray(_, n) => self.hash(n as u64),
TyRawPtr(m) |
TyRef(_, m) => self.hash(m.mutbl),
TyClosure(def_id, _) |
TyAnon(def_id, _) |
TyFnDef(def_id, ..) => self.def_id(def_id),
+ TyAdt(d, _) => self.def_id(d.did),
TyFnPtr(f) => {
self.hash(f.unsafety);
self.hash(f.abi);
self.hash(f.sig.variadic());
+ self.hash(f.sig.inputs().skip_binder().len() as u64);
}
TyTrait(ref data) => {
self.def_id(data.principal.def_id());
self.hash(data.builtin_bounds);
}
TyTuple(tys) => {
- self.hash(tys.len());
+ self.hash(tys.len() as u64);
}
TyParam(p) => {
self.hash(p.idx);
TyChar |
TyStr |
TyBox(_) |
- TySlice(_) |
- TyError => {}
- TyInfer(_) => bug!()
+ TySlice(_) => {}
+
+ TyError |
+ TyInfer(_) => bug!("TypeIdHasher: unexpected type {}", ty)
}
ty.super_visit_with(self)
fn visit_region(&mut self, r: &'tcx ty::Region) -> bool {
match *r {
- ty::ReStatic | ty::ReErased => {
+ ty::ReErased => {
self.hash::<u32>(0);
}
ty::ReLateBound(db, ty::BrAnon(i)) => {
self.hash::<u32>(db.depth);
self.hash(i);
}
+ ty::ReStatic |
ty::ReEmpty |
ty::ReEarlyBound(..) |
ty::ReLateBound(..) |
ty::ReScope(..) |
ty::ReVar(..) |
ty::ReSkolemized(..) => {
- bug!("unexpected region found when hashing a type")
+ bug!("TypeIdHasher: unexpected region {:?}", r)
}
}
false
}
}
+ let rec_limit = tcx.sess.recursion_limit.get();
+ let depth = tcx.layout_depth.get();
+ if depth > rec_limit {
+ tcx.sess.fatal(
+ &format!("overflow representing the type `{}`", self));
+ }
+
+ tcx.layout_depth.set(depth+1);
let layout = Layout::compute_uncached(self, infcx)?;
if can_cache {
tcx.layout_cache.borrow_mut().insert(self, layout);
}
+ tcx.layout_depth.set(depth);
Ok(layout)
}
let mut sep = " ";
tcx.with_freevars(node_id, |freevars| {
for (freevar, upvar_ty) in freevars.iter().zip(substs.upvar_tys) {
- let node_id = freevar.def.var_id();
+ let def_id = freevar.def.def_id();
+ let node_id = tcx.map.as_local_node_id(def_id).unwrap();
write!(f,
"{}{}:{}",
sep,
extern crate serialize;
#[macro_use] extern crate log;
+extern crate serialize as rustc_serialize; // used by deriving
+
pub mod tempdir;
-pub mod rpath;
pub mod sha2;
pub mod target;
pub mod slice;
pub mod dynamic_lib;
+
+use serialize::json::{Json, ToJson};
+
+#[derive(Clone, Copy, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)]
+pub enum PanicStrategy {
+ Unwind,
+ Abort,
+}
+
+impl PanicStrategy {
+ pub fn desc(&self) -> &str {
+ match *self {
+ PanicStrategy::Unwind => "unwind",
+ PanicStrategy::Abort => "abort",
+ }
+ }
+}
+
+impl ToJson for PanicStrategy {
+ fn to_json(&self) -> Json {
+ match *self {
+ PanicStrategy::Abort => "abort".to_json(),
+ PanicStrategy::Unwind => "unwind".to_json(),
+ }
+ }
+}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::collections::HashSet;
-use std::env;
-use std::path::{Path, PathBuf};
-use std::fs;
-use syntax::ast;
-
-pub struct RPathConfig<'a> {
- pub used_crates: Vec<(ast::CrateNum, Option<PathBuf>)>,
- pub out_filename: PathBuf,
- pub is_like_osx: bool,
- pub has_rpath: bool,
- pub linker_is_gnu: bool,
- pub get_install_prefix_lib_path: &'a mut FnMut() -> PathBuf,
-}
-
-pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {
- // No rpath on windows
- if !config.has_rpath {
- return Vec::new();
- }
-
- let mut flags = Vec::new();
-
- debug!("preparing the RPATH!");
-
- let libs = config.used_crates.clone();
- let libs = libs.into_iter().filter_map(|(_, l)| l).collect::<Vec<_>>();
- let rpaths = get_rpaths(config, &libs[..]);
- flags.extend_from_slice(&rpaths_to_flags(&rpaths[..]));
-
- // Use DT_RUNPATH instead of DT_RPATH if available
- if config.linker_is_gnu {
- flags.push("-Wl,--enable-new-dtags".to_string());
- }
-
- flags
-}
-
-fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
- let mut ret = Vec::new();
- for rpath in rpaths {
- ret.push(format!("-Wl,-rpath,{}", &(*rpath)));
- }
- return ret;
-}
-
-fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec<String> {
- debug!("output: {:?}", config.out_filename.display());
- debug!("libs:");
- for libpath in libs {
- debug!(" {:?}", libpath.display());
- }
-
- // Use relative paths to the libraries. Binaries can be moved
- // as long as they maintain the relative relationship to the
- // crates they depend on.
- let rel_rpaths = get_rpaths_relative_to_output(config, libs);
-
- // And a final backup rpath to the global library location.
- let fallback_rpaths = vec!(get_install_prefix_rpath(config));
-
- fn log_rpaths(desc: &str, rpaths: &[String]) {
- debug!("{} rpaths:", desc);
- for rpath in rpaths {
- debug!(" {}", *rpath);
- }
- }
-
- log_rpaths("relative", &rel_rpaths[..]);
- log_rpaths("fallback", &fallback_rpaths[..]);
-
- let mut rpaths = rel_rpaths;
- rpaths.extend_from_slice(&fallback_rpaths[..]);
-
- // Remove duplicates
- let rpaths = minimize_rpaths(&rpaths[..]);
- return rpaths;
-}
-
-fn get_rpaths_relative_to_output(config: &mut RPathConfig,
- libs: &[PathBuf]) -> Vec<String> {
- libs.iter().map(|a| get_rpath_relative_to_output(config, a)).collect()
-}
-
-fn get_rpath_relative_to_output(config: &mut RPathConfig, lib: &Path) -> String {
- // Mac doesn't appear to support $ORIGIN
- let prefix = if config.is_like_osx {
- "@loader_path"
- } else {
- "$ORIGIN"
- };
-
- let cwd = env::current_dir().unwrap();
- let mut lib = fs::canonicalize(&cwd.join(lib)).unwrap_or(cwd.join(lib));
- lib.pop();
- let mut output = cwd.join(&config.out_filename);
- output.pop();
- let output = fs::canonicalize(&output).unwrap_or(output);
- let relative = path_relative_from(&lib, &output)
- .expect(&format!("couldn't create relative path from {:?} to {:?}", output, lib));
- // FIXME (#9639): This needs to handle non-utf8 paths
- format!("{}/{}", prefix,
- relative.to_str().expect("non-utf8 component in path"))
-}
-
-// This routine is adapted from the *old* Path's `path_relative_from`
-// function, which works differently from the new `relative_from` function.
-// In particular, this handles the case on unix where both paths are
-// absolute but with only the root as the common directory.
-fn path_relative_from(path: &Path, base: &Path) -> Option<PathBuf> {
- use std::path::Component;
-
- if path.is_absolute() != base.is_absolute() {
- if path.is_absolute() {
- Some(PathBuf::from(path))
- } else {
- None
- }
- } else {
- let mut ita = path.components();
- let mut itb = base.components();
- let mut comps: Vec<Component> = vec![];
- loop {
- match (ita.next(), itb.next()) {
- (None, None) => break,
- (Some(a), None) => {
- comps.push(a);
- comps.extend(ita.by_ref());
- break;
- }
- (None, _) => comps.push(Component::ParentDir),
- (Some(a), Some(b)) if comps.is_empty() && a == b => (),
- (Some(a), Some(b)) if b == Component::CurDir => comps.push(a),
- (Some(_), Some(b)) if b == Component::ParentDir => return None,
- (Some(a), Some(_)) => {
- comps.push(Component::ParentDir);
- for _ in itb {
- comps.push(Component::ParentDir);
- }
- comps.push(a);
- comps.extend(ita.by_ref());
- break;
- }
- }
- }
- Some(comps.iter().map(|c| c.as_os_str()).collect())
- }
-}
-
-
-fn get_install_prefix_rpath(config: &mut RPathConfig) -> String {
- let path = (config.get_install_prefix_lib_path)();
- let path = env::current_dir().unwrap().join(&path);
- // FIXME (#9639): This needs to handle non-utf8 paths
- path.to_str().expect("non-utf8 component in rpath").to_string()
-}
-
-fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
- let mut set = HashSet::new();
- let mut minimized = Vec::new();
- for rpath in rpaths {
- if set.insert(&rpath[..]) {
- minimized.push(rpath.clone());
- }
- }
- minimized
-}
-
-#[cfg(all(unix, test))]
-mod tests {
- use super::{RPathConfig};
- use super::{minimize_rpaths, rpaths_to_flags, get_rpath_relative_to_output};
- use std::path::{Path, PathBuf};
-
- #[test]
- fn test_rpaths_to_flags() {
- let flags = rpaths_to_flags(&[
- "path1".to_string(),
- "path2".to_string()
- ]);
- assert_eq!(flags,
- ["-Wl,-rpath,path1",
- "-Wl,-rpath,path2"]);
- }
-
- #[test]
- fn test_minimize1() {
- let res = minimize_rpaths(&[
- "rpath1".to_string(),
- "rpath2".to_string(),
- "rpath1".to_string()
- ]);
- assert!(res == [
- "rpath1",
- "rpath2",
- ]);
- }
-
- #[test]
- fn test_minimize2() {
- let res = minimize_rpaths(&[
- "1a".to_string(),
- "2".to_string(),
- "2".to_string(),
- "1a".to_string(),
- "4a".to_string(),
- "1a".to_string(),
- "2".to_string(),
- "3".to_string(),
- "4a".to_string(),
- "3".to_string()
- ]);
- assert!(res == [
- "1a",
- "2",
- "4a",
- "3",
- ]);
- }
-
- #[test]
- fn test_rpath_relative() {
- if cfg!(target_os = "macos") {
- let config = &mut RPathConfig {
- used_crates: Vec::new(),
- has_rpath: true,
- is_like_osx: true,
- linker_is_gnu: false,
- out_filename: PathBuf::from("bin/rustc"),
- get_install_prefix_lib_path: &mut || panic!(),
- };
- let res = get_rpath_relative_to_output(config,
- Path::new("lib/libstd.so"));
- assert_eq!(res, "@loader_path/../lib");
- } else {
- let config = &mut RPathConfig {
- used_crates: Vec::new(),
- out_filename: PathBuf::from("bin/rustc"),
- get_install_prefix_lib_path: &mut || panic!(),
- has_rpath: true,
- is_like_osx: false,
- linker_is_gnu: true,
- };
- let res = get_rpath_relative_to_output(config,
- Path::new("lib/libstd.so"));
- assert_eq!(res, "$ORIGIN/../lib");
- }
- }
-}
--- /dev/null
+// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use target::TargetOptions;
+use std::default::Default;
+
+pub fn opts() -> TargetOptions {
+ TargetOptions {
+ linker: "cc".to_string(),
+ dynamic_linking: true,
+ executables: true,
+ has_rpath: true,
+ linker_is_gnu: true,
+ .. Default::default()
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use target::{Target, TargetResult};
+
+pub fn target() -> TargetResult {
+ let mut base = super::haiku_base::opts();
+ base.cpu = "pentium4".to_string();
+ base.max_atomic_width = 64;
+ base.pre_link_args.push("-m32".to_string());
+
+ Ok(Target {
+ llvm_target: "i686-unknown-haiku".to_string(),
+ target_endian: "little".to_string(),
+ target_pointer_width: "32".to_string(),
+ data_layout: "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128".to_string(),
+ arch: "x86".to_string(),
+ target_os: "haiku".to_string(),
+ target_env: "".to_string(),
+ target_vendor: "unknown".to_string(),
+ options: base,
+ })
+}
llvm_target: "mips64-unknown-linux-gnuabi64".to_string(),
target_endian: "big".to_string(),
target_pointer_width: "64".to_string(),
- data_layout: "E-m:m-i8:8:32-i16:16:32-i64:64-n32:64-S128".to_string(),
+ data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".to_string(),
arch: "mips64".to_string(),
target_os: "linux".to_string(),
target_env: "gnu".to_string(),
llvm_target: "mips64el-unknown-linux-gnuabi64".to_string(),
target_endian: "little".to_string(),
target_pointer_width: "64".to_string(),
- data_layout: "e-m:m-i8:8:32-i16:16:32-i64:64-n32:64-S128".to_string(),
+ data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".to_string(),
arch: "mips64".to_string(),
target_os: "linux".to_string(),
target_env: "gnu".to_string(),
use std::io::prelude::*;
use syntax::abi::Abi;
+use PanicStrategy;
+
mod android_base;
mod apple_base;
mod apple_ios_base;
mod bitrig_base;
mod dragonfly_base;
mod freebsd_base;
+mod haiku_base;
mod linux_base;
mod linux_musl_base;
mod openbsd_base;
("x86_64-unknown-netbsd", x86_64_unknown_netbsd),
("x86_64-rumprun-netbsd", x86_64_rumprun_netbsd),
+    ("i686-unknown-haiku", i686_unknown_haiku),
+    ("x86_64-unknown-haiku", x86_64_unknown_haiku),
+
("x86_64-apple-darwin", x86_64_apple_darwin),
("i686-apple-darwin", i686_apple_darwin),
/// Maximum integer size in bits that this target can perform atomic
/// operations on.
pub max_atomic_width: u64,
+
+ /// Panic strategy: "unwind" or "abort"
+ pub panic_strategy: PanicStrategy,
}
impl Default for TargetOptions {
has_elf_tls: false,
obj_is_bitcode: false,
max_atomic_width: 0,
+ panic_strategy: PanicStrategy::Unwind,
}
}
}
.map(|o| o.as_u64()
.map(|s| base.options.$key_name = s));
} );
+ ($key_name:ident, PanicStrategy) => ( {
+ let name = (stringify!($key_name)).replace("_", "-");
+ obj.find(&name[..]).and_then(|o| o.as_string().and_then(|s| {
+ match s {
+ "unwind" => base.options.$key_name = PanicStrategy::Unwind,
+ "abort" => base.options.$key_name = PanicStrategy::Abort,
+ _ => return Some(Err(format!("'{}' is not a valid value for \
+ panic-strategy. Use 'unwind' or 'abort'.",
+ s))),
+ }
+ Some(Ok(()))
+ })).unwrap_or(Ok(()))
+ } );
($key_name:ident, list) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.find(&name[..]).map(|o| o.as_array()
key!(has_elf_tls, bool);
key!(obj_is_bitcode, bool);
key!(max_atomic_width, u64);
+ try!(key!(panic_strategy, PanicStrategy));
Ok(base)
}
target_option_val!(has_elf_tls);
target_option_val!(obj_is_bitcode);
target_option_val!(max_atomic_width);
+ target_option_val!(panic_strategy);
Json::Object(d)
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use target::{Target, TargetResult};
+
+pub fn target() -> TargetResult {
+ let mut base = super::haiku_base::opts();
+ base.cpu = "x86-64".to_string();
+ base.max_atomic_width = 64;
+ base.pre_link_args.push("-m64".to_string());
+
+ Ok(Target {
+ llvm_target: "x86_64-unknown-haiku".to_string(),
+ target_endian: "little".to_string(),
+ target_pointer_width: "64".to_string(),
+ data_layout: "e-m:e-i64:64-f80:128-n8:16:32:64-S128".to_string(),
+ arch: "x86_64".to_string(),
+ target_os: "haiku".to_string(),
+ target_env: "".to_string(),
+ target_vendor: "unknown".to_string(),
+ options: base,
+ })
+}
});
}
repr::StatementKind::StorageLive(_) |
- repr::StatementKind::StorageDead(_) => {}
+ repr::StatementKind::StorageDead(_) |
+ repr::StatementKind::Nop => {}
}
}
};
assert!(args.len() == 1);
let peek_arg_lval = match args[0] {
- repr::Operand::Consume(ref lval @ repr::Lvalue::Temp(_)) => {
- lval
- }
- repr::Operand::Consume(_) |
- repr::Operand::Constant(_) => {
+ repr::Operand::Consume(ref lval @ repr::Lvalue::Local(_)) => Some(lval),
+ _ => None,
+ };
+
+ let peek_arg_lval = match peek_arg_lval {
+ Some(arg) => arg,
+ None => {
tcx.sess.diagnostic().span_err(
span, "dataflow::sanity_check cannot feed a non-temp to rustc_peek.");
return;
(lvalue, rvalue)
}
repr::StatementKind::StorageLive(_) |
- repr::StatementKind::StorageDead(_) => continue,
+ repr::StatementKind::StorageDead(_) |
+ repr::StatementKind::Nop => continue,
repr::StatementKind::SetDiscriminant{ .. } =>
span_bug!(stmt.source_info.span,
"sanity_check should run before Deaggregator inserts SetDiscriminant"),
env: &'a MoveDataParamEnv<'tcx>,
flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
- drop_flags: FnvHashMap<MovePathIndex, Temp>,
+ drop_flags: FnvHashMap<MovePathIndex, Local>,
patch: MirPatch<'tcx>,
}
}
fn drop_flag(&mut self, index: MovePathIndex) -> Option<Lvalue<'tcx>> {
- self.drop_flags.get(&index).map(|t| Lvalue::Temp(*t))
+ self.drop_flags.get(&index).map(|t| Lvalue::Local(*t))
}
/// create a patch that elaborates all drops in the input
statements.push(Statement {
source_info: c.source_info,
kind: StatementKind::Assign(
- Lvalue::Temp(flag),
+ Lvalue::Local(flag),
self.constant_bool(c.source_info.span, false)
)
});
}
let tcx = self.tcx;
- let unit_temp = Lvalue::Temp(self.patch.new_temp(tcx.mk_nil()));
+ let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
let free_func = tcx.lang_items.require(lang_items::BoxFreeFnLangItem)
.unwrap_or_else(|e| tcx.sess.fatal(&e));
let substs = Substs::new(tcx, iter::once(Kind::from(ty)));
if let Some(&flag) = self.drop_flags.get(&path) {
let span = self.patch.source_info_for_location(self.mir, loc).span;
let val = self.constant_bool(span, val.value());
- self.patch.add_assign(loc, Lvalue::Temp(flag), val);
+ self.patch.add_assign(loc, Lvalue::Local(flag), val);
}
}
let span = self.patch.source_info_for_location(self.mir, loc).span;
let false_ = self.constant_bool(span, false);
for flag in self.drop_flags.values() {
- self.patch.add_assign(loc, Lvalue::Temp(*flag), false_.clone());
+ self.patch.add_assign(loc, Lvalue::Local(*flag), false_.clone());
}
}
/// Tables mapping from an l-value to its MovePathIndex.
#[derive(Debug)]
pub struct MovePathLookup<'tcx> {
- vars: IndexVec<Var, MovePathIndex>,
- temps: IndexVec<Temp, MovePathIndex>,
- args: IndexVec<Arg, MovePathIndex>,
-
- /// The move path representing the return value is constructed
- /// lazily when we first encounter it in the input MIR.
- return_ptr: Option<MovePathIndex>,
+ locals: IndexVec<Local, MovePathIndex>,
/// projections are made from a base-lvalue and a projection
/// elem. The base-lvalue will have a unique MovePathIndex; we use
moves: IndexVec::new(),
loc_map: LocationMap::new(mir),
rev_lookup: MovePathLookup {
- vars: mir.var_decls.indices().map(Lvalue::Var).map(|v| {
+ locals: mir.local_decls.indices().map(Lvalue::Local).map(|v| {
Self::new_move_path(&mut move_paths, &mut path_map, None, v)
}).collect(),
- temps: mir.temp_decls.indices().map(Lvalue::Temp).map(|t| {
- Self::new_move_path(&mut move_paths, &mut path_map, None, t)
- }).collect(),
- args: mir.arg_decls.indices().map(Lvalue::Arg).map(|a| {
- Self::new_move_path(&mut move_paths, &mut path_map, None, a)
- }).collect(),
- return_ptr: None,
projections: FnvHashMap(),
},
move_paths: move_paths,
{
debug!("lookup({:?})", lval);
match *lval {
- Lvalue::Var(var) => Ok(self.data.rev_lookup.vars[var]),
- Lvalue::Arg(arg) => Ok(self.data.rev_lookup.args[arg]),
- Lvalue::Temp(temp) => Ok(self.data.rev_lookup.temps[temp]),
+ Lvalue::Local(local) => Ok(self.data.rev_lookup.locals[local]),
// error: can't move out of a static
Lvalue::Static(..) => Err(MovePathError::IllegalMove),
- Lvalue::ReturnPointer => match self.data.rev_lookup.return_ptr {
- Some(ptr) => Ok(ptr),
- ref mut ptr @ None => {
- let path = Self::new_move_path(
- &mut self.data.move_paths,
- &mut self.data.path_map,
- None,
- lval.clone());
- *ptr = Some(path);
- Ok(path)
- }
- },
Lvalue::Projection(ref proj) => {
self.move_path_for_projection(lval, proj)
}
// parent.
pub fn find(&self, lval: &Lvalue<'tcx>) -> LookupResult {
match *lval {
- Lvalue::Var(var) => LookupResult::Exact(self.vars[var]),
- Lvalue::Temp(temp) => LookupResult::Exact(self.temps[temp]),
- Lvalue::Arg(arg) => LookupResult::Exact(self.args[arg]),
+ Lvalue::Local(local) => LookupResult::Exact(self.locals[local]),
Lvalue::Static(..) => LookupResult::Parent(None),
- Lvalue::ReturnPointer => LookupResult::Exact(self.return_ptr.unwrap()),
Lvalue::Projection(ref proj) => {
match self.find(&proj.base) {
LookupResult::Exact(base_path) => {
span_bug!(stmt.source_info.span,
"SetDiscriminant should not exist during borrowck");
}
+ StatementKind::Nop => {}
}
}
TerminatorKind::Unreachable => { }
TerminatorKind::Return => {
- self.gather_move(loc, &Lvalue::ReturnPointer);
+ self.gather_move(loc, &Lvalue::Local(RETURN_POINTER));
}
TerminatorKind::If { .. } |
where F: FnMut(MovePathIndex, DropFlagState)
{
let move_data = &ctxt.move_data;
- for (arg, _) in mir.arg_decls.iter_enumerated() {
- let lvalue = repr::Lvalue::Arg(arg);
+ for arg in mir.args_iter() {
+ let lvalue = repr::Lvalue::Local(arg);
let lookup_result = move_data.rev_lookup.find(&lvalue);
on_lookup_result_bits(tcx, mir, move_data,
lookup_result,
|moi| callback(moi, DropFlagState::Present))
}
repr::StatementKind::StorageLive(_) |
- repr::StatementKind::StorageDead(_) => {}
+ repr::StatementKind::StorageDead(_) |
+ repr::StatementKind::Nop => {}
},
None => {
debug!("drop_flag_effects: replace {:?}", block.terminator());
patch_map: IndexVec<BasicBlock, Option<TerminatorKind<'tcx>>>,
new_blocks: Vec<BasicBlockData<'tcx>>,
new_statements: Vec<(Location, StatementKind<'tcx>)>,
- new_temps: Vec<TempDecl<'tcx>>,
+ new_locals: Vec<LocalDecl<'tcx>>,
resume_block: BasicBlock,
- next_temp: usize,
+ next_local: usize,
}
impl<'tcx> MirPatch<'tcx> {
let mut result = MirPatch {
patch_map: IndexVec::from_elem(None, mir.basic_blocks()),
new_blocks: vec![],
- new_temps: vec![],
new_statements: vec![],
- next_temp: mir.temp_decls.len(),
+ new_locals: vec![],
+ next_local: mir.local_decls.len(),
resume_block: START_BLOCK
};
}
}
- pub fn new_temp(&mut self, ty: Ty<'tcx>) -> Temp {
- let index = self.next_temp;
- self.next_temp += 1;
- self.new_temps.push(TempDecl { ty: ty });
- Temp::new(index as usize)
+ pub fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
+ let index = self.next_local;
+ self.next_local += 1;
+ self.new_locals.push(LocalDecl::new_temp(ty));
+ Local::new(index as usize)
}
pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock {
pub fn apply(self, mir: &mut Mir<'tcx>) {
debug!("MirPatch: {:?} new temps, starting from index {}: {:?}",
- self.new_temps.len(), mir.temp_decls.len(), self.new_temps);
+ self.new_locals.len(), mir.local_decls.len(), self.new_locals);
debug!("MirPatch: {} new blocks, starting from index {}",
self.new_blocks.len(), mir.basic_blocks().len());
mir.basic_blocks_mut().extend(self.new_blocks);
- mir.temp_decls.extend(self.new_temps);
+ mir.local_decls.extend(self.new_locals);
for (src, patch) in self.patch_map.into_iter_enumerated() {
if let Some(patch) = patch {
debug!("MirPatch: patching block {:?}", src);
use rustc::middle::mem_categorization::{cmt};
use rustc::hir::pat_util::*;
use rustc::traits::Reveal;
-use rustc::ty::*;
-use rustc::ty;
+use rustc::ty::{self, Ty, TyCtxt};
use std::cmp::Ordering;
use std::fmt;
use std::iter::{FromIterator, IntoIterator, repeat};
use syntax::ast::{self, DUMMY_NODE_ID, NodeId};
use syntax::codemap::Spanned;
use syntax_pos::{Span, DUMMY_SP};
-use rustc::hir::fold::{Folder, noop_fold_pat};
use rustc::hir::print::pat_to_string;
use syntax::ptr::P;
+use syntax::util::move_map::MoveMap;
use rustc::util::common::ErrorReported;
-use rustc::util::nodemap::FnvHashMap;
pub const DUMMY_WILD_PAT: &'static Pat = &Pat {
id: DUMMY_NODE_ID,
//NOTE: appears to be the only place other then InferCtxt to contain a ParamEnv
pub struct MatchCheckCtxt<'a, 'tcx: 'a> {
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pub param_env: ParameterEnvironment<'tcx>,
+ pub param_env: ty::ParameterEnvironment<'tcx>,
}
#[derive(Clone, Debug, PartialEq)]
}
}
- let mut static_inliner = StaticInliner::new(cx.tcx, None);
+ let mut static_inliner = StaticInliner::new(cx.tcx);
let inlined_arms = arms.iter().map(|arm| {
(arm.pats.iter().map(|pat| {
static_inliner.fold_pat((*pat).clone())
if edef.is_enum() {
if let Def::Local(..) = cx.tcx.expect_def(p.id) {
if edef.variants.iter().any(|variant| {
- variant.name == name.node && variant.kind == VariantKind::Unit
+ variant.name == name.node && variant.kind == ty::VariantKind::Unit
}) {
let ty_path = cx.tcx.item_path_str(edef.did);
let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170,
_ => bug!()
};
P(hir::Expr {
- id: 0,
+ id: DUMMY_NODE_ID,
node: hir::ExprLit(P(Spanned { node: node, span: DUMMY_SP })),
span: DUMMY_SP,
attrs: ast::ThinVec::new(),
})
}
-pub struct StaticInliner<'a, 'tcx: 'a> {
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pub failed: bool,
- pub renaming_map: Option<&'a mut FnvHashMap<(NodeId, Span), NodeId>>,
+struct StaticInliner<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ failed: bool
}
impl<'a, 'tcx> StaticInliner<'a, 'tcx> {
- pub fn new<'b>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
- renaming_map: Option<&'b mut FnvHashMap<(NodeId, Span), NodeId>>)
- -> StaticInliner<'b, 'tcx> {
+ pub fn new<'b>(tcx: TyCtxt<'b, 'tcx, 'tcx>) -> StaticInliner<'b, 'tcx> {
StaticInliner {
tcx: tcx,
- failed: false,
- renaming_map: renaming_map
+ failed: false
}
}
}
-struct RenamingRecorder<'map> {
- substituted_node_id: NodeId,
- origin_span: Span,
- renaming_map: &'map mut FnvHashMap<(NodeId, Span), NodeId>
-}
-
-impl<'v, 'map> Visitor<'v> for RenamingRecorder<'map> {
- fn visit_id(&mut self, node_id: NodeId) {
- let key = (node_id, self.origin_span);
- self.renaming_map.insert(key, self.substituted_node_id);
- }
-}
-
-impl<'a, 'tcx> Folder for StaticInliner<'a, 'tcx> {
+impl<'a, 'tcx> StaticInliner<'a, 'tcx> {
fn fold_pat(&mut self, pat: P<Pat>) -> P<Pat> {
- return match pat.node {
+ match pat.node {
PatKind::Path(..) => {
match self.tcx.expect_def(pat.id) {
Def::AssociatedConst(did) | Def::Const(did) => {
let substs = Some(self.tcx.node_id_item_substs(pat.id).substs);
if let Some((const_expr, _)) = lookup_const_by_id(self.tcx, did, substs) {
match const_expr_to_pat(self.tcx, const_expr, pat.id, pat.span) {
- Ok(new_pat) => {
- if let Some(ref mut map) = self.renaming_map {
- // Record any renamings we do here
- record_renamings(const_expr, &pat, map);
- }
- new_pat
- }
+ Ok(new_pat) => return new_pat,
Err(def_id) => {
self.failed = true;
self.tcx.sess.span_err(
&format!("constants of the type `{}` \
cannot be used in patterns",
self.tcx.item_path_str(def_id)));
- pat
}
}
} else {
self.failed = true;
span_err!(self.tcx.sess, pat.span, E0158,
"statics cannot be referenced in patterns");
- pat
}
}
- _ => noop_fold_pat(pat, self)
+ _ => {}
}
}
- _ => noop_fold_pat(pat, self)
- };
+ _ => {}
+ }
- fn record_renamings(const_expr: &hir::Expr,
- substituted_pat: &hir::Pat,
- renaming_map: &mut FnvHashMap<(NodeId, Span), NodeId>) {
- let mut renaming_recorder = RenamingRecorder {
- substituted_node_id: substituted_pat.id,
- origin_span: substituted_pat.span,
- renaming_map: renaming_map,
+ pat.map(|Pat { id, node, span }| {
+ let node = match node {
+ PatKind::Binding(binding_mode, pth1, sub) => {
+ PatKind::Binding(binding_mode, pth1, sub.map(|x| self.fold_pat(x)))
+ }
+ PatKind::TupleStruct(pth, pats, ddpos) => {
+ PatKind::TupleStruct(pth, pats.move_map(|x| self.fold_pat(x)), ddpos)
+ }
+ PatKind::Struct(pth, fields, etc) => {
+ let fs = fields.move_map(|f| {
+ Spanned {
+ span: f.span,
+ node: hir::FieldPat {
+ name: f.node.name,
+ pat: self.fold_pat(f.node.pat),
+ is_shorthand: f.node.is_shorthand,
+ },
+ }
+ });
+ PatKind::Struct(pth, fs, etc)
+ }
+ PatKind::Tuple(elts, ddpos) => {
+ PatKind::Tuple(elts.move_map(|x| self.fold_pat(x)), ddpos)
+ }
+ PatKind::Box(inner) => PatKind::Box(self.fold_pat(inner)),
+ PatKind::Ref(inner, mutbl) => PatKind::Ref(self.fold_pat(inner), mutbl),
+ PatKind::Vec(before, slice, after) => {
+ PatKind::Vec(before.move_map(|x| self.fold_pat(x)),
+ slice.map(|x| self.fold_pat(x)),
+ after.move_map(|x| self.fold_pat(x)))
+ }
+ PatKind::Wild |
+ PatKind::Lit(_) |
+ PatKind::Range(..) |
+ PatKind::Path(..) => node
};
-
- renaming_recorder.visit_expr(const_expr);
- }
+ Pat {
+ id: id,
+ node: node,
+ span: span
+ }
+ })
}
}
ty::TyAdt(adt, _) => {
let v = ctor.variant_for_adt(adt);
match v.kind {
- VariantKind::Struct => {
+ ty::VariantKind::Struct => {
let field_pats: hir::HirVec<_> = v.fields.iter()
.zip(pats)
.filter(|&(_, ref pat)| pat.node != PatKind::Wild)
let has_more_fields = field_pats.len() < pats_len;
PatKind::Struct(def_to_path(cx.tcx, v.did), field_pats, has_more_fields)
}
- VariantKind::Tuple => {
+ ty::VariantKind::Tuple => {
PatKind::TupleStruct(def_to_path(cx.tcx, v.did), pats.collect(), None)
}
- VariantKind::Unit => {
+ ty::VariantKind::Unit => {
PatKind::Path(None, def_to_path(cx.tcx, v.did))
}
}
};
P(hir::Pat {
- id: 0,
+ id: DUMMY_NODE_ID,
node: pat,
span: DUMMY_SP
})
impl Constructor {
fn variant_for_adt<'tcx, 'container, 'a>(&self,
adt: &'a ty::AdtDefData<'tcx, 'container>)
- -> &'a VariantDefData<'tcx, 'container> {
+ -> &'a ty::VariantDefData<'tcx, 'container> {
match self {
&Variant(vid) => adt.variant_with_id(vid),
_ => adt.struct_variant()
match pat.node {
PatKind::Struct(..) | PatKind::TupleStruct(..) | PatKind::Path(..) =>
match cx.tcx.expect_def(pat.id) {
- Def::Variant(_, id) => vec![Variant(id)],
+ Def::Variant(id) => vec![Variant(id)],
Def::Struct(..) | Def::Union(..) |
Def::TyAlias(..) | Def::AssociatedTy(..) => vec![Single],
Def::Const(..) | Def::AssociatedConst(..) =>
let pat_ty = cx.tcx.pat_ty(pat);
(pat, Some(match pat.node {
PatKind::Binding(hir::BindByRef(..), ..) => {
- pat_ty.builtin_deref(false, NoPreference).unwrap().ty
+ pat_ty.builtin_deref(false, ty::NoPreference).unwrap().ty
}
_ => pat_ty
}))
Def::Const(..) | Def::AssociatedConst(..) =>
span_bug!(pat_span, "const pattern should've \
been rewritten"),
- Def::Variant(_, id) if *constructor != Variant(id) => None,
+ Def::Variant(id) if *constructor != Variant(id) => None,
Def::Variant(..) | Def::Struct(..) => Some(Vec::new()),
def => span_bug!(pat_span, "specialize: unexpected \
definition {:?}", def),
Def::Const(..) | Def::AssociatedConst(..) =>
span_bug!(pat_span, "const pattern should've \
been rewritten"),
- Def::Variant(_, id) if *constructor != Variant(id) => None,
+ Def::Variant(id) if *constructor != Variant(id) => None,
Def::Variant(..) | Def::Struct(..) => {
match ddpos {
Some(ddpos) => {
fn check_local(cx: &mut MatchCheckCtxt, loc: &hir::Local) {
intravisit::walk_local(cx, loc);
- let pat = StaticInliner::new(cx.tcx, None).fold_pat(loc.pat.clone());
+ let pat = StaticInliner::new(cx.tcx).fold_pat(loc.pat.clone());
check_irrefutable(cx, &pat, false);
// Check legality of move bindings and `@` patterns.
fn_id: NodeId) {
match kind {
FnKind::Closure(_) => {}
- _ => cx.param_env = ParameterEnvironment::for_item(cx.tcx, fn_id),
+ _ => cx.param_env = ty::ParameterEnvironment::for_item(cx.tcx, fn_id),
}
intravisit::walk_fn(cx, kind, decl, body, sp, fn_id);
_: NodeId,
span: Span,
_: cmt,
- _: &'tcx Region,
- kind: BorrowKind,
+ _: &'tcx ty::Region,
+ kind: ty::BorrowKind,
_: LoanCause) {
match kind {
- MutBorrow => {
+ ty::MutBorrow => {
struct_span_err!(self.cx.tcx.sess, span, E0301,
"cannot mutably borrow in a pattern guard")
.span_label(span, &format!("borrowed mutably in pattern guard"))
.emit();
}
- ImmBorrow | UniqueImmBorrow => {}
+ ty::ImmBorrow | ty::UniqueImmBorrow => {}
}
}
fn decl_without_init(&mut self, _: NodeId, _: Span) {}
}
fn lookup_variant_by_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- enum_def: DefId,
variant_def: DefId)
-> Option<&'tcx Expr> {
fn variant_expr<'a>(variants: &'a [hir::Variant], id: ast::NodeId)
None
}
- if let Some(enum_node_id) = tcx.map.as_local_node_id(enum_def) {
- let variant_node_id = tcx.map.as_local_node_id(variant_def).unwrap();
+ if let Some(variant_node_id) = tcx.map.as_local_node_id(variant_def) {
+ let enum_node_id = tcx.map.get_parent(variant_node_id);
match tcx.map.find(enum_node_id) {
None => None,
Some(ast_map::NodeItem(it)) => match it.node {
}
let path = match def {
Def::Struct(def_id) => def_to_path(tcx, def_id),
- Def::Variant(_, variant_did) => def_to_path(tcx, variant_did),
+ Def::Variant(variant_did) => def_to_path(tcx, variant_did),
Def::Fn(..) | Def::Method(..) => return Ok(P(hir::Pat {
id: expr.id,
node: PatKind::Lit(P(expr.clone())),
signal!(e, NonConstPath);
}
},
- Def::Variant(enum_def, variant_def) => {
- if let Some(const_expr) = lookup_variant_by_id(tcx, enum_def, variant_def) {
+ Def::Variant(variant_def) => {
+ if let Some(const_expr) = lookup_variant_by_id(tcx, variant_def) {
match eval_const_expr_partial(tcx, const_expr, ty_hint, None) {
Ok(val) => val,
Err(err) => {
Def::Struct(..) => {
ConstVal::Struct(e.id)
}
- Def::Local(_, id) => {
+ Def::Local(def_id) => {
+ let id = tcx.map.as_local_node_id(def_id).unwrap();
debug!("Def::Local({:?}): {:?}", id, fn_args);
if let Some(val) = fn_args.and_then(|args| args.get(&id)) {
val.clone()
debug!("const call({:?})", call_args);
eval_const_expr_partial(tcx, &result, ty_hint, Some(&call_args))?
},
- hir::ExprLit(ref lit) => match lit_to_const(&lit.node, tcx, ety, lit.span) {
+ hir::ExprLit(ref lit) => match lit_to_const(&lit.node, tcx, ety) {
Ok(val) => val,
Err(err) => signal!(e, err),
},
// when constructing the inference context above.
match selection {
traits::VtableImpl(ref impl_data) => {
- match tcx.associated_consts(impl_data.impl_def_id)
- .iter().find(|ic| ic.name == ti.name) {
+ let ac = tcx.impl_or_trait_items(impl_data.impl_def_id)
+ .iter().filter_map(|&def_id| {
+ match tcx.impl_or_trait_item(def_id) {
+ ty::ConstTraitItem(ic) => Some(ic),
+ _ => None
+ }
+ }).find(|ic| ic.name == ti.name);
+ match ac {
Some(ic) => lookup_const_by_id(tcx, ic.def_id, None),
None => match ti.node {
hir::ConstTraitItem(ref ty, Some(ref expr)) => {
fn lit_to_const<'a, 'tcx>(lit: &ast::LitKind,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty_hint: Option<Ty<'tcx>>,
- span: Span)
+ ty_hint: Option<Ty<'tcx>>)
-> Result<ConstVal, ErrKind> {
use syntax::ast::*;
use syntax::ast::LitIntType::*;
},
LitKind::Float(ref n, fty) => {
- Ok(Float(parse_float(n, Some(fty), span)))
+ parse_float(n, Some(fty)).map(Float)
}
LitKind::FloatUnsuffixed(ref n) => {
let fty_hint = match ty_hint.map(|t| &t.sty) {
Some(&ty::TyFloat(fty)) => Some(fty),
_ => None
};
- Ok(Float(parse_float(n, fty_hint, span)))
+ parse_float(n, fty_hint).map(Float)
}
LitKind::Bool(b) => Ok(Bool(b)),
LitKind::Char(c) => Ok(Char(c)),
}
}
-fn parse_float(num: &str, fty_hint: Option<ast::FloatTy>, span: Span) -> ConstFloat {
+fn parse_float(num: &str, fty_hint: Option<ast::FloatTy>)
+ -> Result<ConstFloat, ErrKind> {
let val = match fty_hint {
Some(ast::FloatTy::F32) => num.parse::<f32>().map(F32),
Some(ast::FloatTy::F64) => num.parse::<f64>().map(F64),
})
}
};
- val.unwrap_or_else(|_| {
+ val.map_err(|_| {
// FIXME(#31407) this is only necessary because float parsing is buggy
- span_bug!(span, "could not evaluate float literal (see issue #31407)");
+ UnimplementedConstVal("could not evaluate float literal (see issue #31407)")
})
}
pub const F_SETLKW: libc::c_int = 9;
}
+ #[cfg(target_os = "haiku")]
+ mod os {
+ use libc;
+
+ pub struct flock {
+ pub l_type: libc::c_short,
+ pub l_whence: libc::c_short,
+ pub l_start: libc::off_t,
+ pub l_len: libc::off_t,
+ pub l_pid: libc::pid_t,
+
+ // not actually here, but brings this struct in line with freebsd
+ pub l_sysid: libc::c_int,
+ }
+
+ pub const F_UNLCK: libc::c_short = 0x0200;
+ pub const F_WRLCK: libc::c_short = 0x0400;
+ pub const F_SETLK: libc::c_int = 0x0080;
+ pub const F_SETLKW: libc::c_int = 0x0100;
+ }
+
#[cfg(any(target_os = "macos", target_os = "ios"))]
mod os {
use libc;
use rustc_borrowck as borrowck;
use rustc_incremental::{self, IncrementalHashesMap};
use rustc_resolve::{MakeGlobMap, Resolver};
-use rustc_metadata::macro_import;
-use rustc_metadata::creader::read_local_crates;
+use rustc_metadata::creader::CrateLoader;
use rustc_metadata::cstore::CStore;
use rustc_trans::back::{link, write};
use rustc_trans as trans;
use serialize::json;
use std::env;
+use std::mem;
use std::ffi::{OsString, OsStr};
use std::fs;
use std::io::{self, Write};
}
sess.track_errors(|| sess.lint_store.borrow_mut().process_command_line(sess))?;
- let mut macro_loader =
- macro_import::MacroLoader::new(sess, &cstore, crate_name, krate.config.clone());
-
+ // Currently, we ignore the name resolution data structures for the purposes of dependency
+ // tracking. Instead we will run name resolution and include its output in the hash of each
+ // item, much like we do for macro expansion. In other words, the hash reflects not just
+ // its contents but the results of name resolution on those contents. Hopefully we'll push
+ // this back at some point.
+ let _ignore = sess.dep_graph.in_ignore();
+ let mut crate_loader = CrateLoader::new(sess, &cstore, &krate, crate_name);
let resolver_arenas = Resolver::arenas();
- let mut resolver = Resolver::new(sess, make_glob_map, &mut macro_loader, &resolver_arenas);
+ let mut resolver =
+ Resolver::new(sess, &krate, make_glob_map, &mut crate_loader, &resolver_arenas);
syntax_ext::register_builtins(&mut resolver, sess.features.borrow().quote);
krate = time(time_passes, "expansion", || {
ret
});
+ krate.exported_macros = mem::replace(&mut resolver.exported_macros, Vec::new());
+
krate = time(time_passes, "maybe building test harness", || {
syntax::test::modify_for_testing(&sess.parse_sess,
&mut resolver,
})
})?;
- // Collect defintions for def ids.
- time(sess.time_passes(), "collecting defs", || resolver.definitions.collect(&krate));
-
- time(sess.time_passes(), "external crate/lib resolution", || {
- let defs = &resolver.definitions;
- read_local_crates(sess, &cstore, defs, &krate, crate_name, &sess.dep_graph)
- });
-
time(sess.time_passes(),
"early lint checks",
|| lint::check_ast_crate(sess, &krate));
|| ast_validation::check_crate(sess, &krate));
time(sess.time_passes(), "name resolution", || -> CompileResult {
- // Currently, we ignore the name resolution data structures for the purposes of dependency
- // tracking. Instead we will run name resolution and include its output in the hash of each
- // item, much like we do for macro expansion. In other words, the hash reflects not just
- // its contents but the results of name resolution on those contents. Hopefully we'll push
- // this back at some point.
- let _ignore = sess.dep_graph.in_ignore();
- resolver.build_reduced_graph(&krate);
resolver.resolve_imports();
// Since import resolution will eventually happen in expansion,
passes.push_pass(box mir::transform::no_landing_pads::NoLandingPads);
passes.push_pass(box mir::transform::simplify_cfg::SimplifyCfg::new("no-landing-pads"));
+ // From here on out, regions are gone.
passes.push_pass(box mir::transform::erase_regions::EraseRegions);
passes.push_pass(box mir::transform::add_call_guards::AddCallGuards);
passes.push_pass(box mir::transform::no_landing_pads::NoLandingPads);
passes.push_pass(box mir::transform::simplify_cfg::SimplifyCfg::new("elaborate-drops"));
+ // No lifetime analysis based on borrowing can be done from here on out.
+ passes.push_pass(box mir::transform::instcombine::InstCombine::new());
passes.push_pass(box mir::transform::deaggregator::Deaggregator);
+ passes.push_pass(box mir::transform::copy_prop::CopyPropagation);
passes.push_pass(box mir::transform::add_call_guards::AddCallGuards);
passes.push_pass(box mir::transform::dump_mir::Marker("PreTrans"));
use rustc::dep_graph::DepGraph;
use rustc::session::{self, config, Session, build_session, CompileResult};
use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType};
-use rustc::session::config::{get_unstable_features_setting, nightly_options};
+use rustc::session::config::nightly_options;
+use rustc::session::early_error;
use rustc::lint::Lint;
use rustc::lint;
use rustc_metadata::loader;
use std::sync::{Arc, Mutex};
use std::thread;
-use rustc::session::early_error;
-
use syntax::{ast, json};
use syntax::codemap::{CodeMap, FileLoader, RealFileLoader};
use syntax::feature_gate::{GatedCfg, UnstableFeatures};
}
}
-pub fn run(args: Vec<String>) -> isize {
+pub fn run<F>(run_compiler: F) -> isize
+ where F: FnOnce() -> (CompileResult, Option<Session>) + Send + 'static
+{
monitor(move || {
- let (result, session) = run_compiler(&args, &mut RustcDefaultCalls);
+ let (result, session) = run_compiler();
if let Err(err_count) = result {
if err_count > 0 {
match session {
Some(sess) => sess.fatal(&abort_msg(err_count)),
None => {
let emitter =
- errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
- None);
+ errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto, None);
let handler = errors::Handler::with_emitter(true, false, Box::new(emitter));
handler.emit(&MultiSpan::new(),
&abort_msg(err_count),
0
}
-pub fn run_compiler<'a>(args: &[String],
- callbacks: &mut CompilerCalls<'a>)
- -> (CompileResult, Option<Session>) {
- run_compiler_with_file_loader(args, callbacks, box RealFileLoader)
-}
-
// Parse args and run the compiler. This is the primary entry point for rustc.
// See comments on CompilerCalls below for details about the callbacks argument.
// The FileLoader provides a way to load files from sources other than the file system.
-pub fn run_compiler_with_file_loader<'a, L>(args: &[String],
- callbacks: &mut CompilerCalls<'a>,
- loader: Box<L>)
- -> (CompileResult, Option<Session>)
- where L: FileLoader + 'static {
+pub fn run_compiler<'a>(args: &[String],
+ callbacks: &mut CompilerCalls<'a>,
+ file_loader: Option<Box<FileLoader + 'static>>,
+ emitter_dest: Option<Box<Write + Send>>)
+ -> (CompileResult, Option<Session>)
+{
macro_rules! do_or_return {($expr: expr, $sess: expr) => {
match $expr {
Compilation::Stop => return (Ok(()), $sess),
let dep_graph = DepGraph::new(sopts.build_dep_graph());
let cstore = Rc::new(CStore::new(&dep_graph));
+
+ let loader = file_loader.unwrap_or(box RealFileLoader);
let codemap = Rc::new(CodeMap::with_file_loader(loader));
let sess = session::build_session_with_codemap(sopts,
&dep_graph,
input_file_path,
descriptions,
cstore.clone(),
- codemap);
+ codemap,
+ emitter_dest);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let mut cfg = config::build_configuration(&sess, cfg);
target_features::add_configuration(&mut cfg, &sess);
}
}
PrintRequest::Cfg => {
- let allow_unstable_cfg = match get_unstable_features_setting() {
- UnstableFeatures::Disallow => false,
- _ => true,
- };
+ let allow_unstable_cfg = UnstableFeatures::from_environment()
+ .is_nightly_build();
for cfg in cfg {
if !allow_unstable_cfg && GatedCfg::gate(&*cfg).is_some() {
all_errors.extend_from_slice(&rustc_privacy::DIAGNOSTICS);
all_errors.extend_from_slice(&rustc_trans::DIAGNOSTICS);
all_errors.extend_from_slice(&rustc_const_eval::DIAGNOSTICS);
+ all_errors.extend_from_slice(&rustc_metadata::DIAGNOSTICS);
Registry::new(&all_errors)
}
pub fn main() {
- let result = run(env::args().collect());
+ let result = run(|| run_compiler(&env::args().collect::<Vec<_>>(),
+ &mut RustcDefaultCalls,
+ None,
+ None));
process::exit(result as i32);
}
type Err = ();
fn from_str(s: &str) -> Result<UserIdentifiedItem, ()> {
Ok(s.parse()
+ .map(ast::NodeId::new)
.map(ItemViaNode)
.unwrap_or_else(|_| ItemViaPath(s.split("::").map(|s| s.to_string()).collect())))
}
pub fn create_simple_region_hierarchy(&self) {
// creates a region hierarchy where 1 is root, 10 and 11 are
// children of 1, etc
+
+ let node = ast::NodeId::from_u32;
let dscope = self.infcx
.tcx
.region_maps
- .intern_code_extent(CodeExtentData::DestructionScope(1),
+ .intern_code_extent(CodeExtentData::DestructionScope(node(1)),
region::ROOT_CODE_EXTENT);
self.create_region_hierarchy(&RH {
- id: 1,
- sub: &[RH { id: 10, sub: &[] }, RH { id: 11, sub: &[] }],
- },
- dscope);
+ id: node(1),
+ sub: &[RH { id: node(10), sub: &[] }, RH { id: node(11), sub: &[] }],
+ }, dscope);
}
#[allow(dead_code)] // this seems like it could be useful, even if we don't use it now
self.infcx.tcx.mk_imm_ref(r, self.tcx().types.isize)
}
- pub fn t_rptr_scope(&self, id: ast::NodeId) -> Ty<'tcx> {
- let r = ty::ReScope(self.tcx().region_maps.node_extent(id));
+ pub fn t_rptr_scope(&self, id: u32) -> Ty<'tcx> {
+ let r = ty::ReScope(self.tcx().region_maps.node_extent(ast::NodeId::from_u32(id)));
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize)
}
}))
}
- pub fn t_rptr_free(&self, nid: ast::NodeId, id: u32) -> Ty<'tcx> {
- let r = self.re_free(nid, id);
+ pub fn t_rptr_free(&self, nid: u32, id: u32) -> Ty<'tcx> {
+ let r = self.re_free(ast::NodeId::from_u32(nid), id);
self.infcx.tcx.mk_imm_ref(r, self.tcx().types.isize)
}
pub fn new(dst: Box<Write + Send>,
code_map: Option<Rc<CodeMapper>>)
-> EmitterWriter {
- EmitterWriter { dst: Raw(dst),
- cm: code_map}
+ EmitterWriter {
+ dst: Raw(dst),
+ cm: code_map,
+ }
}
fn preprocess_annotations(&self, msp: &MultiSpan) -> Vec<FileWithAnnotatedLines> {
if spans_updated {
children.push(SubDiagnostic {
level: Level::Note,
- message: "this error originates in a macro from the standard library".to_string(),
+ message:"this error originates in a macro outside of the current \
+ crate".to_string(),
span: MultiSpan::new(),
render_span: None
});
fn span_to_string(&self, sp: Span) -> String;
fn span_to_filename(&self, sp: Span) -> FileName;
fn macro_backtrace(&self, span: Span) -> Vec<MacroBacktrace>;
+ fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>;
}
impl CodeSuggestion {
[dependencies]
graphviz = { path = "../libgraphviz" }
-rbml = { path = "../librbml" }
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
serialize = { path = "../libserialize" }
log = { path = "../liblog" }
syntax = { path = "../libsyntax" }
-syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
+syntax_pos = { path = "../libsyntax_pos" }
use syntax::ast;
use syntax::parse::token::InternedString;
use syntax_pos::Span;
-
-const IF_THIS_CHANGED: &'static str = "rustc_if_this_changed";
-const THEN_THIS_WOULD_NEED: &'static str = "rustc_then_this_would_need";
+use {ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED};
pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let _ignore = tcx.dep_graph.in_ignore();
assert!(tcx.sess.opts.debugging_opts.query_dep_graph,
"cannot use the `#[{}]` or `#[{}]` annotations \
without supplying `-Z query-dep-graph`",
- IF_THIS_CHANGED, THEN_THIS_WOULD_NEED);
+ ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED);
}
// Check paths.
fn process_attrs(&mut self, node_id: ast::NodeId, attrs: &[ast::Attribute]) {
let def_id = self.tcx.map.local_def_id(node_id);
for attr in attrs {
- if attr.check_name(IF_THIS_CHANGED) {
+ if attr.check_name(ATTR_IF_THIS_CHANGED) {
let dep_node_interned = self.argument(attr);
let dep_node = match dep_node_interned {
None => DepNode::Hir(def_id),
}
};
self.if_this_changed.push((attr.span, def_id, dep_node));
- } else if attr.check_name(THEN_THIS_WOULD_NEED) {
+ } else if attr.check_name(ATTR_THEN_THIS_WOULD_NEED) {
let dep_node_interned = self.argument(attr);
let dep_node = match dep_node_interned {
Some(ref n) => {
//! at the beginning.
use syntax::ast;
+use std::cell::RefCell;
use std::hash::{Hash, SipHasher, Hasher};
use rustc::dep_graph::DepNode;
use rustc::hir;
mod svh_visitor;
mod caching_codemap_view;
-pub type IncrementalHashesMap = FnvHashMap<DepNode<DefId>, u64>;
+pub struct IncrementalHashesMap {
+ hashes: FnvHashMap<DepNode<DefId>, u64>,
+
+ // These are the metadata hashes for the current crate as they were stored
+ // during the last compilation session. They are only loaded if
+ // -Z query-dep-graph was specified and are needed for auto-tests using
+ // the #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes to
+ // check whether some metadata hash has changed in between two revisions.
+ pub prev_metadata_hashes: RefCell<FnvHashMap<DefId, u64>>,
+}
+
+impl IncrementalHashesMap {
+ pub fn new() -> IncrementalHashesMap {
+ IncrementalHashesMap {
+ hashes: FnvHashMap(),
+ prev_metadata_hashes: RefCell::new(FnvHashMap()),
+ }
+ }
+
+ pub fn insert(&mut self, k: DepNode<DefId>, v: u64) -> Option<u64> {
+ self.hashes.insert(k, v)
+ }
+
+ pub fn iter<'a>(&'a self) -> ::std::collections::hash_map::Iter<'a, DepNode<DefId>, u64> {
+ self.hashes.iter()
+ }
+}
+
+impl<'a> ::std::ops::Index<&'a DepNode<DefId>> for IncrementalHashesMap {
+ type Output = u64;
+
+ fn index(&self, index: &'a DepNode<DefId>) -> &u64 {
+ &self.hashes[index]
+ }
+}
+
pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> IncrementalHashesMap {
let hash_spans = tcx.sess.opts.debuginfo != NoDebugInfo;
let mut visitor = HashItemsVisitor {
tcx: tcx,
- hashes: FnvHashMap(),
+ hashes: IncrementalHashesMap::new(),
def_path_hashes: DefPathHashes::new(tcx),
codemap: CachingCodemapView::new(tcx),
hash_spans: hash_spans,
use super::def_path_hash::DefPathHashes;
use super::caching_codemap_view::CachingCodemapView;
-const IGNORED_ATTRIBUTES: &'static [&'static str] = &["cfg",
- "rustc_clean",
- "rustc_dirty"];
+const IGNORED_ATTRIBUTES: &'static [&'static str] = &[
+ "cfg",
+ ::ATTR_IF_THIS_CHANGED,
+ ::ATTR_THEN_THIS_WOULD_NEED,
+ ::ATTR_DIRTY,
+ ::ATTR_CLEAN,
+ ::ATTR_DIRTY_METADATA,
+ ::ATTR_CLEAN_METADATA
+];
pub struct StrictVersionHashVisitor<'a, 'hash: 'a, 'tcx: 'hash> {
pub tcx: TyCtxt<'hash, 'tcx, 'tcx>,
// def-id is the same, so it suffices to hash the def-id
Def::Fn(..) |
Def::Mod(..) |
- Def::ForeignMod(..) |
Def::Static(..) |
Def::Variant(..) |
Def::Enum(..) |
#![feature(core_intrinsics)]
extern crate graphviz;
-extern crate rbml;
#[macro_use] extern crate rustc;
extern crate rustc_data_structures;
extern crate serialize as rustc_serialize;
#[macro_use] extern crate syntax;
extern crate syntax_pos;
+const ATTR_DIRTY: &'static str = "rustc_dirty";
+const ATTR_CLEAN: &'static str = "rustc_clean";
+const ATTR_DIRTY_METADATA: &'static str = "rustc_metadata_dirty";
+const ATTR_CLEAN_METADATA: &'static str = "rustc_metadata_clean";
+const ATTR_IF_THIS_CHANGED: &'static str = "rustc_if_this_changed";
+const ATTR_THEN_THIS_WOULD_NEED: &'static str = "rustc_then_this_would_need";
+
mod assert_dep_graph;
mod calculate_svh;
mod persist;
use rustc::dep_graph::{DepNode, WorkProduct, WorkProductId};
use rustc::hir::def_id::DefIndex;
use std::sync::Arc;
+use rustc_data_structures::fnv::FnvHashMap;
use super::directory::DefPathIndex;
/// a `DefPathIndex` that gets retracted to the current `DefId`
/// (matching the one found in this structure).
pub hashes: Vec<SerializedMetadataHash>,
+
+ /// For each DefIndex (as it occurs in SerializedMetadataHash), this
+ /// map stores the DefPathIndex (as it occurs in DefIdDirectory), so
+ /// that we can find the new DefId for a SerializedMetadataHash in a
+ /// subsequent compilation session.
+ ///
+ /// This map is only needed for running auto-tests using the
+ /// #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes, and
+ /// is only populated if -Z query-dep-graph is specified. It will be
+ /// empty otherwise. Importing crates are perfectly happy with just having
+ /// the DefIndex.
+ pub index_map: FnvHashMap<DefIndex, DefPathIndex>
}
/// The hash for some metadata that (when saving) will be exported
use rustc::dep_graph::DepNode;
use rustc::hir::map::DefPath;
-use rustc::hir::def_id::DefId;
-use rustc::middle::cstore::LOCAL_CRATE;
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::ty::TyCtxt;
use rustc::util::nodemap::DefIdMap;
use std::fmt::{self, Debug};
use std::iter::once;
-use syntax::ast;
+use std::collections::HashMap;
/// Index into the DefIdDirectory
#[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq,
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct CrateInfo {
- krate: ast::CrateNum,
+ krate: CrateNum,
name: String,
disambiguator: String,
}
DefIdDirectory { paths: vec![], krates: krates }
}
- fn max_current_crate(&self, tcx: TyCtxt) -> ast::CrateNum {
+ fn max_current_crate(&self, tcx: TyCtxt) -> CrateNum {
tcx.sess.cstore.crates()
.into_iter()
.max()
pub fn krate_still_valid(&self,
tcx: TyCtxt,
- max_current_crate: ast::CrateNum,
- krate: ast::CrateNum) -> bool {
+ max_current_crate: CrateNum,
+ krate: CrateNum) -> bool {
// Check that the crate-number still matches. For now, if it
// doesn't, just return None. We could do better, such as
// finding the new number.
if krate > max_current_crate {
false
} else {
- let old_info = &self.krates[krate as usize];
+ let old_info = &self.krates[krate.as_usize()];
assert_eq!(old_info.krate, krate);
let old_name: &str = &old_info.name;
let old_disambiguator: &str = &old_info.disambiguator;
}
pub fn retrace(&self, tcx: TyCtxt) -> RetracedDefIdDirectory {
- let max_current_crate = self.max_current_crate(tcx);
+
+ fn make_key(name: &str, disambiguator: &str) -> String {
+ format!("{}/{}", name, disambiguator)
+ }
+
+ let new_krates: HashMap<_, _> =
+ once(LOCAL_CRATE)
+ .chain(tcx.sess.cstore.crates())
+ .map(|krate| (make_key(&tcx.crate_name(krate),
+ &tcx.crate_disambiguator(krate)), krate))
+ .collect();
let ids = self.paths.iter()
.map(|path| {
- if self.krate_still_valid(tcx, max_current_crate, path.krate) {
- tcx.retrace_path(path)
+ let old_krate_id = path.krate.as_usize();
+ assert!(old_krate_id < self.krates.len());
+ let old_crate_info = &self.krates[old_krate_id];
+ let old_crate_key = make_key(&old_crate_info.name,
+ &old_crate_info.disambiguator);
+ if let Some(&new_crate_key) = new_krates.get(&old_crate_key) {
+ tcx.retrace_path(new_crate_key, &path.data)
} else {
- debug!("crate {} changed from {:?} to {:?}/{:?}",
- path.krate,
- self.krates[path.krate as usize],
- tcx.crate_name(path.krate),
- tcx.crate_disambiguator(path.krate));
+ debug!("crate {:?} no longer exists", old_crate_key);
None
}
})
&self.directory.paths[id.index as usize]
}
-
pub fn map(&mut self, node: &DepNode<DefId>) -> DepNode<DefPathIndex> {
node.map_def(|&def_id| Some(self.add(def_id))).unwrap()
}
// except according to those terms.
//! Debugging code to test the state of the dependency graph just
-//! after it is loaded from disk. For each node marked with
-//! `#[rustc_clean]` or `#[rustc_dirty]`, we will check that a
-//! suitable node for that item either appears or does not appear in
-//! the dep-graph, as appropriate:
+//! after it is loaded from disk and just after it has been saved.
+//! For each node marked with `#[rustc_clean]` or `#[rustc_dirty]`,
+//! we will check that a suitable node for that item either appears
+//! or does not appear in the dep-graph, as appropriate:
//!
//! - `#[rustc_dirty(label="TypeckItemBody", cfg="rev2")]` if we are
//! in `#[cfg(rev2)]`, then there MUST NOT be a node
//!
//! Errors are reported if we are in the suitable configuration but
//! the required condition is not met.
+//!
+//! The `#[rustc_metadata_dirty]` and `#[rustc_metadata_clean]` attributes
+//! can be used to check the incremental compilation hash (ICH) values of
+//! metadata exported in rlibs.
+//!
+//! - If a node is marked with `#[rustc_metadata_clean(cfg="rev2")]` we
+//! check that the metadata hash for that node is the same for "rev2"
+//! as it was for "rev1".
+//! - If a node is marked with `#[rustc_metadata_dirty(cfg="rev2")]` we
+//! check that the metadata hash for that node is *different* for "rev2"
+//! than it was for "rev1".
+//!
+//! Note that the metadata-testing attributes must never specify the
+//! first revision. This would lead to a crash since there is no
+//! previous revision to compare things to.
+//!
use super::directory::RetracedDefIdDirectory;
use super::load::DirtyNodes;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::hir::intravisit::Visitor;
-use rustc_data_structures::fnv::FnvHashSet;
use syntax::ast::{self, Attribute, NestedMetaItem};
+use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap};
use syntax::parse::token::InternedString;
+use syntax_pos::Span;
use rustc::ty::TyCtxt;
-const DIRTY: &'static str = "rustc_dirty";
-const CLEAN: &'static str = "rustc_clean";
+use {ATTR_DIRTY, ATTR_CLEAN, ATTR_DIRTY_METADATA, ATTR_CLEAN_METADATA};
+
const LABEL: &'static str = "label";
const CFG: &'static str = "cfg";
}
impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
- fn expect_associated_value(&self, item: &NestedMetaItem) -> InternedString {
- if let Some(value) = item.value_str() {
- value
- } else {
- let msg = if let Some(name) = item.name() {
- format!("associated value expected for `{}`", name)
- } else {
- "expected an associated value".to_string()
- };
-
- self.tcx.sess.span_fatal(item.span, &msg);
- }
- }
-
- /// Given a `#[rustc_dirty]` or `#[rustc_clean]` attribute, scan
- /// for a `cfg="foo"` attribute and check whether we have a cfg
- /// flag called `foo`.
- fn check_config(&self, attr: &ast::Attribute) -> bool {
- debug!("check_config(attr={:?})", attr);
- let config = &self.tcx.map.krate().config;
- debug!("check_config: config={:?}", config);
- for item in attr.meta_item_list().unwrap_or(&[]) {
- if item.check_name(CFG) {
- let value = self.expect_associated_value(item);
- debug!("check_config: searching for cfg {:?}", value);
- for cfg in &config[..] {
- if cfg.check_name(&value[..]) {
- debug!("check_config: matched {:?}", cfg);
- return true;
- }
- }
- return false;
- }
- }
-
- self.tcx.sess.span_fatal(
- attr.span,
- &format!("no cfg attribute"));
- }
fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
for item in attr.meta_item_list().unwrap_or(&[]) {
if item.check_name(LABEL) {
- let value = self.expect_associated_value(item);
+ let value = expect_associated_value(self.tcx, item);
match DepNode::from_label_string(&value[..], def_id) {
Ok(def_id) => return def_id,
Err(()) => {
fn visit_item(&mut self, item: &'tcx hir::Item) {
let def_id = self.tcx.map.local_def_id(item.id);
for attr in self.tcx.get_attrs(def_id).iter() {
- if attr.check_name(DIRTY) {
- if self.check_config(attr) {
+ if attr.check_name(ATTR_DIRTY) {
+ if check_config(self.tcx, attr) {
self.assert_dirty(item, self.dep_node(attr, def_id));
}
- } else if attr.check_name(CLEAN) {
- if self.check_config(attr) {
+ } else if attr.check_name(ATTR_CLEAN) {
+ if check_config(self.tcx, attr) {
self.assert_clean(item, self.dep_node(attr, def_id));
}
}
}
}
+pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ prev_metadata_hashes: &FnvHashMap<DefId, u64>,
+ current_metadata_hashes: &FnvHashMap<DefId, u64>) {
+ if !tcx.sess.opts.debugging_opts.query_dep_graph {
+ return;
+ }
+
+ tcx.dep_graph.with_ignore(||{
+ let krate = tcx.map.krate();
+ krate.visit_all_items(&mut DirtyCleanMetadataVisitor {
+ tcx: tcx,
+ prev_metadata_hashes: prev_metadata_hashes,
+ current_metadata_hashes: current_metadata_hashes,
+ });
+ });
+}
+
+pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ prev_metadata_hashes: &'m FnvHashMap<DefId, u64>,
+ current_metadata_hashes: &'m FnvHashMap<DefId, u64>,
+}
+
+impl<'a, 'tcx, 'm> Visitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
+ fn visit_item(&mut self, item: &'tcx hir::Item) {
+ let def_id = self.tcx.map.local_def_id(item.id);
+
+ for attr in self.tcx.get_attrs(def_id).iter() {
+ if attr.check_name(ATTR_DIRTY_METADATA) {
+ if check_config(self.tcx, attr) {
+ self.assert_state(false, def_id, item.span);
+ }
+ } else if attr.check_name(ATTR_CLEAN_METADATA) {
+ if check_config(self.tcx, attr) {
+ self.assert_state(true, def_id, item.span);
+ }
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {
+
+ fn assert_state(&self, should_be_clean: bool, def_id: DefId, span: Span) {
+ let item_path = self.tcx.item_path_str(def_id);
+ debug!("assert_state({})", item_path);
+
+ if let Some(&prev_hash) = self.prev_metadata_hashes.get(&def_id) {
+ let hashes_are_equal = prev_hash == self.current_metadata_hashes[&def_id];
+
+ if should_be_clean && !hashes_are_equal {
+ self.tcx.sess.span_err(
+ span,
+ &format!("Metadata hash of `{}` is dirty, but should be clean",
+ item_path));
+ }
+
+ let should_be_dirty = !should_be_clean;
+ if should_be_dirty && hashes_are_equal {
+ self.tcx.sess.span_err(
+ span,
+ &format!("Metadata hash of `{}` is clean, but should be dirty",
+ item_path));
+ }
+ } else {
+ self.tcx.sess.span_err(
+ span,
+ &format!("Could not find previous metadata hash of `{}`",
+ item_path));
+ }
+ }
+}
+
+/// Given a `#[rustc_dirty]` or `#[rustc_clean]` attribute, scan
+/// for a `cfg="foo"` attribute and check whether we have a cfg
+/// flag called `foo`.
+fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool {
+ debug!("check_config(attr={:?})", attr);
+ let config = &tcx.map.krate().config;
+ debug!("check_config: config={:?}", config);
+ for item in attr.meta_item_list().unwrap_or(&[]) {
+ if item.check_name(CFG) {
+ let value = expect_associated_value(tcx, item);
+ debug!("check_config: searching for cfg {:?}", value);
+ for cfg in &config[..] {
+ if cfg.check_name(&value[..]) {
+ debug!("check_config: matched {:?}", cfg);
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+
+ tcx.sess.span_fatal(
+ attr.span,
+ &format!("no cfg attribute"));
+}
+
+fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> InternedString {
+ if let Some(value) = item.value_str() {
+ value
+ } else {
+ let msg = if let Some(name) = item.name() {
+ format!("associated value expected for `{}`", name)
+ } else {
+ "expected an associated value".to_string()
+ };
+
+ tcx.sess.span_fatal(item.span, &msg);
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module defines a generic file format that allows checking whether a
+//! given file generated by incremental compilation was generated by a compatible
+//! compiler version. This file format is used for the on-disk version of the
+//! dependency graph and the exported metadata hashes.
+//!
+//! In practice "compatible compiler version" means "exactly the same compiler
+//! version", since the header encodes the git commit hash of the compiler.
+//! Since we can always just ignore the incremental compilation cache and
+//! compiler versions don't change frequently for the typical user, being
+//! conservative here practically has no downside.
+
+use std::io::{self, Read};
+use std::path::Path;
+use std::fs::File;
+use std::env;
+
+use rustc::session::config::nightly_options;
+
+/// The first few bytes of files generated by incremental compilation
+const FILE_MAGIC: &'static [u8] = b"RSIC";
+
+/// Change this if the header format changes
+const HEADER_FORMAT_VERSION: u16 = 0;
+
+/// A version string that hopefully is always different for compiler versions
+/// with different encodings of incremental compilation artifacts. Contains
+/// the git commit hash.
+const RUSTC_VERSION: Option<&'static str> = option_env!("CFG_VERSION");
+
+pub fn write_file_header<W: io::Write>(stream: &mut W) -> io::Result<()> {
+ stream.write_all(FILE_MAGIC)?;
+ stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8,
+ (HEADER_FORMAT_VERSION >> 8) as u8])?;
+
+ let rustc_version = rustc_version();
+ assert_eq!(rustc_version.len(), (rustc_version.len() as u8) as usize);
+ stream.write_all(&[rustc_version.len() as u8])?;
+ stream.write_all(rustc_version.as_bytes())?;
+
+ Ok(())
+}
+
+/// Reads the contents of a file with a file header as defined in this module.
+///
+/// - Returns `Ok(Some(data))` if the file existed and was generated by a
+/// compatible compiler version. `data` is the entire contents of the file
+/// *after* the header.
+/// - Returns `Ok(None)` if the file did not exist or was generated by an
+/// incompatible version of the compiler.
+/// - Returns `Err(..)` if some kind of IO error occurred while reading the
+/// file.
+pub fn read_file(path: &Path) -> io::Result<Option<Vec<u8>>> {
+ if !path.exists() {
+ return Ok(None);
+ }
+
+ let mut file = File::open(path)?;
+
+ // Check FILE_MAGIC
+ {
+ debug_assert!(FILE_MAGIC.len() == 4);
+ let mut file_magic = [0u8; 4];
+ file.read_exact(&mut file_magic)?;
+ if file_magic != FILE_MAGIC {
+ return Ok(None)
+ }
+ }
+
+ // Check HEADER_FORMAT_VERSION
+ {
+ debug_assert!(::std::mem::size_of_val(&HEADER_FORMAT_VERSION) == 2);
+ let mut header_format_version = [0u8; 2];
+ file.read_exact(&mut header_format_version)?;
+ let header_format_version = (header_format_version[0] as u16) |
+ ((header_format_version[1] as u16) << 8);
+
+ if header_format_version != HEADER_FORMAT_VERSION {
+ return Ok(None)
+ }
+ }
+
+ // Check RUSTC_VERSION
+ {
+ let mut rustc_version_str_len = [0u8; 1];
+ file.read_exact(&mut rustc_version_str_len)?;
+ let rustc_version_str_len = rustc_version_str_len[0] as usize;
+ let mut buffer = Vec::with_capacity(rustc_version_str_len);
+ buffer.resize(rustc_version_str_len, 0);
+ file.read_exact(&mut buffer[..])?;
+
+ if &buffer[..] != rustc_version().as_bytes() {
+ return Ok(None);
+ }
+ }
+
+ let mut data = vec![];
+ file.read_to_end(&mut data)?;
+
+ Ok(Some(data))
+}
+
+fn rustc_version() -> String {
+ if nightly_options::is_nightly_build() {
+ if let Some(val) = env::var_os("RUSTC_FORCE_INCR_COMP_ARTIFACT_HEADER") {
+ return val.to_string_lossy().into_owned()
+ }
+ }
+
+ RUSTC_VERSION.expect("Cannot use rustc without explicit version for \
+ incremental compilation")
+ .to_string()
+}
//! unsupported file system and emit a warning in that case. This is not yet
//! implemented.
+use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc::hir::svh::Svh;
-use rustc::middle::cstore::LOCAL_CRATE;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc::util::fs as fs_util;
use std::path::{Path, PathBuf};
use std::time::{UNIX_EPOCH, SystemTime, Duration};
use std::__rand::{thread_rng, Rng};
-use syntax::ast;
const LOCK_FILE_EXT: &'static str = ".lock";
const DEP_GRAPH_FILENAME: &'static str = "dep-graph.bin";
let _ = garbage_collect_session_directories(sess);
}
+/// Deletes every entry in the current incremental compilation session
+/// directory, propagating the first IO error encountered.
+pub fn delete_all_session_dir_contents(sess: &Session) -> io::Result<()> {
+ let sess_dir_iterator = sess.incr_comp_session_dir().read_dir()?;
+ for entry in sess_dir_iterator {
+ let entry = entry?;
+ safe_remove_file(&entry.path())?
+ }
+ Ok(())
+}
+
fn copy_files(target_dir: &Path,
source_dir: &Path,
print_stats_on_success: bool)
Ok(UNIX_EPOCH + duration)
}
-fn crate_path_tcx(tcx: TyCtxt, cnum: ast::CrateNum) -> PathBuf {
+fn crate_path_tcx(tcx: TyCtxt, cnum: CrateNum) -> PathBuf {
crate_path(tcx.sess, &tcx.crate_name(cnum), &tcx.crate_disambiguator(cnum))
}
/// crate's (name, disambiguator) pair. The metadata hashes are only valid for
/// the exact version of the binary we are reading from now (i.e. the hashes
/// are part of the dependency graph of a specific compilation session).
-pub fn find_metadata_hashes_for(tcx: TyCtxt, cnum: ast::CrateNum) -> Option<PathBuf> {
+pub fn find_metadata_hashes_for(tcx: TyCtxt, cnum: CrateNum) -> Option<PathBuf> {
let crate_directory = crate_path_tcx(tcx, cnum);
if !crate_directory.exists() {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rbml::Error;
-use rbml::opaque::Decoder;
use rustc::dep_graph::DepNode;
-use rustc::hir::def_id::DefId;
+use rustc::hir::def_id::{CrateNum, DefId};
use rustc::hir::svh::Svh;
use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashMap;
use rustc_data_structures::flock;
use rustc_serialize::Decodable;
-use std::io::{ErrorKind, Read};
-use std::fs::File;
-use syntax::ast;
+use rustc_serialize::opaque::Decoder;
use IncrementalHashesMap;
use super::data::*;
use super::fs::*;
+use super::file_format;
pub struct HashContext<'a, 'tcx: 'a> {
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &'a IncrementalHashesMap,
item_metadata_hashes: FnvHashMap<DefId, u64>,
- crate_hashes: FnvHashMap<ast::CrateNum, Svh>,
+ crate_hashes: FnvHashMap<CrateNum, Svh>,
}
impl<'a, 'tcx> HashContext<'a, 'tcx> {
}
}
- fn load_data(&mut self, cnum: ast::CrateNum) {
+ fn load_data(&mut self, cnum: CrateNum) {
debug!("load_data(cnum={})", cnum);
let svh = self.tcx.sess.cstore.crate_hash(cnum);
let hashes_file_path = metadata_hash_import_path(&session_dir);
- let mut data = vec![];
- match
- File::open(&hashes_file_path)
- .and_then(|mut file| file.read_to_end(&mut data))
+ match file_format::read_file(&hashes_file_path)
{
- Ok(_) => {
+ Ok(Some(data)) => {
match self.load_from_data(cnum, &data, svh) {
Ok(()) => { }
Err(err) => {
}
}
}
+ Ok(None) => {
+ // If the file is not found, that's ok.
+ }
Err(err) => {
- match err.kind() {
- ErrorKind::NotFound => {
- // If the file is not found, that's ok.
- }
- _ => {
- self.tcx.sess.err(
- &format!("could not load dep information from `{}`: {}",
- hashes_file_path.display(), err));
- return;
- }
- }
+ self.tcx.sess.err(
+ &format!("could not load dep information from `{}`: {}",
+ hashes_file_path.display(), err));
}
}
}
}
fn load_from_data(&mut self,
- cnum: ast::CrateNum,
+ cnum: CrateNum,
data: &[u8],
- expected_svh: Svh) -> Result<(), Error> {
+ expected_svh: Svh) -> Result<(), String> {
debug!("load_from_data(cnum={})", cnum);
// Load up the hashes for the def-ids from this crate.
//! Code to save/load the dep-graph from files.
-use rbml::Error;
-use rbml::opaque::Decoder;
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
+use rustc::hir::svh::Svh;
use rustc::session::Session;
use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv::FnvHashSet;
+use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap};
use rustc_serialize::Decodable as RustcDecodable;
-use std::io::Read;
-use std::fs::{self, File};
+use rustc_serialize::opaque::Decoder;
+use std::fs;
use std::path::{Path};
use IncrementalHashesMap;
use super::dirty_clean;
use super::hash::*;
use super::fs::*;
+use super::file_format;
pub type DirtyNodes = FnvHashSet<DepNode<DefPathIndex>>;
}
fn load_data(sess: &Session, path: &Path) -> Option<Vec<u8>> {
- if !path.exists() {
- return None;
- }
-
- let mut data = vec![];
- match
- File::open(path)
- .and_then(|mut file| file.read_to_end(&mut data))
- {
- Ok(_) => {
- Some(data)
+ match file_format::read_file(path) {
+ Ok(Some(data)) => return Some(data),
+ Ok(None) => {
+ // The file either didn't exist or was produced by an incompatible
+ // compiler version. Neither is an error.
}
Err(err) => {
sess.err(
&format!("could not load dep-graph from `{}`: {}",
path.display(), err));
- None
}
}
+
+ if let Err(err) = delete_all_session_dir_contents(sess) {
+ sess.err(&format!("could not clear incompatible incremental \
+ compilation session directory `{}`: {}",
+ path.display(), err));
+ }
+
+ None
}
/// Decode the dep graph and load the edges/nodes that are still clean
incremental_hashes_map: &IncrementalHashesMap,
dep_graph_data: &[u8],
work_products_data: &[u8])
- -> Result<(), Error>
+ -> Result<(), String>
{
// Decode the list of work_products
let mut work_product_decoder = Decoder::new(work_products_data, 0);
dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_source_nodes, &retraced);
+ load_prev_metadata_hashes(tcx,
+ &retraced,
+ &mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
Ok(())
}
if let Some(dep_node) = retraced.map(&hash.dep_node) {
let current_hash = hcx.hash(&dep_node).unwrap();
if current_hash == hash.hash {
+ debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
+ dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(),
+ current_hash);
continue;
}
debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}",
}
}
}
+
+/// Loads the metadata hashes written by the previous compilation session
+/// into `output`, keyed by the retraced `DefId` of each item.
+///
+/// This is a no-op unless `-Z query-dep-graph` is enabled, and every failure
+/// mode below is non-fatal: it only emits `debug!` logging and returns.
+fn load_prev_metadata_hashes(tcx: TyCtxt,
+ retraced: &RetracedDefIdDirectory,
+ output: &mut FnvHashMap<DefId, u64>) {
+ if !tcx.sess.opts.debugging_opts.query_dep_graph {
+ return
+ }
+
+ debug!("load_prev_metadata_hashes() - Loading previous metadata hashes");
+
+ let file_path = metadata_hash_export_path(tcx.sess);
+
+ if !file_path.exists() {
+ debug!("load_prev_metadata_hashes() - Couldn't find file containing \
+ hashes at `{}`", file_path.display());
+ return
+ }
+
+ debug!("load_prev_metadata_hashes() - File: {}", file_path.display());
+
+ let data = match file_format::read_file(&file_path) {
+ Ok(Some(data)) => data,
+ Ok(None) => {
+ debug!("load_prev_metadata_hashes() - File produced by incompatible \
+ compiler version: {}", file_path.display());
+ return
+ }
+ Err(err) => {
+ debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}",
+ file_path.display(), err);
+ return
+ }
+ };
+
+ debug!("load_prev_metadata_hashes() - Decoding hashes");
+ let mut decoder = Decoder::new(&data, 0);
+ // Skip the leading Svh; only the hash table that follows is needed here.
+ // The file already passed read_file()'s header check, so decoding
+ // failures are treated as bugs (hence unwrap).
+ let _ = Svh::decode(&mut decoder).unwrap();
+ let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap();
+
+ debug!("load_prev_metadata_hashes() - Mapping DefIds");
+
+ assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.hashes.len());
+ for serialized_hash in serialized_hashes.hashes {
+ let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
+ // Items that no longer exist in the current session simply get dropped.
+ if let Some(def_id) = retraced.def_id(def_path_index) {
+ let old = output.insert(def_id, serialized_hash.hash);
+ assert!(old.is_none(), "already have hash for {:?}", def_id);
+ }
+ }
+
+ debug!("load_prev_metadata_hashes() - successfully loaded {} hashes",
+ serialized_hashes.index_map.len());
+}
+
mod preds;
mod save;
mod work_product;
+mod file_format;
pub use self::fs::finalize_session_directory;
pub use self::fs::in_incr_comp_dir;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rbml::opaque::Encoder;
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::hir::svh::Svh;
use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashMap;
use rustc_serialize::Encodable as RustcEncodable;
+use rustc_serialize::opaque::Encoder;
use std::hash::{Hash, Hasher, SipHasher};
use std::io::{self, Cursor, Write};
use std::fs::{self, File};
use super::hash::*;
use super::preds::*;
use super::fs::*;
+use super::dirty_clean;
+use super::file_format;
pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &IncrementalHashesMap,
if sess.opts.incremental.is_none() {
return;
}
- let mut hcx = HashContext::new(tcx, incremental_hashes_map);
+
let mut builder = DefIdDirectoryBuilder::new(tcx);
let query = tcx.dep_graph.query();
+ let mut hcx = HashContext::new(tcx, incremental_hashes_map);
let preds = Predecessors::new(&query, &mut hcx);
+ let mut current_metadata_hashes = FnvHashMap();
+
+ // IMPORTANT: We are saving the metadata hashes *before* the dep-graph,
+ // since metadata-encoding might add new entries to the
+ // DefIdDirectory (which is saved in the dep-graph file).
+ save_in(sess,
+ metadata_hash_export_path(sess),
+ |e| encode_metadata_hashes(tcx,
+ svh,
+ &preds,
+ &mut builder,
+ &mut current_metadata_hashes,
+ e));
save_in(sess,
dep_graph_path(sess),
|e| encode_dep_graph(&preds, &mut builder, e));
- save_in(sess,
- metadata_hash_export_path(sess),
- |e| encode_metadata_hashes(tcx, svh, &preds, &mut builder, e));
+
+ let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();
+ dirty_clean::check_dirty_clean_metadata(tcx,
+ &*prev_metadata_hashes,
+ ¤t_metadata_hashes);
}
pub fn save_work_products(sess: &Session) {
fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
where F: FnOnce(&mut Encoder) -> io::Result<()>
{
+ debug!("save: storing data in {}", path_buf.display());
+
// delete the old dep-graph, if any
// Note: It's important that we actually delete the old file and not just
// truncate and overwrite it, since it might be a shared hard-link, the
// underlying data of which we don't want to modify
if path_buf.exists() {
match fs::remove_file(&path_buf) {
- Ok(()) => {}
+ Ok(()) => {
+ debug!("save: remove old file");
+ }
Err(err) => {
sess.err(&format!("unable to delete old dep-graph at `{}`: {}",
path_buf.display(),
// generate the data in a memory buffer
let mut wr = Cursor::new(Vec::new());
+ file_format::write_file_header(&mut wr).unwrap();
match encode(&mut Encoder::new(&mut wr)) {
Ok(()) => {}
Err(err) => {
// write the data out
let data = wr.into_inner();
match File::create(&path_buf).and_then(|mut file| file.write_all(&data)) {
- Ok(_) => {}
+ Ok(_) => {
+ debug!("save: data written to disk successfully");
+ }
Err(err) => {
sess.err(&format!("failed to write dep-graph to `{}`: {}",
path_buf.display(),
svh: Svh,
preds: &Predecessors,
builder: &mut DefIdDirectoryBuilder,
+ current_metadata_hashes: &mut FnvHashMap<DefId, u64>,
encoder: &mut Encoder)
-> io::Result<()> {
- let mut def_id_hashes = FnvHashMap();
- let mut def_id_hash = |def_id: DefId| -> u64 {
- *def_id_hashes.entry(def_id)
- .or_insert_with(|| {
- let index = builder.add(def_id);
- let path = builder.lookup_def_path(index);
- path.deterministic_hash(tcx)
- })
- };
-
// For each `MetaData(X)` node where `X` is local, accumulate a
// hash. These are the metadata items we export. Downstream
// crates will want to see a hash that tells them whether we might
// compiled.
//
// (I initially wrote this with an iterator, but it seemed harder to read.)
- let mut serialized_hashes = SerializedMetadataHashes { hashes: vec![] };
+ let mut serialized_hashes = SerializedMetadataHashes {
+ hashes: vec![],
+ index_map: FnvHashMap()
+ };
+
+ let mut def_id_hashes = FnvHashMap();
+
for (&target, sources) in &preds.inputs {
let def_id = match *target {
DepNode::MetaData(def_id) => {
_ => continue,
};
+ let mut def_id_hash = |def_id: DefId| -> u64 {
+ *def_id_hashes.entry(def_id)
+ .or_insert_with(|| {
+ let index = builder.add(def_id);
+ let path = builder.lookup_def_path(index);
+ path.deterministic_hash(tcx)
+ })
+ };
+
// To create the hash for each item `X`, we don't hash the raw
// bytes of the metadata (though in principle we
// could). Instead, we walk the predecessors of `MetaData(X)`
});
}
+ if tcx.sess.opts.debugging_opts.query_dep_graph {
+ for serialized_hash in &serialized_hashes.hashes {
+ let def_id = DefId::local(serialized_hash.def_index);
+
+ // Store entry in the index_map
+ let def_path_index = builder.add(def_id);
+ serialized_hashes.index_map.insert(def_id.index, def_path_index);
+
+ // Record hash in current_metadata_hashes
+ current_metadata_hashes.insert(def_id, serialized_hash.hash);
+ }
+
+ debug!("save: stored index_map (len={}) for serialized hashes",
+ serialized_hashes.index_map.len());
+ }
+
// Encode everything.
svh.encode(encoder)?;
serialized_hashes.encode(encoder)?;
impl TypeLimits {
pub fn new() -> TypeLimits {
TypeLimits {
- negated_expr_id: !0,
+ negated_expr_id: ast::DUMMY_NODE_ID,
}
}
}
fn main() {
println!("cargo:rustc-cfg=cargobuild");
- let target = env::var("TARGET").unwrap();
+ let target = env::var("TARGET").expect("TARGET was not set");
let llvm_config = env::var_os("LLVM_CONFIG")
.map(PathBuf::from)
.unwrap_or_else(|| {
// can't trust all the output of llvm-config becaues it might be targeted
// for the host rather than the target. As a result a bunch of blocks below
// are gated on `if !is_crossed`
- let target = env::var("TARGET").unwrap();
- let host = env::var("HOST").unwrap();
+ let target = env::var("TARGET").expect("TARGET was not set");
+ let host = env::var("HOST").expect("HOST was not set");
let is_crossed = target != host;
let optional_components = ["x86", "arm", "aarch64", "mips", "powerpc", "pnacl", "systemz"];
__internal::with_parse_sess(|sess| {
let src = src.to_string();
let cfg = Vec::new();
- let name = "rustc-macro source code".to_string();
+ let name = "<rustc-macro source code>".to_string();
let mut parser = parse::new_parser_from_source_str(sess, cfg, name,
src);
let mut ret = TokenStream { inner: Vec::new() };
[dependencies]
flate = { path = "../libflate" }
log = { path = "../liblog" }
-rbml = { path = "../librbml" }
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
-rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![allow(non_camel_case_types)]
-// FIXME: remove this after snapshot, and Results are handled
-#![allow(unused_must_use)]
-
use rustc::hir::map as ast_map;
-use rustc::session::Session;
-use rustc::hir;
-use rustc::hir::fold;
-use rustc::hir::fold::Folder;
use rustc::hir::intravisit::{Visitor, IdRangeComputingVisitor, IdRange};
-use common as c;
-use cstore;
-use decoder;
-use encoder as e;
-use tydecode;
-use tyencode;
+use cstore::CrateMetadata;
+use encoder::EncodeContext;
+use schema::*;
-use middle::cstore::{InlinedItem, InlinedItemRef};
-use rustc::ty::adjustment;
-use rustc::ty::cast;
-use middle::const_qualif::ConstQualif;
+use rustc::middle::cstore::{InlinedItem, InlinedItemRef};
+use rustc::middle::const_qualif::ConstQualif;
use rustc::hir::def::{self, Def};
use rustc::hir::def_id::DefId;
-use middle::region;
-use rustc::ty::subst::Substs;
-use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::{self, TyCtxt, Ty};
use syntax::ast;
-use syntax::ptr::P;
-use syntax_pos;
-
-use std::cell::Cell;
-use std::io::SeekFrom;
-use std::io::prelude::*;
-use std::fmt::Debug;
-
-use rbml::reader;
-use rbml::writer::Encoder;
-use rbml;
-use rustc_serialize as serialize;
-use rustc_serialize::{Decodable, Decoder, DecoderHelpers};
-use rustc_serialize::{Encodable, EncoderHelpers};
-
-#[cfg(test)] use std::io::Cursor;
-#[cfg(test)] use syntax::parse;
-#[cfg(test)] use rustc::hir::print as pprust;
-#[cfg(test)] use rustc::hir::lowering::{LoweringContext, DummyResolver};
-
-struct DecodeContext<'a, 'b, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cdata: &'b cstore::CrateMetadata,
- from_id_range: IdRange,
- to_id_range: IdRange,
- // Cache the last used filemap for translating spans as an optimization.
- last_filemap_index: Cell<usize>,
-}
-
-trait tr {
- fn tr(&self, dcx: &DecodeContext) -> Self;
-}
-
-// ______________________________________________________________________
-// Top-level methods.
-
-pub fn encode_inlined_item(ecx: &e::EncodeContext,
- rbml_w: &mut Encoder,
- ii: InlinedItemRef) {
- let id = match ii {
- InlinedItemRef::Item(_, i) => i.id,
- InlinedItemRef::TraitItem(_, ti) => ti.id,
- InlinedItemRef::ImplItem(_, ii) => ii.id,
- };
- debug!("> Encoding inlined item: {} ({:?})",
- ecx.tcx.node_path_str(id),
- rbml_w.writer.seek(SeekFrom::Current(0)));
-
- // Folding could be avoided with a smarter encoder.
- let (ii, expected_id_range) = simplify_ast(ii);
- let id_range = inlined_item_id_range(&ii);
- assert_eq!(expected_id_range, id_range);
-
- rbml_w.start_tag(c::tag_ast as usize);
- id_range.encode(rbml_w);
- encode_ast(rbml_w, &ii);
- encode_side_tables_for_ii(ecx, rbml_w, &ii);
- rbml_w.end_tag();
-
- debug!("< Encoded inlined fn: {} ({:?})",
- ecx.tcx.node_path_str(id),
- rbml_w.writer.seek(SeekFrom::Current(0)));
-}
-
-impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> {
- fn new_id(&self, id: ast::NodeId) -> ast::NodeId {
- if id == ast::DUMMY_NODE_ID {
- // Used by ast_map to map the NodeInlinedParent.
- self.tcx.sess.next_node_id()
- } else {
- self.tr_id(id)
- }
- }
- fn new_def_id(&self, def_id: DefId) -> DefId {
- self.tr_def_id(def_id)
- }
- fn new_span(&self, span: syntax_pos::Span) -> syntax_pos::Span {
- self.tr_span(span)
- }
-}
-
-/// Decodes an item from its AST in the cdata's metadata and adds it to the
-/// ast-map.
-pub fn decode_inlined_item<'a, 'tcx>(cdata: &cstore::CrateMetadata,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- parent_def_path: ast_map::DefPath,
- parent_did: DefId,
- ast_doc: rbml::Doc,
- orig_did: DefId)
- -> &'tcx InlinedItem {
- debug!("> Decoding inlined fn: {:?}", tcx.item_path_str(orig_did));
- let mut ast_dsr = reader::Decoder::new(ast_doc);
- let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
- let to_id_range = reserve_id_range(&tcx.sess, from_id_range);
- let dcx = &DecodeContext {
- cdata: cdata,
- tcx: tcx,
- from_id_range: from_id_range,
- to_id_range: to_id_range,
- last_filemap_index: Cell::new(0)
- };
- let ii = ast_map::map_decoded_item(&dcx.tcx.map,
- parent_def_path,
- parent_did,
- decode_ast(ast_doc),
- dcx);
- let name = match *ii {
- InlinedItem::Item(_, ref i) => i.name,
- InlinedItem::TraitItem(_, ref ti) => ti.name,
- InlinedItem::ImplItem(_, ref ii) => ii.name
- };
- debug!("Fn named: {}", name);
- debug!("< Decoded inlined fn: {}::{}",
- tcx.item_path_str(parent_did),
- name);
- region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, ii);
- decode_side_tables(dcx, ast_doc);
- copy_item_types(dcx, ii, orig_did);
- if let InlinedItem::Item(_, ref i) = *ii {
- debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<",
- ::rustc::hir::print::item_to_string(&i));
- }
-
- ii
-}
-
-// ______________________________________________________________________
-// Enumerating the IDs which appear in an AST
-
-fn reserve_id_range(sess: &Session,
- from_id_range: IdRange) -> IdRange {
- // Handle the case of an empty range:
- if from_id_range.empty() { return from_id_range; }
- let cnt = from_id_range.max - from_id_range.min;
- let to_id_min = sess.reserve_node_ids(cnt);
- let to_id_max = to_id_min + cnt;
- IdRange { min: to_id_min, max: to_id_max }
-}
-
-impl<'a, 'b, 'tcx> DecodeContext<'a, 'b, 'tcx> {
- /// Translates an internal id, meaning a node id that is known to refer to some part of the
- /// item currently being inlined, such as a local variable or argument. All naked node-ids
- /// that appear in types have this property, since if something might refer to an external item
- /// we would use a def-id to allow for the possibility that the item resides in another crate.
- pub fn tr_id(&self, id: ast::NodeId) -> ast::NodeId {
- // from_id_range should be non-empty
- assert!(!self.from_id_range.empty());
- // Make sure that translating the NodeId will actually yield a
- // meaningful result
- assert!(self.from_id_range.contains(id));
-
- // Use wrapping arithmetic because otherwise it introduces control flow.
- // Maybe we should just have the control flow? -- aatch
- (id.wrapping_sub(self.from_id_range.min).wrapping_add(self.to_id_range.min))
- }
-
- /// Translates an EXTERNAL def-id, converting the crate number from the one used in the encoded
- /// data to the current crate numbers.. By external, I mean that it be translated to a
- /// reference to the item in its original crate, as opposed to being translated to a reference
- /// to the inlined version of the item. This is typically, but not always, what you want,
- /// because most def-ids refer to external things like types or other fns that may or may not
- /// be inlined. Note that even when the inlined function is referencing itself recursively, we
- /// would want `tr_def_id` for that reference--- conceptually the function calls the original,
- /// non-inlined version, and trans deals with linking that recursive call to the inlined copy.
- pub fn tr_def_id(&self, did: DefId) -> DefId {
- decoder::translate_def_id(self.cdata, did)
- }
-
- /// Translates a `Span` from an extern crate to the corresponding `Span`
- /// within the local crate's codemap.
- pub fn tr_span(&self, span: syntax_pos::Span) -> syntax_pos::Span {
- decoder::translate_span(self.cdata,
- self.tcx.sess.codemap(),
- &self.last_filemap_index,
- span)
- }
-}
-
-impl tr for DefId {
- fn tr(&self, dcx: &DecodeContext) -> DefId {
- dcx.tr_def_id(*self)
- }
-}
-
-impl tr for Option<DefId> {
- fn tr(&self, dcx: &DecodeContext) -> Option<DefId> {
- self.map(|d| dcx.tr_def_id(d))
- }
-}
-
-impl tr for syntax_pos::Span {
- fn tr(&self, dcx: &DecodeContext) -> syntax_pos::Span {
- dcx.tr_span(*self)
- }
-}
-
-trait def_id_encoder_helpers {
- fn emit_def_id(&mut self, did: DefId);
-}
-
-impl<S:serialize::Encoder> def_id_encoder_helpers for S
- where <S as serialize::Encoder>::Error: Debug
-{
- fn emit_def_id(&mut self, did: DefId) {
- did.encode(self).unwrap()
- }
-}
-
-trait def_id_decoder_helpers {
- fn read_def_id(&mut self, dcx: &DecodeContext) -> DefId;
- fn read_def_id_nodcx(&mut self,
- cdata: &cstore::CrateMetadata) -> DefId;
-}
-
-impl<D:serialize::Decoder> def_id_decoder_helpers for D
- where <D as serialize::Decoder>::Error: Debug
-{
- fn read_def_id(&mut self, dcx: &DecodeContext) -> DefId {
- let did: DefId = Decodable::decode(self).unwrap();
- did.tr(dcx)
- }
-
- fn read_def_id_nodcx(&mut self,
- cdata: &cstore::CrateMetadata)
- -> DefId {
- let did: DefId = Decodable::decode(self).unwrap();
- decoder::translate_def_id(cdata, did)
- }
-}
-
-// ______________________________________________________________________
-// Encoding and decoding the AST itself
-//
-// When decoding, we have to renumber the AST so that the node ids that
-// appear within are disjoint from the node ids in our existing ASTs.
-// We also have to adjust the spans: for now we just insert a dummy span,
-// but eventually we should add entries to the local codemap as required.
-
-fn encode_ast(rbml_w: &mut Encoder, item: &InlinedItem) {
- rbml_w.start_tag(c::tag_tree as usize);
- rbml_w.emit_opaque(|this| item.encode(this));
- rbml_w.end_tag();
-}
-
-struct NestedItemsDropper {
- id_range: IdRange
-}
-
-impl Folder for NestedItemsDropper {
-
- // The unit tests below run on HIR with NodeIds not properly assigned. That
- // causes an integer overflow. So we just don't track the id_range when
- // building the unit tests.
- #[cfg(not(test))]
- fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId {
- // Record the range of NodeIds we are visiting, so we can do a sanity
- // check later
- self.id_range.add(id);
- id
- }
-
- fn fold_block(&mut self, blk: P<hir::Block>) -> P<hir::Block> {
- blk.and_then(|hir::Block {id, stmts, expr, rules, span, ..}| {
- let stmts_sans_items = stmts.into_iter().filter_map(|stmt| {
- let use_stmt = match stmt.node {
- hir::StmtExpr(..) | hir::StmtSemi(..) => true,
- hir::StmtDecl(ref decl, _) => {
- match decl.node {
- hir::DeclLocal(_) => true,
- hir::DeclItem(_) => false,
- }
- }
- };
- if use_stmt {
- Some(stmt)
- } else {
- None
- }
- }).collect();
- let blk_sans_items = P(hir::Block {
- stmts: stmts_sans_items,
- expr: expr,
- id: id,
- rules: rules,
- span: span,
- });
- fold::noop_fold_block(blk_sans_items, self)
- })
- }
-}
-
-// Produces a simplified copy of the AST which does not include things
-// that we do not need to or do not want to export. For example, we
-// do not include any nested items: if these nested items are to be
-// inlined, their AST will be exported separately (this only makes
-// sense because, in Rust, nested items are independent except for
-// their visibility).
-//
-// As it happens, trans relies on the fact that we do not export
-// nested items, as otherwise it would get confused when translating
-// inlined items.
-fn simplify_ast(ii: InlinedItemRef) -> (InlinedItem, IdRange) {
- let mut fld = NestedItemsDropper {
- id_range: IdRange::max()
- };
-
- let ii = match ii {
- // HACK we're not dropping items.
- InlinedItemRef::Item(d, i) => {
- InlinedItem::Item(d, P(fold::noop_fold_item(i.clone(), &mut fld)))
- }
- InlinedItemRef::TraitItem(d, ti) => {
- InlinedItem::TraitItem(d, P(fold::noop_fold_trait_item(ti.clone(), &mut fld)))
- }
- InlinedItemRef::ImplItem(d, ii) => {
- InlinedItem::ImplItem(d, P(fold::noop_fold_impl_item(ii.clone(), &mut fld)))
- }
- };
-
- (ii, fld.id_range)
-}
-
-fn decode_ast(item_doc: rbml::Doc) -> InlinedItem {
- let chi_doc = item_doc.get(c::tag_tree as usize);
- let mut rbml_r = reader::Decoder::new(chi_doc);
- rbml_r.read_opaque(|decoder, _| Decodable::decode(decoder)).unwrap()
-}
-
-// ______________________________________________________________________
-// Encoding and decoding of ast::def
-
-fn decode_def(dcx: &DecodeContext, dsr: &mut reader::Decoder) -> Def {
- let def: Def = Decodable::decode(dsr).unwrap();
- def.tr(dcx)
-}
-
-impl tr for Def {
- fn tr(&self, dcx: &DecodeContext) -> Def {
- match *self {
- Def::Fn(did) => Def::Fn(did.tr(dcx)),
- Def::Method(did) => Def::Method(did.tr(dcx)),
- Def::SelfTy(opt_did, impl_id) => {
- // Since the impl_id will never lie within the reserved range of
- // imported NodeIds, it does not make sense to translate it.
- // The result would not make any sense within the importing crate.
- // We also don't allow for impl items to be inlined (just their
- // members), so even if we had a DefId here, we wouldn't be able
- // to do much with it.
- // So, we set the id to DUMMY_NODE_ID. That way we make it
- // explicit that this is no usable NodeId.
- Def::SelfTy(opt_did.map(|did| did.tr(dcx)),
- impl_id.map(|_| ast::DUMMY_NODE_ID))
- }
- Def::Mod(did) => { Def::Mod(did.tr(dcx)) }
- Def::ForeignMod(did) => { Def::ForeignMod(did.tr(dcx)) }
- Def::Static(did, m) => { Def::Static(did.tr(dcx), m) }
- Def::Const(did) => { Def::Const(did.tr(dcx)) }
- Def::AssociatedConst(did) => Def::AssociatedConst(did.tr(dcx)),
- Def::Local(_, nid) => {
- let nid = dcx.tr_id(nid);
- let did = dcx.tcx.map.local_def_id(nid);
- Def::Local(did, nid)
- }
- Def::Variant(e_did, v_did) => Def::Variant(e_did.tr(dcx), v_did.tr(dcx)),
- Def::Trait(did) => Def::Trait(did.tr(dcx)),
- Def::Enum(did) => Def::Enum(did.tr(dcx)),
- Def::TyAlias(did) => Def::TyAlias(did.tr(dcx)),
- Def::AssociatedTy(trait_did, did) =>
- Def::AssociatedTy(trait_did.tr(dcx), did.tr(dcx)),
- Def::PrimTy(p) => Def::PrimTy(p),
- Def::TyParam(did) => Def::TyParam(did.tr(dcx)),
- Def::Upvar(_, nid1, index, nid2) => {
- let nid1 = dcx.tr_id(nid1);
- let nid2 = dcx.tr_id(nid2);
- let did1 = dcx.tcx.map.local_def_id(nid1);
- Def::Upvar(did1, nid1, index, nid2)
- }
- Def::Struct(did) => Def::Struct(did.tr(dcx)),
- Def::Union(did) => Def::Union(did.tr(dcx)),
- Def::Label(nid) => Def::Label(dcx.tr_id(nid)),
- Def::Err => Def::Err,
- }
- }
-}
-
-// ______________________________________________________________________
-// Encoding and decoding of freevar information
-
-fn encode_freevar_entry(rbml_w: &mut Encoder, fv: &hir::Freevar) {
- (*fv).encode(rbml_w).unwrap();
-}
-
-trait rbml_decoder_helper {
- fn read_freevar_entry(&mut self, dcx: &DecodeContext)
- -> hir::Freevar;
- fn read_capture_mode(&mut self) -> hir::CaptureClause;
-}
-
-impl<'a> rbml_decoder_helper for reader::Decoder<'a> {
- fn read_freevar_entry(&mut self, dcx: &DecodeContext)
- -> hir::Freevar {
- let fv: hir::Freevar = Decodable::decode(self).unwrap();
- fv.tr(dcx)
- }
-
- fn read_capture_mode(&mut self) -> hir::CaptureClause {
- let cm: hir::CaptureClause = Decodable::decode(self).unwrap();
- cm
- }
-}
-
-impl tr for hir::Freevar {
- fn tr(&self, dcx: &DecodeContext) -> hir::Freevar {
- hir::Freevar {
- def: self.def.tr(dcx),
- span: self.span.tr(dcx),
- }
- }
-}
-
-// ______________________________________________________________________
-// Encoding and decoding of MethodCallee
-
-trait read_method_callee_helper<'tcx> {
- fn read_method_callee<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> (u32, ty::MethodCallee<'tcx>);
-}
-
-fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>,
- rbml_w: &mut Encoder,
- autoderef: u32,
- method: &ty::MethodCallee<'tcx>) {
- use rustc_serialize::Encoder;
-
- rbml_w.emit_struct("MethodCallee", 4, |rbml_w| {
- rbml_w.emit_struct_field("autoderef", 0, |rbml_w| {
- autoderef.encode(rbml_w)
- });
- rbml_w.emit_struct_field("def_id", 1, |rbml_w| {
- Ok(rbml_w.emit_def_id(method.def_id))
- });
- rbml_w.emit_struct_field("ty", 2, |rbml_w| {
- Ok(rbml_w.emit_ty(ecx, method.ty))
- });
- rbml_w.emit_struct_field("substs", 3, |rbml_w| {
- Ok(rbml_w.emit_substs(ecx, &method.substs))
- })
- }).unwrap();
-}
-
-impl<'a, 'tcx> read_method_callee_helper<'tcx> for reader::Decoder<'a> {
- fn read_method_callee<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> (u32, ty::MethodCallee<'tcx>) {
-
- self.read_struct("MethodCallee", 4, |this| {
- let autoderef = this.read_struct_field("autoderef", 0,
- Decodable::decode).unwrap();
- Ok((autoderef, ty::MethodCallee {
- def_id: this.read_struct_field("def_id", 1, |this| {
- Ok(this.read_def_id(dcx))
- }).unwrap(),
- ty: this.read_struct_field("ty", 2, |this| {
- Ok(this.read_ty(dcx))
- }).unwrap(),
- substs: this.read_struct_field("substs", 3, |this| {
- Ok(this.read_substs(dcx))
- }).unwrap()
- }))
- }).unwrap()
- }
-}
-
-pub fn encode_cast_kind(ebml_w: &mut Encoder, kind: cast::CastKind) {
- kind.encode(ebml_w).unwrap();
-}
-
-// ______________________________________________________________________
-// Encoding and decoding the side tables
-
-trait rbml_writer_helpers<'tcx> {
- fn emit_region(&mut self, ecx: &e::EncodeContext, r: &'tcx ty::Region);
- fn emit_ty<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, ty: Ty<'tcx>);
- fn emit_substs<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
- substs: &Substs<'tcx>);
- fn emit_upvar_capture(&mut self, ecx: &e::EncodeContext, capture: &ty::UpvarCapture);
- fn emit_auto_adjustment<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
- adj: &adjustment::AutoAdjustment<'tcx>);
- fn emit_autoref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
- autoref: &adjustment::AutoRef<'tcx>);
- fn emit_auto_deref_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
- auto_deref_ref: &adjustment::AutoDerefRef<'tcx>);
-}
-
-impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> {
- fn emit_region(&mut self, ecx: &e::EncodeContext, r: &'tcx ty::Region) {
- self.emit_opaque(|this| Ok(tyencode::enc_region(&mut this.cursor,
- &ecx.ty_str_ctxt(),
- r)));
- }
-
- fn emit_ty<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, ty: Ty<'tcx>) {
- self.emit_opaque(|this| Ok(tyencode::enc_ty(&mut this.cursor,
- &ecx.ty_str_ctxt(),
- ty)));
- }
-
- fn emit_upvar_capture(&mut self, ecx: &e::EncodeContext, capture: &ty::UpvarCapture) {
- use rustc_serialize::Encoder;
-
- self.emit_enum("UpvarCapture", |this| {
- match *capture {
- ty::UpvarCapture::ByValue => {
- this.emit_enum_variant("ByValue", 1, 0, |_| Ok(()))
- }
- ty::UpvarCapture::ByRef(ty::UpvarBorrow { kind, region }) => {
- this.emit_enum_variant("ByRef", 2, 0, |this| {
- this.emit_enum_variant_arg(0,
- |this| kind.encode(this));
- this.emit_enum_variant_arg(1,
- |this| Ok(this.emit_region(ecx, region)))
- })
- }
- }
- }).unwrap()
- }
-
- fn emit_substs<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
- substs: &Substs<'tcx>) {
- self.emit_opaque(|this| Ok(tyencode::enc_substs(&mut this.cursor,
- &ecx.ty_str_ctxt(),
- substs)));
- }
-
- fn emit_auto_adjustment<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
- adj: &adjustment::AutoAdjustment<'tcx>) {
- use rustc_serialize::Encoder;
-
- self.emit_enum("AutoAdjustment", |this| {
- match *adj {
- adjustment::AdjustReifyFnPointer => {
- this.emit_enum_variant("AdjustReifyFnPointer", 1, 0, |_| Ok(()))
- }
-
- adjustment::AdjustUnsafeFnPointer => {
- this.emit_enum_variant("AdjustUnsafeFnPointer", 2, 0, |_| {
- Ok(())
- })
- }
-
- adjustment::AdjustMutToConstPointer => {
- this.emit_enum_variant("AdjustMutToConstPointer", 3, 0, |_| {
- Ok(())
- })
- }
-
- adjustment::AdjustDerefRef(ref auto_deref_ref) => {
- this.emit_enum_variant("AdjustDerefRef", 4, 2, |this| {
- this.emit_enum_variant_arg(0,
- |this| Ok(this.emit_auto_deref_ref(ecx, auto_deref_ref)))
- })
- }
- adjustment::AdjustNeverToAny(ref ty) => {
- this.emit_enum_variant("AdjustNeverToAny", 5, 1, |this| {
- this.emit_enum_variant_arg(0, |this| Ok(this.emit_ty(ecx, ty)))
- })
- }
- }
- });
- }
-
- fn emit_autoref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
- autoref: &adjustment::AutoRef<'tcx>) {
- use rustc_serialize::Encoder;
-
- self.emit_enum("AutoRef", |this| {
- match autoref {
- &adjustment::AutoPtr(r, m) => {
- this.emit_enum_variant("AutoPtr", 0, 2, |this| {
- this.emit_enum_variant_arg(0,
- |this| Ok(this.emit_region(ecx, r)));
- this.emit_enum_variant_arg(1, |this| m.encode(this))
- })
- }
- &adjustment::AutoUnsafe(m) => {
- this.emit_enum_variant("AutoUnsafe", 1, 1, |this| {
- this.emit_enum_variant_arg(0, |this| m.encode(this))
- })
- }
- }
- });
- }
-
- fn emit_auto_deref_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
- auto_deref_ref: &adjustment::AutoDerefRef<'tcx>) {
- use rustc_serialize::Encoder;
-
- self.emit_struct("AutoDerefRef", 2, |this| {
- this.emit_struct_field("autoderefs", 0, |this| auto_deref_ref.autoderefs.encode(this));
+use rustc_serialize::Encodable;
- this.emit_struct_field("autoref", 1, |this| {
- this.emit_option(|this| {
- match auto_deref_ref.autoref {
- None => this.emit_option_none(),
- Some(ref a) => this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a))),
- }
- })
- });
-
- this.emit_struct_field("unsize", 2, |this| {
- this.emit_option(|this| {
- match auto_deref_ref.unsize {
- None => this.emit_option_none(),
- Some(target) => this.emit_option_some(|this| {
- Ok(this.emit_ty(ecx, target))
- })
- }
- })
- })
- });
- }
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct Ast<'tcx> {
+ id_range: IdRange,
+ item: Lazy<InlinedItem>,
+ side_tables: LazySeq<(ast::NodeId, TableEntry<'tcx>)>
}
-trait write_tag_and_id {
- fn tag<F>(&mut self, tag_id: c::astencode_tag, f: F) where F: FnOnce(&mut Self);
- fn id(&mut self, id: ast::NodeId);
+#[derive(RustcEncodable, RustcDecodable)]
+enum TableEntry<'tcx> {
+ Def(Def),
+ NodeType(Ty<'tcx>),
+ ItemSubsts(ty::ItemSubsts<'tcx>),
+ Adjustment(ty::adjustment::AutoAdjustment<'tcx>),
+ ConstQualif(ConstQualif)
}
-impl<'a> write_tag_and_id for Encoder<'a> {
- fn tag<F>(&mut self,
- tag_id: c::astencode_tag,
- f: F) where
- F: FnOnce(&mut Encoder<'a>),
- {
- self.start_tag(tag_id as usize);
- f(self);
- self.end_tag();
- }
-
- fn id(&mut self, id: ast::NodeId) {
- id.encode(self).unwrap();
- }
-}
-
-struct SideTableEncodingIdVisitor<'a, 'b:'a, 'c:'a, 'tcx:'c> {
- ecx: &'a e::EncodeContext<'c, 'tcx>,
- rbml_w: &'a mut Encoder<'b>,
-}
-
-impl<'a, 'b, 'c, 'tcx, 'v> Visitor<'v> for
- SideTableEncodingIdVisitor<'a, 'b, 'c, 'tcx> {
- fn visit_id(&mut self, id: ast::NodeId) {
- encode_side_tables_for_id(self.ecx, self.rbml_w, id)
- }
-}
-
-fn encode_side_tables_for_ii(ecx: &e::EncodeContext,
- rbml_w: &mut Encoder,
- ii: &InlinedItem) {
- rbml_w.start_tag(c::tag_table as usize);
- ii.visit(&mut SideTableEncodingIdVisitor {
- ecx: ecx,
- rbml_w: rbml_w
- });
- rbml_w.end_tag();
-}
-
-fn encode_side_tables_for_id(ecx: &e::EncodeContext,
- rbml_w: &mut Encoder,
- id: ast::NodeId) {
- let tcx = ecx.tcx;
-
- debug!("Encoding side tables for id {}", id);
-
- if let Some(def) = tcx.expect_def_or_none(id) {
- rbml_w.tag(c::tag_table_def, |rbml_w| {
- rbml_w.id(id);
- def.encode(rbml_w).unwrap();
- })
- }
-
- if let Some(ty) = tcx.node_types().get(&id) {
- rbml_w.tag(c::tag_table_node_type, |rbml_w| {
- rbml_w.id(id);
- rbml_w.emit_ty(ecx, *ty);
- })
- }
-
- if let Some(item_substs) = tcx.tables.borrow().item_substs.get(&id) {
- rbml_w.tag(c::tag_table_item_subst, |rbml_w| {
- rbml_w.id(id);
- rbml_w.emit_substs(ecx, &item_substs.substs);
- })
- }
-
- if let Some(fv) = tcx.freevars.borrow().get(&id) {
- rbml_w.tag(c::tag_table_freevars, |rbml_w| {
- rbml_w.id(id);
- rbml_w.emit_from_vec(fv, |rbml_w, fv_entry| {
- Ok(encode_freevar_entry(rbml_w, fv_entry))
- });
- });
-
- for freevar in fv {
- rbml_w.tag(c::tag_table_upvar_capture_map, |rbml_w| {
- rbml_w.id(id);
-
- let var_id = freevar.def.var_id();
- let upvar_id = ty::UpvarId {
- var_id: var_id,
- closure_expr_id: id
- };
- let upvar_capture = tcx.tables
- .borrow()
- .upvar_capture_map
- .get(&upvar_id)
- .unwrap()
- .clone();
- var_id.encode(rbml_w);
- rbml_w.emit_upvar_capture(ecx, &upvar_capture);
- })
+impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
+ pub fn encode_inlined_item(&mut self, ii: InlinedItemRef) -> Lazy<Ast<'tcx>> {
+ let mut id_visitor = IdRangeComputingVisitor::new();
+ match ii {
+ InlinedItemRef::Item(_, i) => id_visitor.visit_item(i),
+ InlinedItemRef::TraitItem(_, ti) => id_visitor.visit_trait_item(ti),
+ InlinedItemRef::ImplItem(_, ii) => id_visitor.visit_impl_item(ii)
}
- }
- let method_call = ty::MethodCall::expr(id);
- if let Some(method) = tcx.tables.borrow().method_map.get(&method_call) {
- rbml_w.tag(c::tag_table_method_map, |rbml_w| {
- rbml_w.id(id);
- encode_method_callee(ecx, rbml_w, method_call.autoderef, method)
- })
- }
-
- if let Some(adjustment) = tcx.tables.borrow().adjustments.get(&id) {
- match *adjustment {
- adjustment::AdjustDerefRef(ref adj) => {
- for autoderef in 0..adj.autoderefs {
- let method_call = ty::MethodCall::autoderef(id, autoderef as u32);
- if let Some(method) = tcx.tables.borrow().method_map.get(&method_call) {
- rbml_w.tag(c::tag_table_method_map, |rbml_w| {
- rbml_w.id(id);
- encode_method_callee(ecx, rbml_w,
- method_call.autoderef, method)
- })
- }
- }
+ let ii_pos = self.position();
+ ii.encode(self).unwrap();
+
+ let tables_pos = self.position();
+ let tables_count = {
+ let mut visitor = SideTableEncodingIdVisitor {
+ ecx: self,
+ count: 0
+ };
+ match ii {
+ InlinedItemRef::Item(_, i) => visitor.visit_item(i),
+ InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti),
+ InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii)
}
- _ => {}
- }
-
- rbml_w.tag(c::tag_table_adjustments, |rbml_w| {
- rbml_w.id(id);
- rbml_w.emit_auto_adjustment(ecx, adjustment);
- })
- }
-
- if let Some(cast_kind) = tcx.cast_kinds.borrow().get(&id) {
- rbml_w.tag(c::tag_table_cast_kinds, |rbml_w| {
- rbml_w.id(id);
- encode_cast_kind(rbml_w, *cast_kind)
- })
- }
+ visitor.count
+ };
- if let Some(qualif) = tcx.const_qualif_map.borrow().get(&id) {
- rbml_w.tag(c::tag_table_const_qualif, |rbml_w| {
- rbml_w.id(id);
- qualif.encode(rbml_w).unwrap()
+ self.lazy(&Ast {
+ id_range: id_visitor.result(),
+ item: Lazy::with_position(ii_pos),
+ side_tables: LazySeq::with_position_and_length(tables_pos, tables_count)
})
}
}
-trait doc_decoder_helpers: Sized {
- fn as_int(&self) -> isize;
- fn opt_child(&self, tag: c::astencode_tag) -> Option<Self>;
+struct SideTableEncodingIdVisitor<'a, 'b:'a, 'tcx:'b> {
+ ecx: &'a mut EncodeContext<'b, 'tcx>,
+ count: usize
}
-impl<'a> doc_decoder_helpers for rbml::Doc<'a> {
- fn as_int(&self) -> isize { reader::doc_as_u64(*self) as isize }
- fn opt_child(&self, tag: c::astencode_tag) -> Option<rbml::Doc<'a>> {
- reader::maybe_get_doc(*self, tag as usize)
- }
-}
-
-trait rbml_decoder_decoder_helpers<'tcx> {
- fn read_ty_encoded<'a, 'b, F, R>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>,
- f: F) -> R
- where F: for<'x> FnOnce(&mut tydecode::TyDecoder<'x, 'tcx>) -> R;
-
- fn read_region<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> &'tcx ty::Region;
- fn read_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Ty<'tcx>;
- fn read_tys<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Vec<Ty<'tcx>>;
- fn read_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> ty::TraitRef<'tcx>;
- fn read_poly_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> ty::PolyTraitRef<'tcx>;
- fn read_predicate<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> ty::Predicate<'tcx>;
- fn read_substs<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> &'tcx Substs<'tcx>;
- fn read_upvar_capture<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> ty::UpvarCapture<'tcx>;
- fn read_auto_adjustment<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> adjustment::AutoAdjustment<'tcx>;
- fn read_cast_kind<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> cast::CastKind;
- fn read_auto_deref_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> adjustment::AutoDerefRef<'tcx>;
- fn read_autoref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
- -> adjustment::AutoRef<'tcx>;
-
- // Versions of the type reading functions that don't need the full
- // DecodeContext.
- fn read_ty_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cdata: &cstore::CrateMetadata) -> Ty<'tcx>;
- fn read_tys_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cdata: &cstore::CrateMetadata) -> Vec<Ty<'tcx>>;
- fn read_substs_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cdata: &cstore::CrateMetadata)
- -> &'tcx Substs<'tcx>;
-}
-
-impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> {
- fn read_ty_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>,
- cdata: &cstore::CrateMetadata)
- -> Ty<'tcx> {
- self.read_opaque(|_, doc| {
- Ok(
- tydecode::TyDecoder::with_doc(tcx, cdata.cnum, doc,
- &mut |id| decoder::translate_def_id(cdata, id))
- .parse_ty())
- }).unwrap()
- }
-
- fn read_tys_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>,
- cdata: &cstore::CrateMetadata) -> Vec<Ty<'tcx>> {
- self.read_to_vec(|this| Ok(this.read_ty_nodcx(tcx, cdata)) )
- .unwrap()
- .into_iter()
- .collect()
- }
-
- fn read_substs_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>,
- cdata: &cstore::CrateMetadata)
- -> &'tcx Substs<'tcx>
- {
- self.read_opaque(|_, doc| {
- Ok(
- tydecode::TyDecoder::with_doc(tcx, cdata.cnum, doc,
- &mut |id| decoder::translate_def_id(cdata, id))
- .parse_substs())
- }).unwrap()
- }
-
- fn read_ty_encoded<'b, 'c, F, R>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>, op: F) -> R
- where F: for<'x> FnOnce(&mut tydecode::TyDecoder<'x,'tcx>) -> R
- {
- return self.read_opaque(|_, doc| {
- debug!("read_ty_encoded({})", type_string(doc));
- Ok(op(
- &mut tydecode::TyDecoder::with_doc(
- dcx.tcx, dcx.cdata.cnum, doc,
- &mut |d| convert_def_id(dcx, d))))
- }).unwrap();
+impl<'a, 'b, 'tcx, 'v> Visitor<'v> for SideTableEncodingIdVisitor<'a, 'b, 'tcx> {
+ fn visit_id(&mut self, id: ast::NodeId) {
+ debug!("Encoding side tables for id {}", id);
- fn type_string(doc: rbml::Doc) -> String {
- let mut str = String::new();
- for i in doc.start..doc.end {
- str.push(doc.data[i] as char);
+ let tcx = self.ecx.tcx;
+ let mut encode = |entry: Option<TableEntry>| {
+ if let Some(entry) = entry {
+ (id, entry).encode(self.ecx).unwrap();
+ self.count += 1;
}
- str
- }
- }
- fn read_region<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> &'tcx ty::Region {
- // Note: regions types embed local node ids. In principle, we
- // should translate these node ids into the new decode
- // context. However, we do not bother, because region types
- // are not used during trans. This also applies to read_ty.
- return self.read_ty_encoded(dcx, |decoder| decoder.parse_region());
- }
- fn read_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> Ty<'tcx> {
- return self.read_ty_encoded(dcx, |decoder| decoder.parse_ty());
- }
+ };
- fn read_tys<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> Vec<Ty<'tcx>> {
- self.read_to_vec(|this| Ok(this.read_ty(dcx))).unwrap().into_iter().collect()
- }
-
- fn read_trait_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> ty::TraitRef<'tcx> {
- self.read_ty_encoded(dcx, |decoder| decoder.parse_trait_ref())
- }
-
- fn read_poly_trait_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> ty::PolyTraitRef<'tcx> {
- ty::Binder(self.read_ty_encoded(dcx, |decoder| decoder.parse_trait_ref()))
- }
-
- fn read_predicate<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> ty::Predicate<'tcx>
- {
- self.read_ty_encoded(dcx, |decoder| decoder.parse_predicate())
- }
-
- fn read_substs<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> &'tcx Substs<'tcx> {
- self.read_opaque(|_, doc| {
- Ok(tydecode::TyDecoder::with_doc(dcx.tcx, dcx.cdata.cnum, doc,
- &mut |d| convert_def_id(dcx, d))
- .parse_substs())
- }).unwrap()
- }
- fn read_upvar_capture<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> ty::UpvarCapture<'tcx> {
- self.read_enum("UpvarCapture", |this| {
- let variants = ["ByValue", "ByRef"];
- this.read_enum_variant(&variants, |this, i| {
- Ok(match i {
- 1 => ty::UpvarCapture::ByValue,
- 2 => ty::UpvarCapture::ByRef(ty::UpvarBorrow {
- kind: this.read_enum_variant_arg(0,
- |this| Decodable::decode(this)).unwrap(),
- region: this.read_enum_variant_arg(1,
- |this| Ok(this.read_region(dcx))).unwrap()
- }),
- _ => bug!("bad enum variant for ty::UpvarCapture")
- })
- })
- }).unwrap()
- }
- fn read_auto_adjustment<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> adjustment::AutoAdjustment<'tcx> {
- self.read_enum("AutoAdjustment", |this| {
- let variants = ["AdjustReifyFnPointer", "AdjustUnsafeFnPointer",
- "AdjustMutToConstPointer", "AdjustDerefRef",
- "AdjustNeverToAny"];
- this.read_enum_variant(&variants, |this, i| {
- Ok(match i {
- 1 => adjustment::AdjustReifyFnPointer,
- 2 => adjustment::AdjustUnsafeFnPointer,
- 3 => adjustment::AdjustMutToConstPointer,
- 4 => {
- let auto_deref_ref: adjustment::AutoDerefRef =
- this.read_enum_variant_arg(0,
- |this| Ok(this.read_auto_deref_ref(dcx))).unwrap();
-
- adjustment::AdjustDerefRef(auto_deref_ref)
- }
- 5 => {
- let ty: Ty<'tcx> = this.read_enum_variant_arg(0, |this| {
- Ok(this.read_ty(dcx))
- }).unwrap();
-
- adjustment::AdjustNeverToAny(ty)
- }
- _ => bug!("bad enum variant for adjustment::AutoAdjustment")
- })
- })
- }).unwrap()
- }
-
- fn read_auto_deref_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> adjustment::AutoDerefRef<'tcx> {
- self.read_struct("AutoDerefRef", 2, |this| {
- Ok(adjustment::AutoDerefRef {
- autoderefs: this.read_struct_field("autoderefs", 0, |this| {
- Decodable::decode(this)
- }).unwrap(),
- autoref: this.read_struct_field("autoref", 1, |this| {
- this.read_option(|this, b| {
- if b {
- Ok(Some(this.read_autoref(dcx)))
- } else {
- Ok(None)
- }
- })
- }).unwrap(),
- unsize: this.read_struct_field("unsize", 2, |this| {
- this.read_option(|this, b| {
- if b {
- Ok(Some(this.read_ty(dcx)))
- } else {
- Ok(None)
- }
- })
- }).unwrap(),
- })
- }).unwrap()
- }
-
- fn read_autoref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> adjustment::AutoRef<'tcx> {
- self.read_enum("AutoRef", |this| {
- let variants = ["AutoPtr", "AutoUnsafe"];
- this.read_enum_variant(&variants, |this, i| {
- Ok(match i {
- 0 => {
- let r: &'tcx ty::Region =
- this.read_enum_variant_arg(0, |this| {
- Ok(this.read_region(dcx))
- }).unwrap();
- let m: hir::Mutability =
- this.read_enum_variant_arg(1, |this| {
- Decodable::decode(this)
- }).unwrap();
-
- adjustment::AutoPtr(r, m)
- }
- 1 => {
- let m: hir::Mutability =
- this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
-
- adjustment::AutoUnsafe(m)
- }
- _ => bug!("bad enum variant for adjustment::AutoRef")
- })
- })
- }).unwrap()
- }
-
- fn read_cast_kind<'b, 'c>(&mut self, _dcx: &DecodeContext<'b, 'c, 'tcx>)
- -> cast::CastKind
- {
- Decodable::decode(self).unwrap()
+ encode(tcx.expect_def_or_none(id).map(TableEntry::Def));
+ encode(tcx.node_types().get(&id).cloned().map(TableEntry::NodeType));
+ encode(tcx.tables.borrow().item_substs.get(&id).cloned().map(TableEntry::ItemSubsts));
+ encode(tcx.tables.borrow().adjustments.get(&id).cloned().map(TableEntry::Adjustment));
+ encode(tcx.const_qualif_map.borrow().get(&id).cloned().map(TableEntry::ConstQualif));
}
}
-// Converts a def-id that appears in a type. The correct
-// translation will depend on what kind of def-id this is.
-// This is a subtle point: type definitions are not
-// inlined into the current crate, so if the def-id names
-// a nominal type or type alias, then it should be
-// translated to refer to the source crate.
-//
-// However, *type parameters* are cloned along with the function
-// they are attached to. So we should translate those def-ids
-// to refer to the new, cloned copy of the type parameter.
-// We only see references to free type parameters in the body of
-// an inlined function. In such cases, we need the def-id to
-// be a local id so that the TypeContents code is able to lookup
-// the relevant info in the ty_param_defs table.
-//
-// *Region parameters*, unfortunately, are another kettle of fish.
-// In such cases, def_id's can appear in types to distinguish
-// shadowed bound regions and so forth. It doesn't actually
-// matter so much what we do to these, since regions are erased
-// at trans time, but it's good to keep them consistent just in
-// case. We translate them with `tr_def_id()` which will map
-// the crate numbers back to the original source crate.
-//
-// Scopes will end up as being totally bogus. This can actually
-// be fixed though.
-//
-// Unboxed closures are cloned along with the function being
-// inlined, and all side tables use interned node IDs, so we
-// translate their def IDs accordingly.
-//
-// It'd be really nice to refactor the type repr to not include
-// def-ids so that all these distinctions were unnecessary.
-fn convert_def_id(dcx: &DecodeContext,
- did: DefId)
- -> DefId {
- let r = dcx.tr_def_id(did);
- debug!("convert_def_id(did={:?})={:?}", did, r);
- return r;
-}
-
-fn decode_side_tables(dcx: &DecodeContext,
- ast_doc: rbml::Doc) {
- let tbl_doc = ast_doc.get(c::tag_table as usize);
- for (tag, entry_doc) in reader::docs(tbl_doc) {
- let mut entry_dsr = reader::Decoder::new(entry_doc);
- let id0: ast::NodeId = Decodable::decode(&mut entry_dsr).unwrap();
- let id = dcx.tr_id(id0);
-
- debug!(">> Side table document with tag 0x{:x} \
- found for id {} (orig {})",
- tag, id, id0);
- let tag = tag as u32;
- let decoded_tag: Option<c::astencode_tag> = c::astencode_tag::from_u32(tag);
- match decoded_tag {
- None => {
- bug!("unknown tag found in side tables: {:x}", tag);
- }
- Some(value) => {
- let val_dsr = &mut entry_dsr;
-
- match value {
- c::tag_table_def => {
- let def = decode_def(dcx, val_dsr);
- dcx.tcx.def_map.borrow_mut().insert(id, def::PathResolution::new(def));
- }
- c::tag_table_node_type => {
- let ty = val_dsr.read_ty(dcx);
- debug!("inserting ty for node {}: {:?}",
- id, ty);
- dcx.tcx.node_type_insert(id, ty);
- }
- c::tag_table_item_subst => {
- let item_substs = ty::ItemSubsts {
- substs: val_dsr.read_substs(dcx)
- };
- dcx.tcx.tables.borrow_mut().item_substs.insert(
- id, item_substs);
- }
- c::tag_table_freevars => {
- let fv_info = val_dsr.read_to_vec(|val_dsr| {
- Ok(val_dsr.read_freevar_entry(dcx))
- }).unwrap().into_iter().collect();
- dcx.tcx.freevars.borrow_mut().insert(id, fv_info);
- }
- c::tag_table_upvar_capture_map => {
- let var_id: ast::NodeId = Decodable::decode(val_dsr).unwrap();
- let upvar_id = ty::UpvarId {
- var_id: dcx.tr_id(var_id),
- closure_expr_id: id
- };
- let ub = val_dsr.read_upvar_capture(dcx);
- dcx.tcx.tables.borrow_mut().upvar_capture_map.insert(upvar_id, ub);
- }
- c::tag_table_method_map => {
- let (autoderef, method) = val_dsr.read_method_callee(dcx);
- let method_call = ty::MethodCall {
- expr_id: id,
- autoderef: autoderef
- };
- dcx.tcx.tables.borrow_mut().method_map.insert(method_call, method);
- }
- c::tag_table_adjustments => {
- let adj =
- val_dsr.read_auto_adjustment(dcx);
- dcx.tcx.tables.borrow_mut().adjustments.insert(id, adj);
- }
- c::tag_table_cast_kinds => {
- let cast_kind =
- val_dsr.read_cast_kind(dcx);
- dcx.tcx.cast_kinds.borrow_mut().insert(id, cast_kind);
- }
- c::tag_table_const_qualif => {
- let qualif: ConstQualif = Decodable::decode(val_dsr).unwrap();
- dcx.tcx.const_qualif_map.borrow_mut().insert(id, qualif);
- }
- _ => {
- bug!("unknown tag found in side tables: {:x}", tag);
- }
- }
- }
- }
+/// Decodes an item from its AST in the cdata's metadata and adds it to the
+/// ast-map.
+pub fn decode_inlined_item<'a, 'tcx>(cdata: &CrateMetadata,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ parent_def_path: ast_map::DefPath,
+ parent_did: DefId,
+ ast: Ast<'tcx>,
+ orig_did: DefId)
+ -> &'tcx InlinedItem {
+ debug!("> Decoding inlined fn: {:?}", tcx.item_path_str(orig_did));
- debug!(">< Side table doc loaded");
- }
-}
+ let cnt = ast.id_range.max.as_usize() - ast.id_range.min.as_usize();
+ let start = tcx.sess.reserve_node_ids(cnt);
+ let id_ranges = [ast.id_range, IdRange {
+ min: start,
+ max: ast::NodeId::new(start.as_usize() + cnt)
+ }];
-// copy the tcache entries from the original item to the new
-// inlined item
-fn copy_item_types(dcx: &DecodeContext, ii: &InlinedItem, orig_did: DefId) {
- fn copy_item_type(dcx: &DecodeContext,
- inlined_id: ast::NodeId,
- remote_did: DefId) {
- let inlined_did = dcx.tcx.map.local_def_id(inlined_id);
- dcx.tcx.register_item_type(inlined_did,
- dcx.tcx.lookup_item_type(remote_did));
+ let ii = ast.item.decode((cdata, tcx, id_ranges));
+ let ii = ast_map::map_decoded_item(&tcx.map,
+ parent_def_path,
+ parent_did,
+ ii,
+ tcx.sess.next_node_id());
- }
- // copy the entry for the item itself
let item_node_id = match ii {
&InlinedItem::Item(_, ref i) => i.id,
&InlinedItem::TraitItem(_, ref ti) => ti.id,
&InlinedItem::ImplItem(_, ref ii) => ii.id
};
- copy_item_type(dcx, item_node_id, orig_did);
+ let inlined_did = tcx.map.local_def_id(item_node_id);
+ tcx.register_item_type(inlined_did, tcx.lookup_item_type(orig_did));
- // copy the entries of inner items
- if let &InlinedItem::Item(_, ref item) = ii {
- match item.node {
- hir::ItemEnum(ref def, _) => {
- let orig_def = dcx.tcx.lookup_adt_def(orig_did);
- for (i_variant, orig_variant) in
- def.variants.iter().zip(orig_def.variants.iter())
- {
- debug!("astencode: copying variant {:?} => {:?}",
- orig_variant.did, i_variant.node.data.id());
- copy_item_type(dcx, i_variant.node.data.id(), orig_variant.did);
- }
+ for (id, entry) in ast.side_tables.decode((cdata, tcx, id_ranges)) {
+ match entry {
+ TableEntry::Def(def) => {
+ tcx.def_map.borrow_mut().insert(id, def::PathResolution::new(def));
}
- hir::ItemStruct(ref def, _) => {
- if !def.is_struct() {
- let ctor_did = dcx.tcx.lookup_adt_def(orig_did)
- .struct_variant().did;
- debug!("astencode: copying ctor {:?} => {:?}", ctor_did,
- def.id());
- copy_item_type(dcx, def.id(), ctor_did);
- }
+ TableEntry::NodeType(ty) => {
+ tcx.node_type_insert(id, ty);
}
- _ => {}
- }
- }
-}
-
-fn inlined_item_id_range(ii: &InlinedItem) -> IdRange {
- let mut visitor = IdRangeComputingVisitor::new();
- ii.visit(&mut visitor);
- visitor.result()
-}
-
-// ______________________________________________________________________
-// Testing of astencode_gen
-
-#[cfg(test)]
-fn encode_item_ast(rbml_w: &mut Encoder, item: &hir::Item) {
- rbml_w.start_tag(c::tag_tree as usize);
- (*item).encode(rbml_w);
- rbml_w.end_tag();
-}
-
-#[cfg(test)]
-fn decode_item_ast(item_doc: rbml::Doc) -> hir::Item {
- let chi_doc = item_doc.get(c::tag_tree as usize);
- let mut d = reader::Decoder::new(chi_doc);
- Decodable::decode(&mut d).unwrap()
-}
-
-#[cfg(test)]
-trait FakeExtCtxt {
- fn call_site(&self) -> syntax_pos::Span;
- fn cfg(&self) -> ast::CrateConfig;
- fn ident_of(&self, st: &str) -> ast::Ident;
- fn name_of(&self, st: &str) -> ast::Name;
- fn parse_sess(&self) -> &parse::ParseSess;
-}
-
-#[cfg(test)]
-impl FakeExtCtxt for parse::ParseSess {
- fn call_site(&self) -> syntax_pos::Span {
- syntax_pos::Span {
- lo: syntax_pos::BytePos(0),
- hi: syntax_pos::BytePos(0),
- expn_id: syntax_pos::NO_EXPANSION,
- }
- }
- fn cfg(&self) -> ast::CrateConfig { Vec::new() }
- fn ident_of(&self, st: &str) -> ast::Ident {
- parse::token::str_to_ident(st)
- }
- fn name_of(&self, st: &str) -> ast::Name {
- parse::token::intern(st)
- }
- fn parse_sess(&self) -> &parse::ParseSess { self }
-}
-
-#[cfg(test)]
-fn mk_ctxt() -> parse::ParseSess {
- parse::ParseSess::new()
-}
-
-#[cfg(test)]
-fn with_testing_context<T, F: FnOnce(&mut LoweringContext) -> T>(f: F) -> T {
- let mut resolver = DummyResolver;
- let mut lcx = LoweringContext::testing_context(&mut resolver);
- f(&mut lcx)
-}
-
-#[cfg(test)]
-fn roundtrip(in_item: hir::Item) {
- let mut wr = Cursor::new(Vec::new());
- encode_item_ast(&mut Encoder::new(&mut wr), &in_item);
- let rbml_doc = rbml::Doc::new(wr.get_ref());
- let out_item = decode_item_ast(rbml_doc);
-
- assert!(in_item == out_item);
-}
-
-#[test]
-fn test_basic() {
- let cx = mk_ctxt();
- with_testing_context(|lcx| {
- roundtrip(lcx.lower_item("e_item!(&cx,
- fn foo() {}
- ).unwrap()));
- });
-}
-
-#[test]
-fn test_smalltalk() {
- let cx = mk_ctxt();
- with_testing_context(|lcx| {
- roundtrip(lcx.lower_item("e_item!(&cx,
- fn foo() -> isize { 3 + 4 } // first smalltalk program ever executed.
- ).unwrap()));
- });
-}
-
-#[test]
-fn test_more() {
- let cx = mk_ctxt();
- with_testing_context(|lcx| {
- roundtrip(lcx.lower_item("e_item!(&cx,
- fn foo(x: usize, y: usize) -> usize {
- let z = x + y;
- return z;
+ TableEntry::ItemSubsts(item_substs) => {
+ tcx.tables.borrow_mut().item_substs.insert(id, item_substs);
}
- ).unwrap()));
- });
-}
-
-#[test]
-fn test_simplification() {
- use middle::cstore::LOCAL_CRATE;
- use rustc::hir::def_id::CRATE_DEF_INDEX;
-
- let cx = mk_ctxt();
- let item = quote_item!(&cx,
- fn new_int_alist<B>() -> alist<isize, B> {
- fn eq_int(a: isize, b: isize) -> bool { a == b }
- return alist {eq_fn: eq_int, data: Vec::new()};
- }
- ).unwrap();
- let cx = mk_ctxt();
- with_testing_context(|lcx| {
- let hir_item = lcx.lower_item(&item);
- let def_id = DefId { krate: LOCAL_CRATE, index: CRATE_DEF_INDEX }; // dummy
- let item_in = InlinedItemRef::Item(def_id, &hir_item);
- let (item_out, _) = simplify_ast(item_in);
- let item_exp = InlinedItem::Item(def_id, P(lcx.lower_item("e_item!(&cx,
- fn new_int_alist<B>() -> alist<isize, B> {
- return alist {eq_fn: eq_int, data: Vec::new()};
+ TableEntry::Adjustment(adj) => {
+ tcx.tables.borrow_mut().adjustments.insert(id, adj);
}
- ).unwrap())));
- match (item_out, item_exp) {
- (InlinedItem::Item(_, item_out), InlinedItem::Item(_, item_exp)) => {
- assert!(pprust::item_to_string(&item_out) ==
- pprust::item_to_string(&item_exp));
+ TableEntry::ConstQualif(qualif) => {
+ tcx.const_qualif_map.borrow_mut().insert(id, qualif);
}
- _ => bug!()
}
- });
+ }
+
+ ii
}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![allow(non_camel_case_types, non_upper_case_globals)]
-
-pub use self::astencode_tag::*;
-
-// RBML enum definitions and utils shared by the encoder and decoder
-//
-// 0x00..0x1f: reserved for RBML generic type tags
-// 0x20..0xef: free for use, preferred for frequent tags
-// 0xf0..0xff: internally used by RBML to encode 0x100..0xfff in two bytes
-// 0x100..0xfff: free for use, preferred for infrequent tags
-
-pub const tag_items: usize = 0x100; // top-level only
-
-pub const tag_paths_data_name: usize = 0x20;
-
-pub const tag_def_id: usize = 0x21;
-
-pub const tag_items_data: usize = 0x22;
-
-pub const tag_items_data_item: usize = 0x23;
-
-pub const tag_items_data_item_family: usize = 0x24;
-
-pub const tag_items_data_item_type: usize = 0x25;
-
-// GAP 0x26
-
-pub const tag_items_data_item_variant: usize = 0x27;
-
-pub const tag_items_data_parent_item: usize = 0x28;
-
-pub const tag_items_data_item_is_tuple_struct_ctor: usize = 0x29;
-
-pub const tag_items_closure_kind: usize = 0x2a;
-pub const tag_items_closure_ty: usize = 0x2b;
-pub const tag_def_key: usize = 0x2c;
-
-// GAP 0x2d 0x34
-
-pub const tag_index: usize = 0x110; // top-level only
-pub const tag_xref_index: usize = 0x111; // top-level only
-pub const tag_xref_data: usize = 0x112; // top-level only
-pub const tag_attributes: usize = 0x101; // top-level only
-
-// The list of crates that this crate depends on
-pub const tag_crate_deps: usize = 0x102; // top-level only
-
-// A single crate dependency
-pub const tag_crate_dep: usize = 0x35;
-
-pub const tag_crate_hash: usize = 0x103; // top-level only
-pub const tag_crate_crate_name: usize = 0x104; // top-level only
-pub const tag_crate_disambiguator: usize = 0x113; // top-level only
-
-pub const tag_crate_dep_crate_name: usize = 0x36;
-pub const tag_crate_dep_hash: usize = 0x37;
-pub const tag_crate_dep_explicitly_linked: usize = 0x38; // top-level only
-
-pub const tag_item_trait_item: usize = 0x3a;
-
-pub const tag_item_trait_ref: usize = 0x3b;
-
-// discriminator value for variants
-pub const tag_disr_val: usize = 0x3c;
-
-// GAP 0x3d, 0x3e, 0x3f, 0x40
-
-pub const tag_item_field: usize = 0x41;
-// GAP 0x42
-pub const tag_item_variances: usize = 0x43;
-/*
- trait items contain tag_item_trait_item elements,
- impl items contain tag_item_impl_item elements, and classes
- have both. That's because some code treats classes like traits,
- and other code treats them like impls. Because classes can contain
- both, tag_item_trait_item and tag_item_impl_item have to be two
- different tags.
- */
-pub const tag_item_impl_item: usize = 0x44;
-pub const tag_item_trait_method_explicit_self: usize = 0x45;
-
-
-// Reexports are found within module tags. Each reexport contains def_ids
-// and names.
-pub const tag_items_data_item_reexport: usize = 0x46;
-pub const tag_items_data_item_reexport_def_id: usize = 0x47;
-pub const tag_items_data_item_reexport_name: usize = 0x48;
-
-// used to encode crate_ctxt side tables
-enum_from_u32! {
- #[derive(Copy, Clone, PartialEq)]
- #[repr(usize)]
- pub enum astencode_tag { // Reserves 0x50 -- 0x6f
- tag_ast = 0x50,
-
- tag_tree = 0x51,
-
- tag_mir = 0x52,
-
- tag_table = 0x53,
- // GAP 0x54, 0x55
- tag_table_def = 0x56,
- tag_table_node_type = 0x57,
- tag_table_item_subst = 0x58,
- tag_table_freevars = 0x59,
- // GAP 0x5a, 0x5b, 0x5c, 0x5d, 0x5e
- tag_table_method_map = 0x5f,
- // GAP 0x60
- tag_table_adjustments = 0x61,
- // GAP 0x62, 0x63, 0x64, 0x65
- tag_table_upvar_capture_map = 0x66,
- // GAP 0x67, 0x68
- tag_table_const_qualif = 0x69,
- tag_table_cast_kinds = 0x6a,
- }
-}
-
-pub const tag_item_trait_item_sort: usize = 0x70;
-
-pub const tag_crate_triple: usize = 0x105; // top-level only
-
-pub const tag_dylib_dependency_formats: usize = 0x106; // top-level only
-
-// Language items are a top-level directory (for speed). Hierarchy:
-//
-// tag_lang_items
-// - tag_lang_items_item
-// - tag_lang_items_item_id: u32
-// - tag_lang_items_item_index: u32
-
-pub const tag_lang_items: usize = 0x107; // top-level only
-pub const tag_lang_items_item: usize = 0x73;
-pub const tag_lang_items_item_id: usize = 0x74;
-pub const tag_lang_items_item_index: usize = 0x75;
-pub const tag_lang_items_missing: usize = 0x76;
-
-pub const tag_item_unnamed_field: usize = 0x77;
-pub const tag_items_data_item_visibility: usize = 0x78;
-pub const tag_items_data_item_inherent_impl: usize = 0x79;
-// GAP 0x7a
-pub const tag_mod_child: usize = 0x7b;
-// GAP 0x7c
-
-// GAP 0x108
-pub const tag_impls: usize = 0x109; // top-level only
-pub const tag_impls_trait: usize = 0x7d;
-pub const tag_impls_trait_impl: usize = 0x7e;
-
-// GAP 0x7f, 0x80, 0x81
-
-pub const tag_native_libraries: usize = 0x10a; // top-level only
-pub const tag_native_libraries_lib: usize = 0x82;
-pub const tag_native_libraries_name: usize = 0x83;
-pub const tag_native_libraries_kind: usize = 0x84;
-
-pub const tag_plugin_registrar_fn: usize = 0x10b; // top-level only
-
-pub const tag_method_argument_names: usize = 0x85;
-pub const tag_method_argument_name: usize = 0x86;
-
-pub const tag_reachable_ids: usize = 0x10c; // top-level only
-pub const tag_reachable_id: usize = 0x87;
-
-pub const tag_items_data_item_stability: usize = 0x88;
-
-pub const tag_items_data_item_repr: usize = 0x89;
-
-pub const tag_struct_fields: usize = 0x10d; // top-level only
-pub const tag_struct_field: usize = 0x8a;
-
-pub const tag_items_data_item_struct_ctor: usize = 0x8b;
-pub const tag_attribute_is_sugared_doc: usize = 0x8c;
-// GAP 0x8d
-pub const tag_items_data_region: usize = 0x8e;
-
-pub const tag_item_generics: usize = 0x8f;
-// GAP 0x90, 0x91, 0x92, 0x93, 0x94
-
-pub const tag_item_predicates: usize = 0x95;
-// GAP 0x96
-
-pub const tag_predicate: usize = 0x97;
-// GAP 0x98, 0x99
-
-pub const tag_unsafety: usize = 0x9a;
-
-pub const tag_associated_type_names: usize = 0x9b;
-pub const tag_associated_type_name: usize = 0x9c;
-
-pub const tag_polarity: usize = 0x9d;
-
-pub const tag_macro_defs: usize = 0x10e; // top-level only
-pub const tag_macro_def: usize = 0x9e;
-pub const tag_macro_def_body: usize = 0x9f;
-pub const tag_macro_def_span_lo: usize = 0xa8;
-pub const tag_macro_def_span_hi: usize = 0xa9;
-
-pub const tag_paren_sugar: usize = 0xa0;
-
-pub const tag_codemap: usize = 0xa1;
-pub const tag_codemap_filemap: usize = 0xa2;
-
-pub const tag_item_super_predicates: usize = 0xa3;
-
-pub const tag_defaulted_trait: usize = 0xa4;
-
-pub const tag_impl_coerce_unsized_kind: usize = 0xa5;
-
-pub const tag_items_data_item_constness: usize = 0xa6;
-
-pub const tag_items_data_item_deprecation: usize = 0xa7;
-
-pub const tag_items_data_item_defaultness: usize = 0xa8;
-
-pub const tag_items_data_parent_impl: usize = 0xa9;
-
-pub const tag_rustc_version: usize = 0x10f;
-pub fn rustc_version() -> String {
- format!(
- "rustc {}",
- option_env!("CFG_VERSION").unwrap_or("unknown version")
- )
-}
-
-pub const tag_panic_strategy: usize = 0x114;
-
-pub const tag_macro_derive_registrar: usize = 0x115;
-
-// NB: increment this if you change the format of metadata such that
-// rustc_version can't be found.
-pub const metadata_encoding_version : &'static [u8] = &[b'r', b'u', b's', b't', 0, 0, 0, 2];
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![allow(non_camel_case_types)]
-
//! Validates all used crates and extern libraries and loads their metadata
use cstore::{self, CStore, CrateSource, MetadataBlob};
-use decoder;
use loader::{self, CratePaths};
+use macro_import;
+use schema::CrateRoot;
-use rustc::hir::def_id::DefIndex;
+use rustc::hir::def_id::{CrateNum, DefIndex};
use rustc::hir::svh::Svh;
-use rustc::dep_graph::{DepGraph, DepNode};
+use rustc::middle::cstore::LoadedMacro;
use rustc::session::{config, Session};
-use rustc::session::config::PanicStrategy;
+use rustc_back::PanicStrategy;
use rustc::session::search_paths::PathKind;
+use rustc::middle;
use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate};
use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
use rustc::hir::map as hir_map;
use std::cell::{RefCell, Cell};
+use std::ops::Deref;
use std::path::PathBuf;
use std::rc::Rc;
use std::fs;
use syntax::ast;
use syntax::abi::Abi;
-use syntax::codemap;
use syntax::parse;
use syntax::attr;
use syntax::parse::token::InternedString;
-use syntax::visit;
-use syntax_pos::{self, Span, mk_sp, Pos};
+use syntax_pos::{self, Span, mk_sp};
use log;
-struct LocalCrateReader<'a> {
- sess: &'a Session,
+pub struct CrateLoader<'a> {
+ pub sess: &'a Session,
+ pub creader: CrateReader<'a>,
cstore: &'a CStore,
- creader: CrateReader<'a>,
- krate: &'a ast::Crate,
- definitions: &'a hir_map::Definitions,
}
pub struct CrateReader<'a> {
sess: &'a Session,
cstore: &'a CStore,
- next_crate_num: ast::CrateNum,
+ next_crate_num: CrateNum,
foreign_item_map: FnvHashMap<String, Vec<ast::NodeId>>,
local_crate_name: String,
local_crate_config: ast::CrateConfig,
}
-impl<'a> visit::Visitor for LocalCrateReader<'a> {
- fn visit_item(&mut self, a: &ast::Item) {
- self.process_item(a);
- visit::walk_item(self, a);
- }
-}
-
fn dump_crates(cstore: &CStore) {
info!("resolved crates:");
cstore.iter_crate_data_origins(|_, data, opt_source| {
}
#[derive(Debug)]
-struct CrateInfo {
+struct ExternCrateInfo {
ident: String,
name: String,
id: ast::NodeId,
Owned(loader::Library),
}
-impl PMDSource {
- pub fn as_slice<'a>(&'a self) -> &'a [u8] {
+impl Deref for PMDSource {
+ type Target = MetadataBlob;
+
+ fn deref(&self) -> &MetadataBlob {
match *self {
- PMDSource::Registered(ref cmd) => cmd.data(),
- PMDSource::Owned(ref lib) => lib.metadata.as_slice(),
+ PMDSource::Registered(ref cmd) => &cmd.blob,
+ PMDSource::Owned(ref lib) => &lib.metadata
}
}
}
enum LoadResult {
- Previous(ast::CrateNum),
+ Previous(CrateNum),
Loaded(loader::Library),
}
}
}
- fn extract_crate_info(&self, i: &ast::Item) -> Option<CrateInfo> {
+ fn extract_crate_info(&self, i: &ast::Item) -> Option<ExternCrateInfo> {
match i.node {
ast::ItemKind::ExternCrate(ref path_opt) => {
debug!("resolving extern crate stmt. ident: {} path_opt: {:?}",
}
None => i.ident.to_string(),
};
- Some(CrateInfo {
+ Some(ExternCrateInfo {
ident: i.ident.to_string(),
name: name,
id: i.id,
}
fn existing_match(&self, name: &str, hash: Option<&Svh>, kind: PathKind)
- -> Option<ast::CrateNum> {
+ -> Option<CrateNum> {
let mut ret = None;
self.cstore.iter_crate_data(|cnum, data| {
if data.name != name { return }
fn verify_no_symbol_conflicts(&self,
span: Span,
- metadata: &MetadataBlob) {
- let disambiguator = decoder::get_crate_disambiguator(metadata.as_slice());
- let crate_name = decoder::get_crate_name(metadata.as_slice());
-
+ root: &CrateRoot) {
// Check for (potential) conflicts with the local crate
- if self.local_crate_name == crate_name &&
- self.sess.local_crate_disambiguator() == disambiguator {
+ if self.local_crate_name == root.name &&
+ self.sess.local_crate_disambiguator() == &root.disambiguator[..] {
span_fatal!(self.sess, span, E0519,
"the current crate is indistinguishable from one of its \
dependencies: it has the same crate-name `{}` and was \
compiled with the same `-C metadata` arguments. This \
will result in symbol conflicts between the two.",
- crate_name)
+ root.name)
}
- let svh = decoder::get_crate_hash(metadata.as_slice());
// Check for conflicts with any crate loaded so far
self.cstore.iter_crate_data(|_, other| {
- if other.name() == crate_name && // same crate-name
- other.disambiguator() == disambiguator && // same crate-disambiguator
- other.hash() != svh { // but different SVH
+ if other.name() == root.name && // same crate-name
+ other.disambiguator() == root.disambiguator && // same crate-disambiguator
+ other.hash() != root.hash { // but different SVH
span_fatal!(self.sess, span, E0523,
"found two different crates with name `{}` that are \
not distinguished by differing `-C metadata`. This \
will result in symbol conflicts between the two.",
- crate_name)
+ root.name)
}
});
}
span: Span,
lib: loader::Library,
explicitly_linked: bool)
- -> (ast::CrateNum, Rc<cstore::CrateMetadata>,
+ -> (CrateNum, Rc<cstore::CrateMetadata>,
cstore::CrateSource) {
info!("register crate `extern crate {} as {}`", name, ident);
- self.verify_no_symbol_conflicts(span, &lib.metadata);
+ let crate_root = lib.metadata.get_root();
+ self.verify_no_symbol_conflicts(span, &crate_root);
// Claim this crate number and cache it
let cnum = self.next_crate_num;
- self.next_crate_num += 1;
+ self.next_crate_num = CrateNum::from_u32(cnum.as_u32() + 1);
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
let loader::Library { dylib, rlib, metadata } = lib;
- let cnum_map = self.resolve_crate_deps(root, metadata.as_slice(), cnum, span);
- let staged_api = self.is_staged_api(metadata.as_slice());
+ let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span);
+
+ if crate_root.macro_derive_registrar.is_some() {
+ self.sess.span_err(span, "crates of the `rustc-macro` crate type \
+ cannot be linked at runtime");
+ }
let cmeta = Rc::new(cstore::CrateMetadata {
name: name.to_string(),
extern_crate: Cell::new(None),
- index: decoder::load_index(metadata.as_slice()),
- xref_index: decoder::load_xrefs(metadata.as_slice()),
- key_map: decoder::load_key_map(metadata.as_slice()),
- data: metadata,
+ key_map: metadata.load_key_map(crate_root.index),
+ root: crate_root,
+ blob: metadata,
cnum_map: RefCell::new(cnum_map),
cnum: cnum,
codemap_import_info: RefCell::new(vec![]),
- staged_api: staged_api,
explicitly_linked: Cell::new(explicitly_linked),
});
- if decoder::get_derive_registrar_fn(cmeta.data.as_slice()).is_some() {
- self.sess.span_err(span, "crates of the `rustc-macro` crate type \
- cannot be linked at runtime");
- }
-
let source = cstore::CrateSource {
dylib: dylib,
rlib: rlib,
(cnum, cmeta, source)
}
- fn is_staged_api(&self, data: &[u8]) -> bool {
- let attrs = decoder::get_crate_attributes(data);
- for attr in &attrs {
- if attr.name() == "stable" || attr.name() == "unstable" {
- return true
- }
- }
- false
- }
-
fn resolve_crate(&mut self,
root: &Option<CratePaths>,
ident: &str,
span: Span,
kind: PathKind,
explicitly_linked: bool)
- -> (ast::CrateNum, Rc<cstore::CrateMetadata>, cstore::CrateSource) {
+ -> (CrateNum, Rc<cstore::CrateMetadata>, cstore::CrateSource) {
info!("resolving crate `extern crate {} as {}`", name, ident);
let result = match self.existing_match(name, hash, kind) {
Some(cnum) => LoadResult::Previous(cnum),
// Note that we only do this for target triple crates, though, as we
// don't want to match a host crate against an equivalent target one
// already loaded.
+ let root = library.metadata.get_root();
if loader.triple == self.sess.opts.target_triple {
- let meta_hash = decoder::get_crate_hash(library.metadata.as_slice());
- let meta_name = decoder::get_crate_name(library.metadata.as_slice())
- .to_string();
let mut result = LoadResult::Loaded(library);
self.cstore.iter_crate_data(|cnum, data| {
- if data.name() == meta_name && meta_hash == data.hash() {
+ if data.name() == root.name && root.hash == data.hash() {
assert!(loader.hash.is_none());
info!("load success, going to previous cnum: {}", cnum);
result = LoadResult::Previous(cnum);
}
fn update_extern_crate(&mut self,
- cnum: ast::CrateNum,
+ cnum: CrateNum,
mut extern_crate: ExternCrate,
- visited: &mut FnvHashSet<(ast::CrateNum, bool)>)
+ visited: &mut FnvHashSet<(CrateNum, bool)>)
{
if !visited.insert((cnum, extern_crate.direct)) { return }
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(&mut self,
root: &Option<CratePaths>,
- cdata: &[u8],
- krate: ast::CrateNum,
+ crate_root: &CrateRoot,
+ metadata: &MetadataBlob,
+ krate: CrateNum,
span: Span)
-> cstore::CrateNumMap {
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
- let map: FnvHashMap<_, _> = decoder::get_crate_deps(cdata).iter().map(|dep| {
+ let deps = crate_root.crate_deps.decode(metadata);
+ let map: FnvHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, ..) = self.resolve_crate(root,
- &dep.name,
- &dep.name,
+ &dep.name.as_str(),
+ &dep.name.as_str(),
Some(&dep.hash),
span,
PathKind::Dependency,
dep.explicitly_linked);
- (dep.cnum, local_cnum)
+ (CrateNum::new(crate_num + 1), local_cnum)
}).collect();
- let max_cnum = map.values().cloned().max().unwrap_or(0);
+ let max_cnum = map.values().cloned().max().map(|cnum| cnum.as_u32()).unwrap_or(0);
// we map 0 and all other holes in the map to our parent crate. The "additional"
// self-dependencies should be harmless.
- (0..max_cnum+1).map(|cnum| map.get(&cnum).cloned().unwrap_or(krate)).collect()
+ (0..max_cnum+1).map(|cnum| {
+ map.get(&CrateNum::from_u32(cnum)).cloned().unwrap_or(krate)
+ }).collect()
}
- fn read_extension_crate(&mut self, span: Span, info: &CrateInfo) -> ExtensionCrate {
+ fn read_extension_crate(&mut self, span: Span, info: &ExternCrateInfo) -> ExtensionCrate {
info!("read extension crate {} `extern crate {} as {}` linked={}",
info.id, info.name, info.ident, info.should_link);
let target_triple = &self.sess.opts.target_triple[..];
let ci = self.extract_crate_info(item).unwrap();
let ekrate = self.read_extension_crate(item.span, &ci);
+ let root = ekrate.metadata.get_root();
let source_name = format!("<{} macros>", item.ident);
let mut ret = Macros {
macro_rules: Vec::new(),
custom_derive_registrar: None,
- svh: decoder::get_crate_hash(ekrate.metadata.as_slice()),
+ svh: root.hash,
dylib: None,
};
- decoder::each_exported_macro(ekrate.metadata.as_slice(),
- |name, attrs, span, body| {
+ for def in root.macro_defs.decode(&*ekrate.metadata) {
// NB: Don't use parse::parse_tts_from_source_str because it parses with
// quote_depth > 0.
let mut p = parse::new_parser_from_source_str(&self.sess.parse_sess,
self.local_crate_config.clone(),
source_name.clone(),
- body);
+ def.body);
let lo = p.span.lo;
let body = match p.parse_all_token_trees() {
Ok(body) => body,
let local_span = mk_sp(lo, p.last_span.hi);
// Mark the attrs as used
- for attr in &attrs {
+ for attr in &def.attrs {
attr::mark_used(attr);
}
ret.macro_rules.push(ast::MacroDef {
- ident: ast::Ident::with_empty_ctxt(name),
- attrs: attrs,
+ ident: ast::Ident::with_empty_ctxt(def.name),
+ attrs: def.attrs,
id: ast::DUMMY_NODE_ID,
span: local_span,
imported_from: Some(item.ident),
body: body,
});
self.sess.imported_macro_spans.borrow_mut()
- .insert(local_span, (name.as_str().to_string(), span));
- true
- });
+ .insert(local_span, (def.name.as_str().to_string(), def.span));
+ }
- match decoder::get_derive_registrar_fn(ekrate.metadata.as_slice()) {
+ match root.macro_derive_registrar {
Some(id) => ret.custom_derive_registrar = Some(id),
// If this crate is not a rustc-macro crate then we might be able to
/// SVH and DefIndex of the registrar function.
pub fn find_plugin_registrar(&mut self, span: Span, name: &str)
-> Option<(PathBuf, Svh, DefIndex)> {
- let ekrate = self.read_extension_crate(span, &CrateInfo {
+ let ekrate = self.read_extension_crate(span, &ExternCrateInfo {
name: name.to_string(),
ident: name.to_string(),
id: ast::DUMMY_NODE_ID,
span_fatal!(self.sess, span, E0456, "{}", &message[..]);
}
- let svh = decoder::get_crate_hash(ekrate.metadata.as_slice());
- let registrar =
- decoder::get_plugin_registrar_fn(ekrate.metadata.as_slice());
-
- match (ekrate.dylib.as_ref(), registrar) {
+ let root = ekrate.metadata.get_root();
+ match (ekrate.dylib.as_ref(), root.plugin_registrar_fn) {
(Some(dylib), Some(reg)) => {
- Some((dylib.to_path_buf(), svh, reg))
+ Some((dylib.to_path_buf(), root.hash, reg))
}
(None, Some(_)) => {
span_err!(self.sess, span, E0457,
// The logic for finding the panic runtime here is pretty much the same
// as the allocator case with the only addition that the panic strategy
// compilation mode also comes into play.
- let desired_strategy = self.sess.opts.cg.panic.clone();
+ let desired_strategy = self.sess.panic_strategy();
let mut runtime_found = false;
let mut needs_panic_runtime = attr::contains_name(&krate.attrs,
"needs_panic_runtime");
}
fn inject_dependency_if(&self,
- krate: ast::CrateNum,
+ krate: CrateNum,
what: &str,
needs_dep: &Fn(&cstore::CrateMetadata) -> bool) {
// don't perform this validation if the session has errors, as one of
}
}
-impl<'a> LocalCrateReader<'a> {
- fn new(sess: &'a Session,
- cstore: &'a CStore,
- defs: &'a hir_map::Definitions,
- krate: &'a ast::Crate,
- local_crate_name: &str)
- -> LocalCrateReader<'a> {
- LocalCrateReader {
+impl<'a> CrateLoader<'a> {
+ pub fn new(sess: &'a Session, cstore: &'a CStore, krate: &ast::Crate, crate_name: &str)
+ -> Self {
+ let loader = CrateLoader {
sess: sess,
cstore: cstore,
- creader: CrateReader::new(sess, cstore, local_crate_name, krate.config.clone()),
- krate: krate,
- definitions: defs,
- }
- }
-
- // Traverses an AST, reading all the information about use'd crates and
- // extern libraries necessary for later resolving, typechecking, linking,
- // etc.
- fn read_crates(&mut self, dep_graph: &DepGraph) {
- let _task = dep_graph.in_task(DepNode::CrateReader);
-
- self.process_crate(self.krate);
- visit::walk_crate(self, self.krate);
- self.creader.inject_allocator_crate();
- self.creader.inject_panic_runtime(self.krate);
-
- if log_enabled!(log::INFO) {
- dump_crates(&self.cstore);
- }
-
- for &(ref name, kind) in &self.sess.opts.libs {
- register_native_lib(self.sess, self.cstore, None, name.clone(), kind);
- }
- self.creader.register_statically_included_foreign_items();
- }
+ creader: CrateReader::new(sess, cstore, crate_name, krate.config.clone()),
+ };
- fn process_crate(&self, c: &ast::Crate) {
- for a in c.attrs.iter().filter(|m| m.name() == "link_args") {
- if let Some(ref linkarg) = a.value_str() {
- self.cstore.add_used_link_args(&linkarg);
+ for attr in krate.attrs.iter().filter(|m| m.name() == "link_args") {
+ if let Some(ref linkarg) = attr.value_str() {
+ loader.cstore.add_used_link_args(&linkarg);
}
}
- }
-
- fn process_item(&mut self, i: &ast::Item) {
- match i.node {
- ast::ItemKind::ExternCrate(_) => {
- // If this `extern crate` item has `#[macro_use]` then we can
- // safely skip it. These annotations were processed during macro
- // expansion and are already loaded (if necessary) into our
- // crate store.
- //
- // Note that it's important we *don't* fall through below as
- // some `#[macro_use]` crate are explicitly not linked (e.g.
- // macro crates) so we want to ensure we avoid `resolve_crate`
- // with those.
- if attr::contains_name(&i.attrs, "macro_use") {
- if self.cstore.was_used_for_derive_macros(i) {
- return
- }
- }
- if let Some(info) = self.creader.extract_crate_info(i) {
- if !info.should_link {
- return;
- }
- let (cnum, ..) = self.creader.resolve_crate(&None,
- &info.ident,
- &info.name,
- None,
- i.span,
- PathKind::Crate,
- true);
-
- let def_id = self.definitions.opt_local_def_id(i.id).unwrap();
- let len = self.definitions.def_path(def_id.index).data.len();
-
- self.creader.update_extern_crate(cnum,
- ExternCrate {
- def_id: def_id,
- span: i.span,
- direct: true,
- path_len: len,
- },
- &mut FnvHashSet());
- self.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
- }
- }
- ast::ItemKind::ForeignMod(ref fm) => self.process_foreign_mod(i, fm),
- _ => { }
- }
+ loader
}
fn process_foreign_mod(&mut self, i: &ast::Item, fm: &ast::ForeignMod) {
}
}
-/// Traverses an AST, reading all the information about use'd crates and extern
-/// libraries necessary for later resolving, typechecking, linking, etc.
-pub fn read_local_crates(sess: & Session,
- cstore: & CStore,
- defs: & hir_map::Definitions,
- krate: & ast::Crate,
- local_crate_name: &str,
- dep_graph: &DepGraph) {
- LocalCrateReader::new(sess, cstore, defs, krate, local_crate_name).read_crates(dep_graph)
-}
-
-/// Imports the codemap from an external crate into the codemap of the crate
-/// currently being compiled (the "local crate").
-///
-/// The import algorithm works analogous to how AST items are inlined from an
-/// external crate's metadata:
-/// For every FileMap in the external codemap an 'inline' copy is created in the
-/// local codemap. The correspondence relation between external and local
-/// FileMaps is recorded in the `ImportedFileMap` objects returned from this
-/// function. When an item from an external crate is later inlined into this
-/// crate, this correspondence information is used to translate the span
-/// information of the inlined item so that it refers the correct positions in
-/// the local codemap (see `astencode::DecodeContext::tr_span()`).
-///
-/// The import algorithm in the function below will reuse FileMaps already
-/// existing in the local codemap. For example, even if the FileMap of some
-/// source file of libstd gets imported many times, there will only ever be
-/// one FileMap object for the corresponding file in the local codemap.
-///
-/// Note that imported FileMaps do not actually contain the source code of the
-/// file they represent, just information about length, line breaks, and
-/// multibyte characters. This information is enough to generate valid debuginfo
-/// for items inlined from other crates.
-pub fn import_codemap(local_codemap: &codemap::CodeMap,
- metadata: &MetadataBlob)
- -> Vec<cstore::ImportedFileMap> {
- let external_codemap = decoder::get_imported_filemaps(metadata.as_slice());
-
- let imported_filemaps = external_codemap.into_iter().map(|filemap_to_import| {
- // Try to find an existing FileMap that can be reused for the filemap to
- // be imported. A FileMap is reusable if it is exactly the same, just
- // positioned at a different offset within the codemap.
- let reusable_filemap = {
- local_codemap.files
- .borrow()
- .iter()
- .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import))
- .map(|rc| rc.clone())
- };
-
- match reusable_filemap {
- Some(fm) => {
- cstore::ImportedFileMap {
- original_start_pos: filemap_to_import.start_pos,
- original_end_pos: filemap_to_import.end_pos,
- translated_filemap: fm
- }
- }
- None => {
- // We can't reuse an existing FileMap, so allocate a new one
- // containing the information we need.
- let syntax_pos::FileMap {
- name,
- abs_path,
- start_pos,
- end_pos,
- lines,
- multibyte_chars,
- ..
- } = filemap_to_import;
-
- let source_length = (end_pos - start_pos).to_usize();
-
- // Translate line-start positions and multibyte character
- // position into frame of reference local to file.
- // `CodeMap::new_imported_filemap()` will then translate those
- // coordinates to their new global frame of reference when the
- // offset of the FileMap is known.
- let mut lines = lines.into_inner();
- for pos in &mut lines {
- *pos = *pos - start_pos;
- }
- let mut multibyte_chars = multibyte_chars.into_inner();
- for mbc in &mut multibyte_chars {
- mbc.pos = mbc.pos - start_pos;
- }
+impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> {
+ fn postprocess(&mut self, krate: &ast::Crate) {
+ self.creader.inject_allocator_crate();
+ self.creader.inject_panic_runtime(krate);
- let local_version = local_codemap.new_imported_filemap(name,
- abs_path,
- source_length,
- lines,
- multibyte_chars);
- cstore::ImportedFileMap {
- original_start_pos: start_pos,
- original_end_pos: end_pos,
- translated_filemap: local_version
- }
- }
+ if log_enabled!(log::INFO) {
+ dump_crates(&self.cstore);
}
- }).collect();
- return imported_filemaps;
-
- fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap,
- fm2: &syntax_pos::FileMap)
- -> bool {
- if fm1.name != fm2.name {
- return false;
+ for &(ref name, kind) in &self.sess.opts.libs {
+ register_native_lib(self.sess, self.cstore, None, name.clone(), kind);
}
+ self.creader.register_statically_included_foreign_items();
+ }
- let lines1 = fm1.lines.borrow();
- let lines2 = fm2.lines.borrow();
-
- if lines1.len() != lines2.len() {
- return false;
+ fn process_item(&mut self, item: &ast::Item, definitions: &hir_map::Definitions) {
+ match item.node {
+ ast::ItemKind::ExternCrate(_) => {}
+ ast::ItemKind::ForeignMod(ref fm) => return self.process_foreign_mod(item, fm),
+ _ => return,
}
- for (&line1, &line2) in lines1.iter().zip(lines2.iter()) {
- if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) {
- return false;
+ // If this `extern crate` item has `#[macro_use]` then we can safely skip it.
+ // These annotations were processed during macro expansion and are already loaded
+ // (if necessary) into our crate store.
+ //
+ // Note that it's important we *don't* fall through below as some `#[macro_use]`
+ // crates are explicitly not linked (e.g. macro crates) so we want to ensure
+ // we avoid `resolve_crate` with those.
+ if attr::contains_name(&item.attrs, "macro_use") {
+ if self.cstore.was_used_for_derive_macros(item) {
+ return
}
}
- let multibytes1 = fm1.multibyte_chars.borrow();
- let multibytes2 = fm2.multibyte_chars.borrow();
+ if let Some(info) = self.creader.extract_crate_info(item) {
+ if !info.should_link {
+ return;
+ }
- if multibytes1.len() != multibytes2.len() {
- return false;
- }
+ let (cnum, ..) = self.creader.resolve_crate(
+ &None, &info.ident, &info.name, None, item.span, PathKind::Crate, true,
+ );
- for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) {
- if (mb1.bytes != mb2.bytes) ||
- ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) {
- return false;
- }
+ let def_id = definitions.opt_local_def_id(item.id).unwrap();
+ let len = definitions.def_path(def_id.index).data.len();
+
+ let extern_crate =
+ ExternCrate { def_id: def_id, span: item.span, direct: true, path_len: len };
+ self.creader.update_extern_crate(cnum, extern_crate, &mut FnvHashSet());
+
+ self.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
}
+ }
- true
+ fn load_macros(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec<LoadedMacro> {
+ macro_import::load_macros(self, extern_crate, allows_macros)
}
}
// except according to those terms.
use cstore;
-use common;
-use decoder;
use encoder;
use loader;
+use schema;
-use middle::cstore::{InlinedItem, CrateStore, CrateSource, ChildItem, ExternCrate, DefLike};
-use middle::cstore::{NativeLibraryKind, LinkMeta, LinkagePreference};
-use rustc::hir::def;
-use middle::lang_items;
-use rustc::ty::{self, Ty, TyCtxt, VariantKind};
-use rustc::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX};
+use rustc::middle::cstore::{InlinedItem, CrateStore, CrateSource, ExternCrate};
+use rustc::middle::cstore::{NativeLibraryKind, LinkMeta, LinkagePreference};
+use rustc::hir::def::{self, Def};
+use rustc::middle::lang_items;
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX};
use rustc::dep_graph::DepNode;
use rustc::hir::map as hir_map;
use rustc::hir::map::DefKey;
use rustc::mir::repr::Mir;
use rustc::mir::mir_map::MirMap;
-use rustc::util::nodemap::{FnvHashMap, NodeSet, DefIdMap};
-use rustc::session::config::PanicStrategy;
+use rustc::util::nodemap::{NodeSet, DefIdMap};
+use rustc_back::PanicStrategy;
-use std::cell::RefCell;
-use std::rc::Rc;
use std::path::PathBuf;
use syntax::ast;
use syntax::attr;
use rustc::hir;
impl<'tcx> CrateStore<'tcx> for cstore::CStore {
+ fn describe_def(&self, def: DefId) -> Option<Def> {
+ self.dep_graph.read(DepNode::MetaData(def));
+ self.get_crate_data(def.krate).get_def(def.index)
+ }
+
fn stability(&self, def: DefId) -> Option<attr::Stability> {
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_stability(&cdata, def.index)
+ self.get_crate_data(def.krate).get_stability(def.index)
}
fn deprecation(&self, def: DefId) -> Option<attr::Deprecation> {
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_deprecation(&cdata, def.index)
+ self.get_crate_data(def.krate).get_deprecation(def.index)
}
fn visibility(&self, def: DefId) -> ty::Visibility {
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_visibility(&cdata, def.index)
+ self.get_crate_data(def.krate).get_visibility(def.index)
}
fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind
{
assert!(!def_id.is_local());
self.dep_graph.read(DepNode::MetaData(def_id));
- let cdata = self.get_crate_data(def_id.krate);
- decoder::closure_kind(&cdata, def_id.index)
+ self.get_crate_data(def_id.krate).closure_kind(def_id.index)
}
fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ty::ClosureTy<'tcx> {
assert!(!def_id.is_local());
self.dep_graph.read(DepNode::MetaData(def_id));
- let cdata = self.get_crate_data(def_id.krate);
- decoder::closure_ty(&cdata, def_id.index, tcx)
+ self.get_crate_data(def_id.krate).closure_ty(def_id.index, tcx)
}
fn item_variances(&self, def: DefId) -> Vec<ty::Variance> {
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_item_variances(&cdata, def.index)
- }
-
- fn repr_attrs(&self, def: DefId) -> Vec<attr::ReprAttr> {
- self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_repr_attrs(&cdata, def.index)
+ self.get_crate_data(def.krate).get_item_variances(def.index)
}
fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> Ty<'tcx>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_type(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_type(def.index, tcx)
}
fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> ty::GenericPredicates<'tcx>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_predicates(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_predicates(def.index, tcx)
}
fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> ty::GenericPredicates<'tcx>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_super_predicates(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_super_predicates(def.index, tcx)
}
fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> &'tcx ty::Generics<'tcx>
+ -> ty::Generics<'tcx>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_generics(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_generics(def.index, tcx)
}
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute>
{
self.dep_graph.read(DepNode::MetaData(def_id));
- let cdata = self.get_crate_data(def_id.krate);
- decoder::get_item_attrs(&cdata, def_id.index)
+ self.get_crate_data(def_id.krate).get_item_attrs(def_id.index)
}
fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::TraitDef<'tcx>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_trait_def(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_trait_def(def.index, tcx)
}
fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_adt_def(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_adt_def(def.index, tcx)
}
- fn method_arg_names(&self, did: DefId) -> Vec<String>
+ fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>
{
self.dep_graph.read(DepNode::MetaData(did));
- let cdata = self.get_crate_data(did.krate);
- decoder::get_method_arg_names(&cdata, did.index)
- }
-
- fn item_name(&self, def: DefId) -> ast::Name {
- self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_item_name(&cdata, def.index)
- }
-
- fn opt_item_name(&self, def: DefId) -> Option<ast::Name> {
- self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::maybe_get_item_name(&cdata, def.index)
+ self.get_crate_data(did.krate).get_fn_arg_names(did.index)
}
fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>
{
self.dep_graph.read(DepNode::MetaData(def_id));
- let mut result = vec![];
- let cdata = self.get_crate_data(def_id.krate);
- decoder::each_inherent_implementation_for_type(&cdata, def_id.index,
- |iid| result.push(iid));
- result
+ self.get_crate_data(def_id.krate).get_inherent_implementations_for_type(def_id.index)
}
- fn implementations_of_trait(&self, def_id: DefId) -> Vec<DefId>
+ fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
{
- self.dep_graph.read(DepNode::MetaData(def_id));
+ if let Some(def_id) = filter {
+ self.dep_graph.read(DepNode::MetaData(def_id));
+ }
let mut result = vec![];
self.iter_crate_data(|_, cdata| {
- decoder::each_implementation_for_trait(cdata, def_id, &mut |iid| {
- result.push(iid)
- })
+ cdata.get_implementations_for_trait(filter, &mut result)
});
result
}
- fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> Vec<Rc<ty::Method<'tcx>>>
- {
- self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_provided_trait_methods(&cdata, def.index, tcx)
- }
-
- fn trait_item_def_ids(&self, def: DefId)
- -> Vec<ty::ImplOrTraitItemId>
- {
- self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_trait_item_def_ids(&cdata, def.index)
- }
-
- fn impl_items(&self, impl_def_id: DefId) -> Vec<ty::ImplOrTraitItemId>
- {
- self.dep_graph.read(DepNode::MetaData(impl_def_id));
- let cdata = self.get_crate_data(impl_def_id.krate);
- decoder::get_impl_items(&cdata, impl_def_id.index)
+ fn impl_or_trait_items(&self, def_id: DefId) -> Vec<DefId> {
+ self.dep_graph.read(DepNode::MetaData(def_id));
+ let mut result = vec![];
+ self.get_crate_data(def_id.krate)
+ .each_child_of_item(def_id.index, |child| result.push(child.def_id));
+ result
}
- fn impl_polarity(&self, def: DefId) -> Option<hir::ImplPolarity>
+ fn impl_polarity(&self, def: DefId) -> hir::ImplPolarity
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_impl_polarity(&cdata, def.index)
+ self.get_crate_data(def.krate).get_impl_polarity(def.index)
}
fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> Option<ty::TraitRef<'tcx>>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_impl_trait(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_impl_trait(def.index, tcx)
}
fn custom_coerce_unsized_kind(&self, def: DefId)
-> Option<ty::adjustment::CustomCoerceUnsized>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_custom_coerce_unsized_kind(&cdata, def.index)
- }
-
- // FIXME: killme
- fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
- -> Vec<Rc<ty::AssociatedConst<'tcx>>> {
- self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_associated_consts(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_custom_coerce_unsized_kind(def.index)
}
fn impl_parent(&self, impl_def: DefId) -> Option<DefId> {
self.dep_graph.read(DepNode::MetaData(impl_def));
- let cdata = self.get_crate_data(impl_def.krate);
- decoder::get_parent_impl(&*cdata, impl_def.index)
+ self.get_crate_data(impl_def.krate).get_parent_impl(impl_def.index)
}
fn trait_of_item(&self, def_id: DefId) -> Option<DefId> {
self.dep_graph.read(DepNode::MetaData(def_id));
- let cdata = self.get_crate_data(def_id.krate);
- decoder::get_trait_of_item(&cdata, def_id.index)
+ self.get_crate_data(def_id.krate).get_trait_of_item(def_id.index)
}
fn impl_or_trait_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> Option<ty::ImplOrTraitItem<'tcx>>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_impl_or_trait_item(&cdata, def.index, tcx)
+ self.get_crate_data(def.krate).get_impl_or_trait_item(def.index, tcx)
}
fn is_const_fn(&self, did: DefId) -> bool
{
self.dep_graph.read(DepNode::MetaData(did));
- let cdata = self.get_crate_data(did.krate);
- decoder::is_const_fn(&cdata, did.index)
+ self.get_crate_data(did.krate).is_const_fn(did.index)
}
fn is_defaulted_trait(&self, trait_def_id: DefId) -> bool
{
self.dep_graph.read(DepNode::MetaData(trait_def_id));
- let cdata = self.get_crate_data(trait_def_id.krate);
- decoder::is_defaulted_trait(&cdata, trait_def_id.index)
- }
-
- fn is_impl(&self, did: DefId) -> bool
- {
- self.dep_graph.read(DepNode::MetaData(did));
- let cdata = self.get_crate_data(did.krate);
- decoder::is_impl(&cdata, did.index)
+ self.get_crate_data(trait_def_id.krate).is_defaulted_trait(trait_def_id.index)
}
fn is_default_impl(&self, impl_did: DefId) -> bool {
self.dep_graph.read(DepNode::MetaData(impl_did));
- let cdata = self.get_crate_data(impl_did.krate);
- decoder::is_default_impl(&cdata, impl_did.index)
+ self.get_crate_data(impl_did.krate).is_default_impl(impl_did.index)
}
fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool {
self.dep_graph.read(DepNode::MetaData(did));
- let cdata = self.get_crate_data(did.krate);
- decoder::is_extern_item(&cdata, did.index, tcx)
+ self.get_crate_data(did.krate).is_extern_item(did.index, tcx)
}
fn is_foreign_item(&self, did: DefId) -> bool {
- let cdata = self.get_crate_data(did.krate);
- decoder::is_foreign_item(&cdata, did.index)
+ self.get_crate_data(did.krate).is_foreign_item(did.index)
}
fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool
self.do_is_statically_included_foreign_item(id)
}
- fn is_typedef(&self, did: DefId) -> bool {
- self.dep_graph.read(DepNode::MetaData(did));
- let cdata = self.get_crate_data(did.krate);
- decoder::is_typedef(&cdata, did.index)
- }
-
- fn dylib_dependency_formats(&self, cnum: ast::CrateNum)
- -> Vec<(ast::CrateNum, LinkagePreference)>
+ fn dylib_dependency_formats(&self, cnum: CrateNum)
+ -> Vec<(CrateNum, LinkagePreference)>
{
- let cdata = self.get_crate_data(cnum);
- decoder::get_dylib_dependency_formats(&cdata)
+ self.get_crate_data(cnum).get_dylib_dependency_formats()
}
- fn lang_items(&self, cnum: ast::CrateNum) -> Vec<(DefIndex, usize)>
+ fn lang_items(&self, cnum: CrateNum) -> Vec<(DefIndex, usize)>
{
- let mut result = vec![];
- let crate_data = self.get_crate_data(cnum);
- decoder::each_lang_item(&crate_data, |did, lid| {
- result.push((did, lid)); true
- });
- result
+ self.get_crate_data(cnum).get_lang_items()
}
- fn missing_lang_items(&self, cnum: ast::CrateNum)
+ fn missing_lang_items(&self, cnum: CrateNum)
-> Vec<lang_items::LangItem>
{
- let cdata = self.get_crate_data(cnum);
- decoder::get_missing_lang_items(&cdata)
+ self.get_crate_data(cnum).get_missing_lang_items()
}
- fn is_staged_api(&self, cnum: ast::CrateNum) -> bool
+ fn is_staged_api(&self, cnum: CrateNum) -> bool
{
- self.get_crate_data(cnum).staged_api
+ self.get_crate_data(cnum).is_staged_api()
}
- fn is_explicitly_linked(&self, cnum: ast::CrateNum) -> bool
+ fn is_explicitly_linked(&self, cnum: CrateNum) -> bool
{
self.get_crate_data(cnum).explicitly_linked.get()
}
- fn is_allocator(&self, cnum: ast::CrateNum) -> bool
+ fn is_allocator(&self, cnum: CrateNum) -> bool
{
self.get_crate_data(cnum).is_allocator()
}
- fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool
+ fn is_panic_runtime(&self, cnum: CrateNum) -> bool
{
self.get_crate_data(cnum).is_panic_runtime()
}
- fn is_compiler_builtins(&self, cnum: ast::CrateNum) -> bool {
+ fn is_compiler_builtins(&self, cnum: CrateNum) -> bool {
self.get_crate_data(cnum).is_compiler_builtins()
}
- fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy {
+ fn panic_strategy(&self, cnum: CrateNum) -> PanicStrategy {
self.get_crate_data(cnum).panic_strategy()
}
- fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec<ast::Attribute>
- {
- decoder::get_crate_attributes(self.get_crate_data(cnum).data())
- }
-
- fn crate_name(&self, cnum: ast::CrateNum) -> token::InternedString
+ fn crate_name(&self, cnum: CrateNum) -> token::InternedString
{
token::intern_and_get_ident(&self.get_crate_data(cnum).name[..])
}
- fn original_crate_name(&self, cnum: ast::CrateNum) -> token::InternedString
+ fn original_crate_name(&self, cnum: CrateNum) -> token::InternedString
{
token::intern_and_get_ident(&self.get_crate_data(cnum).name())
}
- fn extern_crate(&self, cnum: ast::CrateNum) -> Option<ExternCrate>
+ fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate>
{
self.get_crate_data(cnum).extern_crate.get()
}
- fn crate_hash(&self, cnum: ast::CrateNum) -> Svh
+ fn crate_hash(&self, cnum: CrateNum) -> Svh
{
- let cdata = self.get_crate_data(cnum);
- decoder::get_crate_hash(cdata.data())
+ self.get_crate_hash(cnum)
}
- fn crate_disambiguator(&self, cnum: ast::CrateNum) -> token::InternedString
+ fn crate_disambiguator(&self, cnum: CrateNum) -> token::InternedString
{
- let cdata = self.get_crate_data(cnum);
- token::intern_and_get_ident(decoder::get_crate_disambiguator(cdata.data()))
+ token::intern_and_get_ident(&self.get_crate_data(cnum).disambiguator())
}
- fn crate_struct_field_attrs(&self, cnum: ast::CrateNum)
- -> FnvHashMap<DefId, Vec<ast::Attribute>>
+ fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>
{
- decoder::get_struct_field_attrs(&self.get_crate_data(cnum))
- }
-
- fn plugin_registrar_fn(&self, cnum: ast::CrateNum) -> Option<DefId>
- {
- let cdata = self.get_crate_data(cnum);
- decoder::get_plugin_registrar_fn(cdata.data()).map(|index| DefId {
+ self.get_crate_data(cnum).root.plugin_registrar_fn.map(|index| DefId {
krate: cnum,
index: index
})
}
- fn native_libraries(&self, cnum: ast::CrateNum) -> Vec<(NativeLibraryKind, String)>
+ fn native_libraries(&self, cnum: CrateNum) -> Vec<(NativeLibraryKind, String)>
{
- let cdata = self.get_crate_data(cnum);
- decoder::get_native_libraries(&cdata)
+ self.get_crate_data(cnum).get_native_libraries()
}
- fn reachable_ids(&self, cnum: ast::CrateNum) -> Vec<DefId>
+ fn reachable_ids(&self, cnum: CrateNum) -> Vec<DefId>
{
- let cdata = self.get_crate_data(cnum);
- decoder::get_reachable_ids(&cdata)
+ self.get_crate_data(cnum).get_reachable_ids()
}
- fn is_no_builtins(&self, cnum: ast::CrateNum) -> bool {
- attr::contains_name(&self.crate_attrs(cnum), "no_builtins")
+ fn is_no_builtins(&self, cnum: CrateNum) -> bool {
+ self.get_crate_data(cnum).is_no_builtins()
}
fn def_index_for_def_key(&self,
- cnum: ast::CrateNum,
+ cnum: CrateNum,
def: DefKey)
-> Option<DefIndex> {
let cdata = self.get_crate_data(cnum);
// canonical name for an item.
//
// self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::def_key(&cdata, def.index)
+ self.get_crate_data(def.krate).def_key(def.index)
}
fn relative_def_path(&self, def: DefId) -> Option<hir_map::DefPath> {
// commented out:
//
// self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::def_path(&cdata, def.index)
+ self.get_crate_data(def.krate).def_path(def.index)
}
- fn variant_kind(&self, def_id: DefId) -> Option<VariantKind> {
+ fn variant_kind(&self, def_id: DefId) -> Option<ty::VariantKind>
+ {
self.dep_graph.read(DepNode::MetaData(def_id));
- let cdata = self.get_crate_data(def_id.krate);
- decoder::get_variant_kind(&cdata, def_id.index)
+ self.get_crate_data(def_id.krate).get_variant_kind(def_id.index)
}
fn struct_ctor_def_id(&self, struct_def_id: DefId) -> Option<DefId>
{
self.dep_graph.read(DepNode::MetaData(struct_def_id));
- let cdata = self.get_crate_data(struct_def_id.krate);
- decoder::get_struct_ctor_def_id(&cdata, struct_def_id.index)
- }
-
- fn tuple_struct_definition_if_ctor(&self, did: DefId) -> Option<DefId>
- {
- self.dep_graph.read(DepNode::MetaData(did));
- let cdata = self.get_crate_data(did.krate);
- decoder::get_tuple_struct_definition_if_ctor(&cdata, did.index)
+ self.get_crate_data(struct_def_id.krate).get_struct_ctor_def_id(struct_def_id.index)
}
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>
{
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::get_struct_field_names(&cdata, def.index)
+ self.get_crate_data(def.krate).get_struct_field_names(def.index)
}
- fn item_children(&self, def_id: DefId) -> Vec<ChildItem>
+ fn item_children(&self, def_id: DefId) -> Vec<def::Export>
{
self.dep_graph.read(DepNode::MetaData(def_id));
let mut result = vec![];
- let crate_data = self.get_crate_data(def_id.krate);
- let get_crate_data = |cnum| self.get_crate_data(cnum);
- decoder::each_child_of_item(&crate_data, def_id.index, get_crate_data, |def, name, vis| {
- result.push(ChildItem { def: def, name: name, vis: vis });
- });
- result
- }
-
- fn crate_top_level_items(&self, cnum: ast::CrateNum) -> Vec<ChildItem>
- {
- let mut result = vec![];
- let crate_data = self.get_crate_data(cnum);
- let get_crate_data = |cnum| self.get_crate_data(cnum);
- decoder::each_top_level_item_of_crate(&crate_data, get_crate_data, |def, name, vis| {
- result.push(ChildItem { def: def, name: name, vis: vis });
- });
+ self.get_crate_data(def_id.krate)
+ .each_child_of_item(def_id.index, |child| result.push(child));
result
}
debug!("maybe_get_item_ast({}): inlining item", tcx.item_path_str(def_id));
- let cdata = self.get_crate_data(def_id.krate);
- let inlined = decoder::maybe_get_item_ast(&cdata, tcx, def_id.index);
+ let inlined = self.get_crate_data(def_id.krate).maybe_get_item_ast(tcx, def_id.index);
let cache_inlined_item = |original_def_id, inlined_item_id, inlined_root_node_id| {
let cache_entry = cstore::CachedInlinedItem {
};
match inlined {
- decoder::FoundAst::NotFound => {
+ None => {
self.inlined_item_cache
.borrow_mut()
.insert(def_id, None);
}
- decoder::FoundAst::Found(&InlinedItem::Item(d, ref item)) => {
+ Some(&InlinedItem::Item(d, ref item)) => {
assert_eq!(d, def_id);
let inlined_root_node_id = find_inlined_item_root(item.id);
cache_inlined_item(def_id, item.id, inlined_root_node_id);
}
- decoder::FoundAst::FoundParent(parent_did, item) => {
- let inlined_root_node_id = find_inlined_item_root(item.id);
- cache_inlined_item(parent_did, item.id, inlined_root_node_id);
-
- match item.node {
- hir::ItemEnum(ref ast_def, _) => {
- let ast_vs = &ast_def.variants;
- let ty_vs = &tcx.lookup_adt_def(parent_did).variants;
- assert_eq!(ast_vs.len(), ty_vs.len());
- for (ast_v, ty_v) in ast_vs.iter().zip(ty_vs.iter()) {
- cache_inlined_item(ty_v.did,
- ast_v.node.data.id(),
- inlined_root_node_id);
- }
- }
- hir::ItemStruct(ref struct_def, _) => {
- if struct_def.is_struct() {
- bug!("instantiate_inline: called on a non-tuple struct")
- } else {
- cache_inlined_item(def_id,
- struct_def.id(),
- inlined_root_node_id);
- }
- }
- _ => bug!("instantiate_inline: item has a \
- non-enum, non-struct parent")
- }
- }
- decoder::FoundAst::Found(&InlinedItem::TraitItem(_, ref trait_item)) => {
+ Some(&InlinedItem::TraitItem(_, ref trait_item)) => {
let inlined_root_node_id = find_inlined_item_root(trait_item.id);
cache_inlined_item(def_id, trait_item.id, inlined_root_node_id);
tcx.impl_or_trait_items.borrow_mut()
.insert(trait_item_def_id, ty_trait_item);
}
- decoder::FoundAst::Found(&InlinedItem::ImplItem(_, ref impl_item)) => {
+ Some(&InlinedItem::ImplItem(_, ref impl_item)) => {
let inlined_root_node_id = find_inlined_item_root(impl_item.id);
cache_inlined_item(def_id, impl_item.id, inlined_root_node_id);
}
fn maybe_get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
-> Option<Mir<'tcx>> {
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::maybe_get_item_mir(&cdata, tcx, def.index)
+ self.get_crate_data(def.krate).maybe_get_item_mir(tcx, def.index)
}
fn is_item_mir_available(&self, def: DefId) -> bool {
self.dep_graph.read(DepNode::MetaData(def));
- let cdata = self.get_crate_data(def.krate);
- decoder::is_item_mir_available(&cdata, def.index)
+ self.get_crate_data(def.krate).is_item_mir_available(def.index)
}
- fn crates(&self) -> Vec<ast::CrateNum>
+ fn crates(&self) -> Vec<CrateNum>
{
let mut result = vec![];
self.iter_crate_data(|cnum, _| result.push(cnum));
{
loader::meta_section_name(target)
}
- fn encode_type<'a>(&self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String)
- -> Vec<u8>
- {
- encoder::encoded_ty(tcx, ty, def_id_to_string)
- }
- fn used_crates(&self, prefer: LinkagePreference) -> Vec<(ast::CrateNum, Option<PathBuf>)>
+ fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, Option<PathBuf>)>
{
self.do_get_used_crates(prefer)
}
- fn used_crate_source(&self, cnum: ast::CrateNum) -> CrateSource
+ fn used_crate_source(&self, cnum: CrateNum) -> CrateSource
{
self.opt_used_crate_source(cnum).unwrap()
}
- fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<ast::CrateNum>
+ fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>
{
self.do_extern_mod_stmt_cnum(emod_id)
}
reexports: &def::ExportMap,
link_meta: &LinkMeta,
reachable: &NodeSet,
- mir_map: &MirMap<'tcx>,
- krate: &hir::Crate) -> Vec<u8>
- {
- let ecx = encoder::EncodeContext {
- diag: tcx.sess.diagnostic(),
- tcx: tcx,
- reexports: reexports,
- link_meta: link_meta,
- cstore: self,
- reachable: reachable,
- mir_map: mir_map,
- type_abbrevs: RefCell::new(FnvHashMap()),
- };
- encoder::encode_metadata(ecx, krate)
-
+ mir_map: &MirMap<'tcx>) -> Vec<u8>
+ {
+ encoder::encode_metadata(tcx, self, reexports, link_meta, reachable, mir_map)
}
fn metadata_encoding_version(&self) -> &[u8]
{
- common::metadata_encoding_version
+ schema::METADATA_HEADER
}
/// Returns a map from a sufficiently visible external item (i.e. an external item that is
let mut visible_parent_map = self.visible_parent_map.borrow_mut();
if !visible_parent_map.is_empty() { return visible_parent_map; }
- use rustc::middle::cstore::ChildItem;
use std::collections::vec_deque::VecDeque;
use std::collections::hash_map::Entry;
- for cnum in 1 .. self.next_crate_num() {
+ for cnum in (1 .. self.next_crate_num().as_usize()).map(CrateNum::new) {
let cdata = self.get_crate_data(cnum);
match cdata.extern_crate.get() {
}
let mut bfs_queue = &mut VecDeque::new();
- let mut add_child = |bfs_queue: &mut VecDeque<_>, child: ChildItem, parent: DefId| {
- let child = match child.def {
- DefLike::DlDef(def) if child.vis == ty::Visibility::Public => def.def_id(),
- _ => return,
- };
+ let mut add_child = |bfs_queue: &mut VecDeque<_>, child: def::Export, parent: DefId| {
+ let child = child.def_id;
+
+ if self.visibility(child) != ty::Visibility::Public {
+ return;
+ }
match visible_parent_map.entry(child) {
Entry::Occupied(mut entry) => {
}
};
- let croot = DefId { krate: cnum, index: CRATE_DEF_INDEX };
- for child in self.crate_top_level_items(cnum) {
- add_child(bfs_queue, child, croot);
- }
+ bfs_queue.push_back(DefId {
+ krate: cnum,
+ index: CRATE_DEF_INDEX
+ });
while let Some(def) = bfs_queue.pop_front() {
for child in self.item_children(def) {
add_child(bfs_queue, child, def);
visible_parent_map
}
}
-
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![allow(non_camel_case_types)]
-
// The crate store - a central repo for information collected about external
// crates and libraries
-pub use self::MetadataBlob::*;
-
-use common;
-use creader;
-use decoder;
-use index;
use loader;
+use schema;
use rustc::dep_graph::DepGraph;
-use rustc::hir::def_id::{DefIndex, DefId};
+use rustc::hir::def_id::{CRATE_DEF_INDEX, CrateNum, DefIndex, DefId};
use rustc::hir::map::DefKey;
use rustc::hir::svh::Svh;
use rustc::middle::cstore::ExternCrate;
-use rustc::session::config::PanicStrategy;
+use rustc_back::PanicStrategy;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap, FnvHashSet};
-use std::cell::{RefCell, Ref, Cell};
+use std::cell::{RefCell, Cell};
use std::rc::Rc;
use std::path::PathBuf;
use flate::Bytes;
use syntax::ast::{self, Ident};
use syntax::attr;
-use syntax::codemap;
use syntax_pos;
-pub use middle::cstore::{NativeLibraryKind, LinkagePreference};
-pub use middle::cstore::{NativeStatic, NativeFramework, NativeUnknown};
-pub use middle::cstore::{CrateSource, LinkMeta};
+pub use rustc::middle::cstore::{NativeLibraryKind, LinkagePreference};
+pub use rustc::middle::cstore::{NativeStatic, NativeFramework, NativeUnknown};
+pub use rustc::middle::cstore::{CrateSource, LinkMeta};
// A map from external crate numbers (as decoded from some crate file) to
// local crate numbers (as generated during this session). Each external
// crate may refer to types in other external crates, and each has their
// own crate numbers.
-pub type CrateNumMap = IndexVec<ast::CrateNum, ast::CrateNum>;
+pub type CrateNumMap = IndexVec<CrateNum, CrateNum>;
pub enum MetadataBlob {
- MetadataVec(Bytes),
- MetadataArchive(loader::ArchiveMetadata),
+ Inflated(Bytes),
+ Archive(loader::ArchiveMetadata),
}
/// Holds information about a syntax_pos::FileMap imported from another crate.
-/// See creader::import_codemap() for more information.
+/// See `imported_filemaps()` for more information.
pub struct ImportedFileMap {
/// This FileMap's byte-offset within the codemap of its original crate
pub original_start_pos: syntax_pos::BytePos,
/// (e.g., by the allocator)
pub extern_crate: Cell<Option<ExternCrate>>,
- pub data: MetadataBlob,
+ pub blob: MetadataBlob,
pub cnum_map: RefCell<CrateNumMap>,
- pub cnum: ast::CrateNum,
+ pub cnum: CrateNum,
pub codemap_import_info: RefCell<Vec<ImportedFileMap>>,
- pub staged_api: bool,
- pub index: index::Index,
- pub xref_index: index::DenseIndex,
+ pub root: schema::CrateRoot,
/// For each public item in this crate, we encode a key. When the
/// crate is loaded, we read all the keys and put them in this
pub struct CStore {
pub dep_graph: DepGraph,
- metas: RefCell<FnvHashMap<ast::CrateNum, Rc<CrateMetadata>>>,
+ metas: RefCell<FnvHashMap<CrateNum, Rc<CrateMetadata>>>,
/// Map from NodeId's of local extern crate statements to crate numbers
- extern_mod_crate_map: RefCell<NodeMap<ast::CrateNum>>,
+ extern_mod_crate_map: RefCell<NodeMap<CrateNum>>,
used_crate_sources: RefCell<Vec<CrateSource>>,
used_libraries: RefCell<Vec<(String, NativeLibraryKind)>>,
used_link_args: RefCell<Vec<String>>,
}
}
- pub fn next_crate_num(&self) -> ast::CrateNum {
- self.metas.borrow().len() as ast::CrateNum + 1
+ pub fn next_crate_num(&self) -> CrateNum {
+ CrateNum::new(self.metas.borrow().len() + 1)
}
- pub fn get_crate_data(&self, cnum: ast::CrateNum) -> Rc<CrateMetadata> {
+ pub fn get_crate_data(&self, cnum: CrateNum) -> Rc<CrateMetadata> {
self.metas.borrow().get(&cnum).unwrap().clone()
}
- pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> Svh {
- let cdata = self.get_crate_data(cnum);
- decoder::get_crate_hash(cdata.data())
+ pub fn get_crate_hash(&self, cnum: CrateNum) -> Svh {
+ self.get_crate_data(cnum).hash()
}
- pub fn set_crate_data(&self, cnum: ast::CrateNum, data: Rc<CrateMetadata>) {
+ pub fn set_crate_data(&self, cnum: CrateNum, data: Rc<CrateMetadata>) {
self.metas.borrow_mut().insert(cnum, data);
}
pub fn iter_crate_data<I>(&self, mut i: I) where
- I: FnMut(ast::CrateNum, &Rc<CrateMetadata>),
+ I: FnMut(CrateNum, &Rc<CrateMetadata>),
{
for (&k, v) in self.metas.borrow().iter() {
i(k, v);
/// Like `iter_crate_data`, but passes source paths (if available) as well.
pub fn iter_crate_data_origins<I>(&self, mut i: I) where
- I: FnMut(ast::CrateNum, &CrateMetadata, Option<CrateSource>),
+ I: FnMut(CrateNum, &CrateMetadata, Option<CrateSource>),
{
for (&k, v) in self.metas.borrow().iter() {
let origin = self.opt_used_crate_source(k);
}
}
- pub fn opt_used_crate_source(&self, cnum: ast::CrateNum)
+ pub fn opt_used_crate_source(&self, cnum: CrateNum)
-> Option<CrateSource> {
self.used_crate_sources.borrow_mut()
.iter().find(|source| source.cnum == cnum).cloned()
self.statically_included_foreign_items.borrow_mut().clear();
}
- pub fn crate_dependencies_in_rpo(&self, krate: ast::CrateNum) -> Vec<ast::CrateNum>
+ pub fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec<CrateNum>
{
let mut ordering = Vec::new();
self.push_dependencies_in_postorder(&mut ordering, krate);
}
pub fn push_dependencies_in_postorder(&self,
- ordering: &mut Vec<ast::CrateNum>,
- krate: ast::CrateNum)
+ ordering: &mut Vec<CrateNum>,
+ krate: CrateNum)
{
if ordering.contains(&krate) { return }
// topological sort of all crates putting the leaves at the right-most
// positions.
pub fn do_get_used_crates(&self, prefer: LinkagePreference)
- -> Vec<(ast::CrateNum, Option<PathBuf>)> {
+ -> Vec<(CrateNum, Option<PathBuf>)> {
let mut ordering = Vec::new();
for (&num, _) in self.metas.borrow().iter() {
self.push_dependencies_in_postorder(&mut ordering, num);
pub fn add_extern_mod_stmt_cnum(&self,
emod_id: ast::NodeId,
- cnum: ast::CrateNum) {
+ cnum: CrateNum) {
self.extern_mod_crate_map.borrow_mut().insert(emod_id, cnum);
}
self.statically_included_foreign_items.borrow().contains(&id)
}
- pub fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<ast::CrateNum>
+ pub fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>
{
self.extern_mod_crate_map.borrow().get(&emod_id).cloned()
}
}
impl CrateMetadata {
- pub fn data<'a>(&'a self) -> &'a [u8] { self.data.as_slice() }
- pub fn name(&self) -> &str { decoder::get_crate_name(self.data()) }
- pub fn hash(&self) -> Svh { decoder::get_crate_hash(self.data()) }
- pub fn disambiguator(&self) -> &str {
- decoder::get_crate_disambiguator(self.data())
- }
- pub fn imported_filemaps<'a>(&'a self, codemap: &codemap::CodeMap)
- -> Ref<'a, Vec<ImportedFileMap>> {
- let filemaps = self.codemap_import_info.borrow();
- if filemaps.is_empty() {
- drop(filemaps);
- let filemaps = creader::import_codemap(codemap, &self.data);
-
- // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref.
- *self.codemap_import_info.borrow_mut() = filemaps;
- self.codemap_import_info.borrow()
- } else {
- filemaps
- }
+ pub fn name(&self) -> &str { &self.root.name }
+ pub fn hash(&self) -> Svh { self.root.hash }
+ pub fn disambiguator(&self) -> &str { &self.root.disambiguator }
+
+ pub fn is_staged_api(&self) -> bool {
+ self.get_item_attrs(CRATE_DEF_INDEX).iter().any(|attr| {
+ attr.name() == "stable" || attr.name() == "unstable"
+ })
}
pub fn is_allocator(&self) -> bool {
- let attrs = decoder::get_crate_attributes(self.data());
+ let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
attr::contains_name(&attrs, "allocator")
}
pub fn needs_allocator(&self) -> bool {
- let attrs = decoder::get_crate_attributes(self.data());
+ let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
attr::contains_name(&attrs, "needs_allocator")
}
pub fn is_panic_runtime(&self) -> bool {
- let attrs = decoder::get_crate_attributes(self.data());
+ let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
attr::contains_name(&attrs, "panic_runtime")
}
pub fn needs_panic_runtime(&self) -> bool {
- let attrs = decoder::get_crate_attributes(self.data());
+ let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
attr::contains_name(&attrs, "needs_panic_runtime")
}
pub fn is_compiler_builtins(&self) -> bool {
- let attrs = decoder::get_crate_attributes(self.data());
+ let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
attr::contains_name(&attrs, "compiler_builtins")
}
- pub fn panic_strategy(&self) -> PanicStrategy {
- decoder::get_panic_strategy(self.data())
+ pub fn is_no_builtins(&self) -> bool {
+ let attrs = self.get_item_attrs(CRATE_DEF_INDEX);
+ attr::contains_name(&attrs, "no_builtins")
}
-}
-impl MetadataBlob {
- pub fn as_slice_raw<'a>(&'a self) -> &'a [u8] {
- match *self {
- MetadataVec(ref vec) => &vec[..],
- MetadataArchive(ref ar) => ar.as_slice(),
- }
- }
-
- pub fn as_slice<'a>(&'a self) -> &'a [u8] {
- let slice = self.as_slice_raw();
- let len_offset = 4 + common::metadata_encoding_version.len();
- if slice.len() < len_offset+4 {
- &[] // corrupt metadata
- } else {
- let len = (((slice[len_offset+0] as u32) << 24) |
- ((slice[len_offset+1] as u32) << 16) |
- ((slice[len_offset+2] as u32) << 8) |
- ((slice[len_offset+3] as u32) << 0)) as usize;
- if len <= slice.len() - 4 - len_offset {
- &slice[len_offset + 4..len_offset + len + 4]
- } else {
- &[] // corrupt or old metadata
- }
- }
+ pub fn panic_strategy(&self) -> PanicStrategy {
+ self.root.panic_strategy.clone()
}
}
// Decoding metadata from a single crate's metadata
-#![allow(non_camel_case_types)]
-
-use self::Family::*;
-
use astencode::decode_inlined_item;
-use cstore::{self, CrateMetadata};
-use common::*;
-use def_key;
-use encoder::def_to_u64;
-use index;
-use tls_context;
-use tydecode::TyDecoder;
-
-use rustc::hir::def_id::CRATE_DEF_INDEX;
-use rustc::hir::svh::Svh;
+use cstore::{self, CrateMetadata, MetadataBlob, NativeLibraryKind};
+use index::Index;
+use schema::*;
+
use rustc::hir::map as hir_map;
-use rustc::hir::map::DefKey;
+use rustc::hir::map::{DefKey, DefPathData};
use rustc::util::nodemap::FnvHashMap;
use rustc::hir;
-use rustc::session::config::PanicStrategy;
+use rustc::hir::intravisit::IdRange;
-use middle::cstore::{InlinedItem, LinkagePreference};
-use middle::cstore::{DefLike, DlDef, DlField, DlImpl, tls};
-use rustc::hir::def::Def;
-use rustc::hir::def_id::{DefId, DefIndex};
-use middle::lang_items;
-use rustc::ty::{ImplContainer, TraitContainer};
-use rustc::ty::{self, AdtKind, Ty, TyCtxt, TypeFoldable, VariantKind};
+use rustc::middle::cstore::{InlinedItem, LinkagePreference};
+use rustc::hir::def::{self, Def};
+use rustc::hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
+use rustc::middle::lang_items;
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::subst::Substs;
use rustc_const_math::ConstInt;
-use rustc::mir;
-use rustc::mir::visit::MutVisitor;
-use rustc::mir::repr::Location;
+use rustc::mir::repr::Mir;
-use std::cell::Cell;
+use std::cell::Ref;
use std::io;
+use std::mem;
use std::rc::Rc;
use std::str;
+use std::u32;
-use rbml::reader;
-use rbml;
-use rustc_serialize::Decodable;
+use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque};
use syntax::attr;
-use syntax::parse::token;
-use syntax::ast;
+use syntax::ast::{self, NodeId};
use syntax::codemap;
-use syntax::print::pprust;
-use syntax_pos::{self, Span, BytePos, NO_EXPANSION};
+use syntax_pos::{self, Span, BytePos, Pos};
-pub type Cmd<'a> = &'a CrateMetadata;
+pub struct DecodeContext<'a, 'tcx: 'a> {
+ opaque: opaque::Decoder<'a>,
+ tcx: Option<TyCtxt<'a, 'tcx, 'tcx>>,
+ cdata: Option<&'a CrateMetadata>,
+ from_id_range: IdRange,
+ to_id_range: IdRange,
-impl CrateMetadata {
- fn get_item(&self, item_id: DefIndex) -> Option<rbml::Doc> {
- self.index.lookup_item(self.data(), item_id).map(|pos| {
- reader::doc_at(self.data(), pos as usize).unwrap().doc
- })
- }
+ // Cache the last used filemap for translating spans as an optimization.
+ last_filemap_index: usize,
- fn lookup_item(&self, item_id: DefIndex) -> rbml::Doc {
- match self.get_item(item_id) {
- None => bug!("lookup_item: id not found: {:?} in crate {:?} with number {}",
- item_id,
- self.name,
- self.cnum),
- Some(d) => d
- }
- }
+ lazy_state: LazyState
}
-pub fn load_index(data: &[u8]) -> index::Index {
- let index = reader::get_doc(rbml::Doc::new(data), tag_index);
- index::Index::from_rbml(index)
-}
+/// Abstract over the various ways one can create metadata decoders.
+pub trait Metadata<'a, 'tcx>: Copy {
+ fn raw_bytes(self) -> &'a [u8];
+ fn cdata(self) -> Option<&'a CrateMetadata> { None }
+ fn tcx(self) -> Option<TyCtxt<'a, 'tcx, 'tcx>> { None }
-pub fn crate_rustc_version(data: &[u8]) -> Option<String> {
- let doc = rbml::Doc::new(data);
- reader::maybe_get_doc(doc, tag_rustc_version).map(|s| s.to_string())
+ fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> {
+ let id_range = IdRange {
+ min: NodeId::from_u32(u32::MIN),
+ max: NodeId::from_u32(u32::MAX)
+ };
+ DecodeContext {
+ opaque: opaque::Decoder::new(self.raw_bytes(), pos),
+ cdata: self.cdata(),
+ tcx: self.tcx(),
+ from_id_range: id_range,
+ to_id_range: id_range,
+ last_filemap_index: 0,
+ lazy_state: LazyState::NoNode
+ }
+ }
}
-pub fn load_xrefs(data: &[u8]) -> index::DenseIndex {
- let index = reader::get_doc(rbml::Doc::new(data), tag_xref_index);
- index::DenseIndex::from_buf(index.data, index.start, index.end)
+impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob {
+ fn raw_bytes(self) -> &'a [u8] {
+ match *self {
+ MetadataBlob::Inflated(ref vec) => &vec[..],
+ MetadataBlob::Archive(ref ar) => ar.as_slice(),
+ }
+ }
}
-// Go through each item in the metadata and create a map from that
-// item's def-key to the item's DefIndex.
-pub fn load_key_map(data: &[u8]) -> FnvHashMap<DefKey, DefIndex> {
- let root_doc = rbml::Doc::new(data);
- let items_doc = reader::get_doc(root_doc, tag_items);
- let items_data_doc = reader::get_doc(items_doc, tag_items_data);
- reader::docs(items_data_doc)
- .filter(|&(tag, _)| tag == tag_items_data_item)
- .map(|(_, item_doc)| {
- // load def-key from item
- let key = item_def_key(item_doc);
-
- // load def-index from item; we only encode the full def-id,
- // so just pull out the index
- let def_id_doc = reader::get_doc(item_doc, tag_def_id);
- let def_id = untranslated_def_id(def_id_doc);
- assert!(def_id.is_local()); // local to the crate we are decoding, that is
-
- (key, def_id.index)
- })
- .collect()
+impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a CrateMetadata {
+ fn raw_bytes(self) -> &'a [u8] { self.blob.raw_bytes() }
+ fn cdata(self) -> Option<&'a CrateMetadata> { Some(self) }
}
-#[derive(Clone, Copy, Debug, PartialEq)]
-enum Family {
- ImmStatic, // c
- MutStatic, // b
- Fn, // f
- StaticMethod, // F
- Method, // h
- Type, // y
- Mod, // m
- ForeignMod, // n
- Enum, // t
- Variant(VariantKind), // V, v, w
- Impl, // i
- DefaultImpl, // d
- Trait, // I
- Struct(VariantKind), // S, s, u
- Union, // U
- PublicField, // g
- InheritedField, // N
- Constant, // C
+impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>) {
+ fn raw_bytes(self) -> &'a [u8] { self.0.raw_bytes() }
+ fn cdata(self) -> Option<&'a CrateMetadata> { Some(self.0) }
+ fn tcx(self) -> Option<TyCtxt<'a, 'tcx, 'tcx>> { Some(self.1) }
}
-fn item_family(item: rbml::Doc) -> Family {
- let fam = reader::get_doc(item, tag_items_data_item_family);
- match reader::doc_as_u8(fam) as char {
- 'C' => Constant,
- 'c' => ImmStatic,
- 'b' => MutStatic,
- 'f' => Fn,
- 'F' => StaticMethod,
- 'h' => Method,
- 'y' => Type,
- 'm' => Mod,
- 'n' => ForeignMod,
- 't' => Enum,
- 'V' => Variant(VariantKind::Struct),
- 'v' => Variant(VariantKind::Tuple),
- 'w' => Variant(VariantKind::Unit),
- 'i' => Impl,
- 'd' => DefaultImpl,
- 'I' => Trait,
- 'S' => Struct(VariantKind::Struct),
- 's' => Struct(VariantKind::Tuple),
- 'u' => Struct(VariantKind::Unit),
- 'U' => Union,
- 'g' => PublicField,
- 'N' => InheritedField,
- c => bug!("unexpected family char: {}", c)
+// HACK(eddyb) Only used by astencode to customize the from/to IdRange's.
+impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>, [IdRange; 2]) {
+ fn raw_bytes(self) -> &'a [u8] { self.0.raw_bytes() }
+ fn cdata(self) -> Option<&'a CrateMetadata> { Some(self.0) }
+ fn tcx(self) -> Option<TyCtxt<'a, 'tcx, 'tcx>> { Some(self.1) }
+
+ fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> {
+ let mut dcx = (self.0, self.1).decoder(pos);
+ dcx.from_id_range = self.2[0];
+ dcx.to_id_range = self.2[1];
+ dcx
}
}
-fn item_visibility(item: rbml::Doc) -> ty::Visibility {
- match reader::maybe_get_doc(item, tag_items_data_item_visibility) {
- None => ty::Visibility::Public,
- Some(visibility_doc) => {
- match reader::doc_as_u8(visibility_doc) as char {
- 'y' => ty::Visibility::Public,
- 'i' => ty::Visibility::PrivateExternal,
- _ => bug!("unknown visibility character")
- }
- }
+impl<'a, 'tcx: 'a, T: Decodable> Lazy<T> {
+ pub fn decode<M: Metadata<'a, 'tcx>>(self, meta: M) -> T {
+ let mut dcx = meta.decoder(self.position);
+ dcx.lazy_state = LazyState::NodeStart(self.position);
+ T::decode(&mut dcx).unwrap()
}
}
-fn fn_constness(item: rbml::Doc) -> hir::Constness {
- match reader::maybe_get_doc(item, tag_items_data_item_constness) {
- None => hir::Constness::NotConst,
- Some(constness_doc) => {
- match reader::doc_as_u8(constness_doc) as char {
- 'c' => hir::Constness::Const,
- 'n' => hir::Constness::NotConst,
- _ => bug!("unknown constness character")
- }
- }
+impl<'a, 'tcx: 'a, T: Decodable> LazySeq<T> {
+ pub fn decode<M: Metadata<'a, 'tcx>>(self, meta: M) -> impl Iterator<Item=T> + 'a {
+ let mut dcx = meta.decoder(self.position);
+ dcx.lazy_state = LazyState::NodeStart(self.position);
+ (0..self.len).map(move |_| {
+ T::decode(&mut dcx).unwrap()
+ })
}
}
-fn item_defaultness(item: rbml::Doc) -> hir::Defaultness {
- match reader::maybe_get_doc(item, tag_items_data_item_defaultness) {
- None => hir::Defaultness::Default, // should occur only for default impls on traits
- Some(defaultness_doc) => {
- match reader::doc_as_u8(defaultness_doc) as char {
- 'd' => hir::Defaultness::Default,
- 'f' => hir::Defaultness::Final,
- _ => bug!("unknown defaultness character")
- }
- }
+impl<'a, 'tcx> DecodeContext<'a, 'tcx> {
+ pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
+ self.tcx.expect("missing TyCtxt in DecodeContext")
}
-}
-fn item_sort(item: rbml::Doc) -> Option<char> {
- reader::tagged_docs(item, tag_item_trait_item_sort).nth(0).map(|doc| {
- doc.as_str().as_bytes()[0] as char
- })
-}
+ pub fn cdata(&self) -> &'a CrateMetadata {
+ self.cdata.expect("missing CrateMetadata in DecodeContext")
+ }
-fn untranslated_def_id(d: rbml::Doc) -> DefId {
- let id = reader::doc_as_u64(d);
- let index = DefIndex::new((id & 0xFFFF_FFFF) as usize);
- DefId { krate: (id >> 32) as u32, index: index }
-}
+ fn with_position<F: FnOnce(&mut Self) -> R, R>(&mut self, pos: usize, f: F) -> R {
+ let new_opaque = opaque::Decoder::new(self.opaque.data, pos);
+ let old_opaque = mem::replace(&mut self.opaque, new_opaque);
+ let old_state = mem::replace(&mut self.lazy_state, LazyState::NoNode);
+ let r = f(self);
+ self.opaque = old_opaque;
+ self.lazy_state = old_state;
+ r
+ }
-fn translated_def_id(cdata: Cmd, d: rbml::Doc) -> DefId {
- let def_id = untranslated_def_id(d);
- translate_def_id(cdata, def_id)
+ fn read_lazy_distance(&mut self, min_size: usize)
+ -> Result<usize, <Self as Decoder>::Error> {
+ let distance = self.read_usize()?;
+ let position = match self.lazy_state {
+ LazyState::NoNode => {
+ bug!("read_lazy_distance: outside of a metadata node")
+ }
+ LazyState::NodeStart(start) => {
+ assert!(distance + min_size <= start);
+ start - distance - min_size
+ }
+ LazyState::Previous(last_min_end) => {
+ last_min_end + distance
+ }
+ };
+ self.lazy_state = LazyState::Previous(position + min_size);
+ Ok(position)
+ }
}
-fn item_parent_item(cdata: Cmd, d: rbml::Doc) -> Option<DefId> {
- reader::tagged_docs(d, tag_items_data_parent_item).nth(0).map(|did| {
- translated_def_id(cdata, did)
- })
+macro_rules! decoder_methods {
+ ($($name:ident -> $ty:ty;)*) => {
+ $(fn $name(&mut self) -> Result<$ty, Self::Error> {
+ self.opaque.$name()
+ })*
+ }
}
-fn item_require_parent_item(cdata: Cmd, d: rbml::Doc) -> DefId {
- translated_def_id(cdata, reader::get_doc(d, tag_items_data_parent_item))
-}
+impl<'doc, 'tcx> Decoder for DecodeContext<'doc, 'tcx> {
+ type Error = <opaque::Decoder<'doc> as Decoder>::Error;
-fn item_def_id(d: rbml::Doc, cdata: Cmd) -> DefId {
- translated_def_id(cdata, reader::get_doc(d, tag_def_id))
-}
+ decoder_methods! {
+ read_nil -> ();
-fn reexports<'a>(d: rbml::Doc<'a>) -> reader::TaggedDocsIterator<'a> {
- reader::tagged_docs(d, tag_items_data_item_reexport)
-}
+ read_u64 -> u64;
+ read_u32 -> u32;
+ read_u16 -> u16;
+ read_u8 -> u8;
+ read_usize -> usize;
-fn variant_disr_val(d: rbml::Doc) -> u64 {
- let val_doc = reader::get_doc(d, tag_disr_val);
- reader::with_doc_data(val_doc, |data| {
- str::from_utf8(data).unwrap().parse().unwrap()
- })
-}
+ read_i64 -> i64;
+ read_i32 -> i32;
+ read_i16 -> i16;
+ read_i8 -> i8;
+ read_isize -> isize;
-fn doc_type<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) -> Ty<'tcx> {
- let tp = reader::get_doc(doc, tag_items_data_item_type);
- TyDecoder::with_doc(tcx, cdata.cnum, tp,
- &mut |did| translate_def_id(cdata, did))
- .parse_ty()
-}
+ read_bool -> bool;
+ read_f64 -> f64;
+ read_f32 -> f32;
+ read_char -> char;
+ read_str -> String;
+ }
-fn maybe_doc_type<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd)
- -> Option<Ty<'tcx>> {
- reader::maybe_get_doc(doc, tag_items_data_item_type).map(|tp| {
- TyDecoder::with_doc(tcx, cdata.cnum, tp,
- &mut |did| translate_def_id(cdata, did))
- .parse_ty()
- })
+ fn error(&mut self, err: &str) -> Self::Error {
+ self.opaque.error(err)
+ }
}
-fn doc_trait_ref<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd)
- -> ty::TraitRef<'tcx> {
- TyDecoder::with_doc(tcx, cdata.cnum, doc,
- &mut |did| translate_def_id(cdata, did))
- .parse_trait_ref()
+impl<'a, 'tcx, T> SpecializedDecoder<Lazy<T>> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<Lazy<T>, Self::Error> {
+ Ok(Lazy::with_position(self.read_lazy_distance(Lazy::<T>::min_size())?))
+ }
}
-fn item_trait_ref<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd)
- -> ty::TraitRef<'tcx> {
- let tp = reader::get_doc(doc, tag_item_trait_ref);
- doc_trait_ref(tp, tcx, cdata)
+impl<'a, 'tcx, T> SpecializedDecoder<LazySeq<T>> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<LazySeq<T>, Self::Error> {
+ let len = self.read_usize()?;
+ let position = if len == 0 {
+ 0
+ } else {
+ self.read_lazy_distance(LazySeq::<T>::min_size(len))?
+ };
+ Ok(LazySeq::with_position_and_length(position, len))
+ }
}
-fn item_name(item: rbml::Doc) -> ast::Name {
- maybe_item_name(item).expect("no item in item_name")
-}
+impl<'a, 'tcx> SpecializedDecoder<NodeId> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<NodeId, Self::Error> {
+ let id = u32::decode(self)?;
-fn maybe_item_name(item: rbml::Doc) -> Option<ast::Name> {
- reader::maybe_get_doc(item, tag_paths_data_name).map(|name| {
- let string = name.as_str();
- token::intern(string)
- })
-}
+ // from_id_range should be non-empty
+ assert!(!self.from_id_range.empty());
+ // Make sure that translating the NodeId will actually yield a
+ // meaningful result
+ if !self.from_id_range.contains(NodeId::from_u32(id)) {
+ bug!("NodeId::decode: {} out of DecodeContext range ({:?} -> {:?})",
+ id, self.from_id_range, self.to_id_range);
+ }
-fn family_to_variant_kind<'tcx>(family: Family) -> Option<ty::VariantKind> {
- match family {
- Struct(VariantKind::Struct) | Variant(VariantKind::Struct) | Union =>
- Some(ty::VariantKind::Struct),
- Struct(VariantKind::Tuple) | Variant(VariantKind::Tuple) =>
- Some(ty::VariantKind::Tuple),
- Struct(VariantKind::Unit) | Variant(VariantKind::Unit) =>
- Some(ty::VariantKind::Unit),
- _ => None,
+ // Use wrapping arithmetic because otherwise it introduces control flow.
+ // Maybe we should just have the control flow? -- aatch
+ Ok(NodeId::from_u32(id.wrapping_sub(self.from_id_range.min.as_u32())
+ .wrapping_add(self.to_id_range.min.as_u32())))
}
}
-fn item_to_def_like(cdata: Cmd, item: rbml::Doc, did: DefId) -> DefLike {
- let fam = item_family(item);
- match fam {
- Constant => {
- // Check whether we have an associated const item.
- match item_sort(item) {
- Some('C') | Some('c') => {
- DlDef(Def::AssociatedConst(did))
- }
- _ => {
- // Regular const item.
- DlDef(Def::Const(did))
- }
- }
- }
- ImmStatic => DlDef(Def::Static(did, false)),
- MutStatic => DlDef(Def::Static(did, true)),
- Struct(..) => DlDef(Def::Struct(did)),
- Union => DlDef(Def::Union(did)),
- Fn => DlDef(Def::Fn(did)),
- Method | StaticMethod => {
- DlDef(Def::Method(did))
- }
- Type => {
- if item_sort(item) == Some('t') {
- let trait_did = item_require_parent_item(cdata, item);
- DlDef(Def::AssociatedTy(trait_did, did))
- } else {
- DlDef(Def::TyAlias(did))
- }
- }
- Mod => DlDef(Def::Mod(did)),
- ForeignMod => DlDef(Def::ForeignMod(did)),
- Variant(..) => {
- let enum_did = item_require_parent_item(cdata, item);
- DlDef(Def::Variant(enum_did, did))
+impl<'a, 'tcx> SpecializedDecoder<CrateNum> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<CrateNum, Self::Error> {
+ let cnum = CrateNum::from_u32(u32::decode(self)?);
+ if cnum == LOCAL_CRATE {
+ Ok(self.cdata().cnum)
+ } else {
+ Ok(self.cdata().cnum_map.borrow()[cnum])
}
- Trait => DlDef(Def::Trait(did)),
- Enum => DlDef(Def::Enum(did)),
- Impl | DefaultImpl => DlImpl(did),
- PublicField | InheritedField => DlField,
}
}
-fn parse_unsafety(item_doc: rbml::Doc) -> hir::Unsafety {
- let unsafety_doc = reader::get_doc(item_doc, tag_unsafety);
- if reader::doc_as_u8(unsafety_doc) != 0 {
- hir::Unsafety::Unsafe
- } else {
- hir::Unsafety::Normal
- }
-}
+impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<Span, Self::Error> {
+ let lo = BytePos::decode(self)?;
+ let hi = BytePos::decode(self)?;
-fn parse_paren_sugar(item_doc: rbml::Doc) -> bool {
- let paren_sugar_doc = reader::get_doc(item_doc, tag_paren_sugar);
- reader::doc_as_u8(paren_sugar_doc) != 0
-}
+ let tcx = if let Some(tcx) = self.tcx {
+ tcx
+ } else {
+ return Ok(syntax_pos::mk_sp(lo, hi));
+ };
-fn parse_polarity(item_doc: rbml::Doc) -> hir::ImplPolarity {
- let polarity_doc = reader::get_doc(item_doc, tag_polarity);
- if reader::doc_as_u8(polarity_doc) != 0 {
- hir::ImplPolarity::Negative
- } else {
- hir::ImplPolarity::Positive
- }
-}
+ let (lo, hi) = if lo > hi {
+ // Currently macro expansion sometimes produces invalid Span values
+ // where lo > hi. In order not to crash the compiler when trying to
+ // translate these values, let's transform them into something we
+ // can handle (and which will produce useful debug locations at
+ // least some of the time).
+ // This workaround is only necessary as long as macro expansion is
+ // not fixed. FIXME(#23480)
+ (lo, lo)
+ } else {
+ (lo, hi)
+ };
-fn parse_associated_type_names(item_doc: rbml::Doc) -> Vec<ast::Name> {
- let names_doc = reader::get_doc(item_doc, tag_associated_type_names);
- reader::tagged_docs(names_doc, tag_associated_type_name)
- .map(|name_doc| token::intern(name_doc.as_str()))
- .collect()
-}
+ let imported_filemaps = self.cdata().imported_filemaps(&tcx.sess.codemap());
+ let filemap = {
+ // Optimize for the case that most spans within a translated item
+ // originate from the same filemap.
+ let last_filemap = &imported_filemaps[self.last_filemap_index];
+
+ if lo >= last_filemap.original_start_pos &&
+ lo <= last_filemap.original_end_pos &&
+ hi >= last_filemap.original_start_pos &&
+ hi <= last_filemap.original_end_pos {
+ last_filemap
+ } else {
+ let mut a = 0;
+ let mut b = imported_filemaps.len();
-pub fn get_trait_def<'a, 'tcx>(cdata: Cmd,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::TraitDef<'tcx>
-{
- let item_doc = cdata.lookup_item(item_id);
- let generics = doc_generics(item_doc, tcx, cdata);
- let unsafety = parse_unsafety(item_doc);
- let associated_type_names = parse_associated_type_names(item_doc);
- let paren_sugar = parse_paren_sugar(item_doc);
- let def_path = def_path(cdata, item_id).unwrap();
-
- ty::TraitDef::new(unsafety,
- paren_sugar,
- generics,
- item_trait_ref(item_doc, tcx, cdata),
- associated_type_names,
- def_path.deterministic_hash(tcx))
-}
+ while b - a > 1 {
+ let m = (a + b) / 2;
+ if imported_filemaps[m].original_start_pos > lo {
+ b = m;
+ } else {
+ a = m;
+ }
+ }
-pub fn get_adt_def<'a, 'tcx>(cdata: Cmd,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::AdtDefMaster<'tcx>
-{
- fn expect_variant_kind(family: Family) -> ty::VariantKind {
- match family_to_variant_kind(family) {
- Some(kind) => kind,
- _ => bug!("unexpected family: {:?}", family),
- }
- }
- fn get_enum_variants<'tcx>(cdata: Cmd, doc: rbml::Doc) -> Vec<ty::VariantDefData<'tcx, 'tcx>> {
- reader::tagged_docs(doc, tag_items_data_item_variant).map(|p| {
- let did = translated_def_id(cdata, p);
- let item = cdata.lookup_item(did.index);
- let disr = variant_disr_val(item);
- ty::VariantDefData {
- did: did,
- name: item_name(item),
- fields: get_variant_fields(cdata, item),
- disr_val: ConstInt::Infer(disr),
- kind: expect_variant_kind(item_family(item)),
+ self.last_filemap_index = a;
+ &imported_filemaps[a]
}
- }).collect()
- }
- fn get_variant_fields<'tcx>(cdata: Cmd, doc: rbml::Doc) -> Vec<ty::FieldDefData<'tcx, 'tcx>> {
- let mut index = 0;
- reader::tagged_docs(doc, tag_item_field).map(|f| {
- let ff = item_family(f);
- match ff {
- PublicField | InheritedField => {},
- _ => bug!("expected field, found {:?}", ff)
- };
- ty::FieldDefData::new(item_def_id(f, cdata),
- item_name(f),
- struct_field_family_to_visibility(ff))
- }).chain(reader::tagged_docs(doc, tag_item_unnamed_field).map(|f| {
- let ff = item_family(f);
- let name = token::with_ident_interner(|interner| interner.intern(index.to_string()));
- index += 1;
- ty::FieldDefData::new(item_def_id(f, cdata), name,
- struct_field_family_to_visibility(ff))
- })).collect()
- }
- fn get_struct_variant<'tcx>(cdata: Cmd,
- doc: rbml::Doc,
- did: DefId) -> ty::VariantDefData<'tcx, 'tcx> {
- ty::VariantDefData {
- did: did,
- name: item_name(doc),
- fields: get_variant_fields(cdata, doc),
- disr_val: ConstInt::Infer(0),
- kind: expect_variant_kind(item_family(doc)),
- }
+ };
+
+ let lo = (lo - filemap.original_start_pos) +
+ filemap.translated_filemap.start_pos;
+ let hi = (hi - filemap.original_start_pos) +
+ filemap.translated_filemap.start_pos;
+
+ Ok(syntax_pos::mk_sp(lo, hi))
}
+}
- let doc = cdata.lookup_item(item_id);
- let did = DefId { krate: cdata.cnum, index: item_id };
- let mut ctor_did = None;
- let (kind, variants) = match item_family(doc) {
- Enum => {
- (AdtKind::Enum, get_enum_variants(cdata, doc))
- }
- Struct(..) => {
- // Use separate constructor id for unit/tuple structs and reuse did for braced structs.
- ctor_did = reader::maybe_get_doc(doc, tag_items_data_item_struct_ctor).map(|ctor_doc| {
- translated_def_id(cdata, ctor_doc)
- });
- (AdtKind::Struct, vec![get_struct_variant(cdata, doc, ctor_did.unwrap_or(did))])
- }
- Union => {
- (AdtKind::Union, vec![get_struct_variant(cdata, doc, did)])
- }
- _ => bug!("get_adt_def called on a non-ADT {:?} - {:?}", item_family(doc), did)
- };
-
- let adt = tcx.intern_adt_def(did, kind, variants);
- if let Some(ctor_did) = ctor_did {
- // Make adt definition available through constructor id as well.
- tcx.insert_adt_def(ctor_did, adt);
- }
-
- // this needs to be done *after* the variant is interned,
- // to support recursive structures
- for variant in &adt.variants {
- if variant.kind == ty::VariantKind::Tuple && adt.is_enum() {
- // tuple-like enum variant fields aren't real items - get the types
- // from the ctor.
- debug!("evaluating the ctor-type of {:?}",
- variant.name);
- let ctor_ty = get_type(cdata, variant.did.index, tcx);
- debug!("evaluating the ctor-type of {:?}.. {:?}",
- variant.name,
- ctor_ty);
- let field_tys = match ctor_ty.sty {
- ty::TyFnDef(.., &ty::BareFnTy { sig: ty::Binder(ty::FnSig {
- ref inputs, ..
- }), ..}) => {
- // tuple-struct constructors don't have escaping regions
- assert!(!inputs.has_escaping_regions());
- inputs
- },
- _ => bug!("tuple-variant ctor is not an ADT")
+// FIXME(#36588) These impls are horribly unsound as they allow
+// the caller to pick any lifetime for 'tcx, including 'static,
+// by using the unspecialized proxies to them.
+
+impl<'a, 'tcx> SpecializedDecoder<Ty<'tcx>> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<Ty<'tcx>, Self::Error> {
+ let tcx = self.tcx();
+
+        // Handle shorthands first, if we have a usize > 0x80.
+ if self.opaque.data[self.opaque.position()] & 0x80 != 0 {
+ let pos = self.read_usize()?;
+ assert!(pos >= SHORTHAND_OFFSET);
+ let key = ty::CReaderCacheKey {
+ cnum: self.cdata().cnum,
+ pos: pos - SHORTHAND_OFFSET
};
- for (field, &ty) in variant.fields.iter().zip(field_tys.iter()) {
- field.fulfill_ty(ty);
+ if let Some(ty) = tcx.rcache.borrow().get(&key).cloned() {
+ return Ok(ty);
}
+
+ let ty = self.with_position(key.pos, Ty::decode)?;
+ tcx.rcache.borrow_mut().insert(key, ty);
+ Ok(ty)
} else {
- for field in &variant.fields {
- debug!("evaluating the type of {:?}::{:?}", variant.name, field.name);
- let ty = get_type(cdata, field.did.index, tcx);
- field.fulfill_ty(ty);
- debug!("evaluating the type of {:?}::{:?}: {:?}",
- variant.name, field.name, ty);
- }
+ Ok(tcx.mk_ty(ty::TypeVariants::decode(self)?))
}
}
-
- adt
}
-pub fn get_predicates<'a, 'tcx>(cdata: Cmd,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::GenericPredicates<'tcx>
-{
- let item_doc = cdata.lookup_item(item_id);
- doc_predicates(item_doc, tcx, cdata, tag_item_predicates)
-}
-pub fn get_super_predicates<'a, 'tcx>(cdata: Cmd,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::GenericPredicates<'tcx>
-{
- let item_doc = cdata.lookup_item(item_id);
- doc_predicates(item_doc, tcx, cdata, tag_item_super_predicates)
-}
+impl<'a, 'tcx> SpecializedDecoder<ty::GenericPredicates<'tcx>> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<ty::GenericPredicates<'tcx>, Self::Error> {
+ Ok(ty::GenericPredicates {
+ parent: Decodable::decode(self)?,
+ predicates: (0..self.read_usize()?).map(|_| {
+            // Handle shorthands first, if we have a usize > 0x80.
+ if self.opaque.data[self.opaque.position()] & 0x80 != 0 {
+ let pos = self.read_usize()?;
+ assert!(pos >= SHORTHAND_OFFSET);
+ let pos = pos - SHORTHAND_OFFSET;
-pub fn get_generics<'a, 'tcx>(cdata: Cmd,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> &'tcx ty::Generics<'tcx>
-{
- let item_doc = cdata.lookup_item(item_id);
- doc_generics(item_doc, tcx, cdata)
+ self.with_position(pos, ty::Predicate::decode)
+ } else {
+ ty::Predicate::decode(self)
+ }
+ }).collect::<Result<Vec<_>, _>>()?
+ })
+ }
}
-pub fn get_type<'a, 'tcx>(cdata: Cmd, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Ty<'tcx>
-{
- let item_doc = cdata.lookup_item(id);
- doc_type(item_doc, tcx, cdata)
+impl<'a, 'tcx> SpecializedDecoder<&'tcx Substs<'tcx>> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> {
+ Ok(self.tcx().mk_substs(Decodable::decode(self)?))
+ }
}
-pub fn get_stability(cdata: Cmd, id: DefIndex) -> Option<attr::Stability> {
- let item = cdata.lookup_item(id);
- reader::maybe_get_doc(item, tag_items_data_item_stability).map(|doc| {
- let mut decoder = reader::Decoder::new(doc);
- Decodable::decode(&mut decoder).unwrap()
- })
+impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Region> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<&'tcx ty::Region, Self::Error> {
+ Ok(self.tcx().mk_region(Decodable::decode(self)?))
+ }
}
-pub fn get_deprecation(cdata: Cmd, id: DefIndex) -> Option<attr::Deprecation> {
- let item = cdata.lookup_item(id);
- reader::maybe_get_doc(item, tag_items_data_item_deprecation).map(|doc| {
- let mut decoder = reader::Decoder::new(doc);
- Decodable::decode(&mut decoder).unwrap()
- })
+impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice<Ty<'tcx>>> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice<Ty<'tcx>>, Self::Error> {
+ Ok(self.tcx().mk_type_list(Decodable::decode(self)?))
+ }
}
-pub fn get_visibility(cdata: Cmd, id: DefIndex) -> ty::Visibility {
- item_visibility(cdata.lookup_item(id))
+impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::BareFnTy<'tcx>> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<&'tcx ty::BareFnTy<'tcx>, Self::Error> {
+ Ok(self.tcx().mk_bare_fn(Decodable::decode(self)?))
+ }
}
-pub fn get_parent_impl(cdata: Cmd, id: DefIndex) -> Option<DefId> {
- let item = cdata.lookup_item(id);
- reader::maybe_get_doc(item, tag_items_data_parent_impl).map(|doc| {
- translated_def_id(cdata, doc)
- })
+impl<'a, 'tcx> SpecializedDecoder<ty::AdtDef<'tcx>> for DecodeContext<'a, 'tcx> {
+ fn specialized_decode(&mut self) -> Result<ty::AdtDef<'tcx>, Self::Error> {
+ let def_id = DefId::decode(self)?;
+ Ok(self.tcx().lookup_adt_def(def_id))
+ }
}
-pub fn get_repr_attrs(cdata: Cmd, id: DefIndex) -> Vec<attr::ReprAttr> {
- let item = cdata.lookup_item(id);
- match reader::maybe_get_doc(item, tag_items_data_item_repr).map(|doc| {
- let mut decoder = reader::Decoder::new(doc);
- Decodable::decode(&mut decoder).unwrap()
- }) {
- Some(attrs) => attrs,
- None => Vec::new(),
+impl<'a, 'tcx> MetadataBlob {
+ pub fn is_compatible(&self) -> bool {
+ self.raw_bytes().starts_with(METADATA_HEADER)
}
-}
-pub fn get_impl_polarity<'tcx>(cdata: Cmd,
- id: DefIndex)
- -> Option<hir::ImplPolarity>
-{
- let item_doc = cdata.lookup_item(id);
- let fam = item_family(item_doc);
- match fam {
- Family::Impl => {
- Some(parse_polarity(item_doc))
- }
- _ => None
+ pub fn get_root(&self) -> CrateRoot {
+ let slice = self.raw_bytes();
+ let offset = METADATA_HEADER.len();
+ let pos = (((slice[offset + 0] as u32) << 24) |
+ ((slice[offset + 1] as u32) << 16) |
+ ((slice[offset + 2] as u32) << 8) |
+ ((slice[offset + 3] as u32) << 0)) as usize;
+ Lazy::with_position(pos).decode(self)
}
-}
-pub fn get_custom_coerce_unsized_kind<'tcx>(
- cdata: Cmd,
- id: DefIndex)
- -> Option<ty::adjustment::CustomCoerceUnsized>
-{
- let item_doc = cdata.lookup_item(id);
- reader::maybe_get_doc(item_doc, tag_impl_coerce_unsized_kind).map(|kind_doc| {
- let mut decoder = reader::Decoder::new(kind_doc);
- Decodable::decode(&mut decoder).unwrap()
- })
-}
+ /// Go through each item in the metadata and create a map from that
+ /// item's def-key to the item's DefIndex.
+ pub fn load_key_map(&self, index: LazySeq<Index>) -> FnvHashMap<DefKey, DefIndex> {
+ index.iter_enumerated(self.raw_bytes()).map(|(index, item)| {
+ (item.decode(self).def_key.decode(self), index)
+ }).collect()
+ }
-pub fn get_impl_trait<'a, 'tcx>(cdata: Cmd,
- id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Option<ty::TraitRef<'tcx>>
-{
- let item_doc = cdata.lookup_item(id);
- let fam = item_family(item_doc);
- match fam {
- Family::Impl | Family::DefaultImpl => {
- reader::maybe_get_doc(item_doc, tag_item_trait_ref).map(|tp| {
- doc_trait_ref(tp, tcx, cdata)
- })
+ pub fn list_crate_metadata(&self, out: &mut io::Write) -> io::Result<()> {
+ write!(out, "=External Dependencies=\n")?;
+ let root = self.get_root();
+ for (i, dep) in root.crate_deps.decode(self).enumerate() {
+ write!(out, "{} {}-{}\n", i + 1, dep.name, dep.hash)?;
}
- _ => None
+ write!(out, "\n")?;
+ Ok(())
}
}
-/// Iterates over the language items in the given crate.
-pub fn each_lang_item<F>(cdata: Cmd, mut f: F) -> bool where
- F: FnMut(DefIndex, usize) -> bool,
-{
- let root = rbml::Doc::new(cdata.data());
- let lang_items = reader::get_doc(root, tag_lang_items);
- reader::tagged_docs(lang_items, tag_lang_items_item).all(|item_doc| {
- let id_doc = reader::get_doc(item_doc, tag_lang_items_item_id);
- let id = reader::doc_as_u32(id_doc) as usize;
- let index_doc = reader::get_doc(item_doc, tag_lang_items_item_index);
- let index = DefIndex::from_u32(reader::doc_as_u32(index_doc));
-
- f(index, id)
- })
+impl<'tcx> EntryKind<'tcx> {
+ fn to_def(&self, did: DefId) -> Option<Def> {
+ Some(match *self {
+ EntryKind::Const => Def::Const(did),
+ EntryKind::AssociatedConst(_) => Def::AssociatedConst(did),
+ EntryKind::ImmStatic |
+ EntryKind::ForeignImmStatic => Def::Static(did, false),
+ EntryKind::MutStatic |
+ EntryKind::ForeignMutStatic => Def::Static(did, true),
+ EntryKind::Struct(_) => Def::Struct(did),
+ EntryKind::Union(_) => Def::Union(did),
+ EntryKind::Fn(_) |
+ EntryKind::ForeignFn(_) => Def::Fn(did),
+ EntryKind::Method(_) => Def::Method(did),
+ EntryKind::Type => Def::TyAlias(did),
+ EntryKind::AssociatedType(_) => Def::AssociatedTy(did),
+ EntryKind::Mod(_) => Def::Mod(did),
+ EntryKind::Variant(_) => Def::Variant(did),
+ EntryKind::Trait(_) => Def::Trait(did),
+ EntryKind::Enum => Def::Enum(did),
+
+ EntryKind::ForeignMod |
+ EntryKind::Impl(_) |
+ EntryKind::DefaultImpl(_) |
+ EntryKind::Field |
+ EntryKind::Closure (_) => {
+ return None
+ }
+ })
+ }
}
-fn each_child_of_item_or_crate<F, G>(cdata: Cmd,
- item_doc: rbml::Doc,
- mut get_crate_data: G,
- mut callback: F) where
- F: FnMut(DefLike, ast::Name, ty::Visibility),
- G: FnMut(ast::CrateNum) -> Rc<CrateMetadata>,
-{
- // Iterate over all children.
- for child_info_doc in reader::tagged_docs(item_doc, tag_mod_child) {
- let child_def_id = translated_def_id(cdata, child_info_doc);
-
- // This item may be in yet another crate if it was the child of a
- // reexport.
- let crate_data = if child_def_id.krate == cdata.cnum {
- None
- } else {
- Some(get_crate_data(child_def_id.krate))
- };
- let crate_data = match crate_data {
- Some(ref cdata) => &**cdata,
- None => cdata
- };
+impl<'a, 'tcx> CrateMetadata {
+ fn maybe_entry(&self, item_id: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
+ self.root.index.lookup(self.blob.raw_bytes(), item_id)
+ }
- // Get the item.
- if let Some(child_item_doc) = crate_data.get_item(child_def_id.index) {
- // Hand off the item to the callback.
- let child_name = item_name(child_item_doc);
- let def_like = item_to_def_like(crate_data, child_item_doc, child_def_id);
- let visibility = item_visibility(child_item_doc);
- callback(def_like, child_name, visibility);
+ fn entry(&self, item_id: DefIndex) -> Entry<'tcx> {
+ match self.maybe_entry(item_id) {
+ None => bug!("entry: id not found: {:?} in crate {:?} with number {}",
+ item_id,
+ self.name,
+ self.cnum),
+ Some(d) => d.decode(self)
}
}
- for reexport_doc in reexports(item_doc) {
- let def_id_doc = reader::get_doc(reexport_doc,
- tag_items_data_item_reexport_def_id);
- let child_def_id = translated_def_id(cdata, def_id_doc);
-
- let name_doc = reader::get_doc(reexport_doc,
- tag_items_data_item_reexport_name);
- let name = name_doc.as_str();
-
- // This reexport may be in yet another crate.
- let crate_data = if child_def_id.krate == cdata.cnum {
- None
- } else {
- Some(get_crate_data(child_def_id.krate))
- };
- let crate_data = match crate_data {
- Some(ref cdata) => &**cdata,
- None => cdata
- };
-
- // Get the item.
- if let Some(child_item_doc) = crate_data.get_item(child_def_id.index) {
- // Hand off the item to the callback.
- let def_like = item_to_def_like(crate_data, child_item_doc, child_def_id);
- // These items have a public visibility because they're part of
- // a public re-export.
- callback(def_like, token::intern(name), ty::Visibility::Public);
+ fn local_def_id(&self, index: DefIndex) -> DefId {
+ DefId {
+ krate: self.cnum,
+ index: index
}
}
-}
-/// Iterates over each child of the given item.
-pub fn each_child_of_item<F, G>(cdata: Cmd, id: DefIndex, get_crate_data: G, callback: F)
- where F: FnMut(DefLike, ast::Name, ty::Visibility),
- G: FnMut(ast::CrateNum) -> Rc<CrateMetadata>,
-{
- // Find the item.
- let item_doc = match cdata.get_item(id) {
- None => return,
- Some(item_doc) => item_doc,
- };
-
- each_child_of_item_or_crate(cdata, item_doc, get_crate_data, callback)
-}
-
-/// Iterates over all the top-level crate items.
-pub fn each_top_level_item_of_crate<F, G>(cdata: Cmd, get_crate_data: G, callback: F)
- where F: FnMut(DefLike, ast::Name, ty::Visibility),
- G: FnMut(ast::CrateNum) -> Rc<CrateMetadata>,
-{
- each_child_of_item(cdata, CRATE_DEF_INDEX, get_crate_data, callback)
-}
-
-pub fn get_item_name(cdata: Cmd, id: DefIndex) -> ast::Name {
- item_name(cdata.lookup_item(id))
-}
-
-pub fn maybe_get_item_name(cdata: Cmd, id: DefIndex) -> Option<ast::Name> {
- maybe_item_name(cdata.lookup_item(id))
-}
-
-pub enum FoundAst<'ast> {
- Found(&'ast InlinedItem),
- FoundParent(DefId, &'ast hir::Item),
- NotFound,
-}
-
-pub fn maybe_get_item_ast<'a, 'tcx>(cdata: Cmd, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex)
- -> FoundAst<'tcx> {
- debug!("Looking up item: {:?}", id);
- let item_doc = cdata.lookup_item(id);
- let item_did = item_def_id(item_doc, cdata);
- let parent_def_id = DefId {
- krate: cdata.cnum,
- index: def_key(cdata, id).parent.unwrap()
- };
- let mut parent_def_path = def_path(cdata, id).unwrap();
- parent_def_path.data.pop();
- if let Some(ast_doc) = reader::maybe_get_doc(item_doc, tag_ast as usize) {
- let ii = decode_inlined_item(cdata,
- tcx,
- parent_def_path,
- parent_def_id,
- ast_doc,
- item_did);
- return FoundAst::Found(ii);
- } else if let Some(parent_did) = item_parent_item(cdata, item_doc) {
- // Remove the last element from the paths, since we are now
- // trying to inline the parent.
- let grandparent_def_id = DefId {
- krate: cdata.cnum,
- index: def_key(cdata, parent_def_id.index).parent.unwrap()
- };
- let mut grandparent_def_path = parent_def_path;
- grandparent_def_path.data.pop();
- let parent_doc = cdata.lookup_item(parent_did.index);
- if let Some(ast_doc) = reader::maybe_get_doc(parent_doc, tag_ast as usize) {
- let ii = decode_inlined_item(cdata,
- tcx,
- grandparent_def_path,
- grandparent_def_id,
- ast_doc,
- parent_did);
- if let &InlinedItem::Item(_, ref i) = ii {
- return FoundAst::FoundParent(parent_did, i);
- }
- }
+ fn item_name(&self, item: &Entry<'tcx>) -> ast::Name {
+ item.def_key.decode(self).disambiguated_data.data.get_opt_name()
+ .expect("no name in item_name")
}
- FoundAst::NotFound
-}
-pub fn is_item_mir_available<'tcx>(cdata: Cmd, id: DefIndex) -> bool {
- if let Some(item_doc) = cdata.get_item(id) {
- return reader::maybe_get_doc(item_doc, tag_mir as usize).is_some();
+ pub fn get_def(&self, index: DefIndex) -> Option<Def> {
+ self.entry(index).kind.to_def(self.local_def_id(index))
}
- false
-}
+ pub fn get_trait_def(&self,
+ item_id: DefIndex,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::TraitDef<'tcx> {
+ let data = match self.entry(item_id).kind {
+ EntryKind::Trait(data) => data.decode(self),
+ _ => bug!()
+ };
-pub fn maybe_get_item_mir<'a, 'tcx>(cdata: Cmd,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- id: DefIndex)
- -> Option<mir::repr::Mir<'tcx>> {
- let item_doc = cdata.lookup_item(id);
+ ty::TraitDef::new(data.unsafety, data.paren_sugar,
+ tcx.lookup_generics(self.local_def_id(item_id)),
+ data.trait_ref.decode((self, tcx)),
+ self.def_path(item_id).unwrap().deterministic_hash(tcx))
+ }
- return reader::maybe_get_doc(item_doc, tag_mir as usize).map(|mir_doc| {
- let dcx = tls_context::DecodingContext {
- crate_metadata: cdata,
- tcx: tcx,
+ fn get_variant(&self, item: &Entry<'tcx>, index: DefIndex)
+ -> (ty::VariantDefData<'tcx, 'tcx>, Option<DefIndex>) {
+ let data = match item.kind {
+ EntryKind::Variant(data) |
+ EntryKind::Struct(data) |
+ EntryKind::Union(data) => data.decode(self),
+ _ => bug!()
};
- let mut decoder = reader::Decoder::new(mir_doc);
- let mut mir = decoder.read_opaque(|opaque_decoder, _| {
- tls::enter_decoding_context(&dcx, opaque_decoder, |_, opaque_decoder| {
- Decodable::decode(opaque_decoder)
- })
- }).unwrap();
-
- assert!(decoder.position() == mir_doc.end);
+ let fields = item.children.decode(self).map(|index| {
+ let f = self.entry(index);
+ ty::FieldDefData::new(self.local_def_id(index),
+ self.item_name(&f),
+ f.visibility)
+ }).collect();
+
+ (ty::VariantDefData {
+ did: self.local_def_id(data.struct_ctor.unwrap_or(index)),
+ name: self.item_name(item),
+ fields: fields,
+ disr_val: ConstInt::Infer(data.disr),
+ kind: data.kind,
+ }, data.struct_ctor)
+ }
- let mut def_id_and_span_translator = MirDefIdAndSpanTranslator {
- crate_metadata: cdata,
- codemap: tcx.sess.codemap(),
- last_filemap_index_hint: Cell::new(0),
+ pub fn get_adt_def(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> ty::AdtDefMaster<'tcx> {
+ let item = self.entry(item_id);
+ let did = self.local_def_id(item_id);
+ let mut ctor_index = None;
+ let variants = if let EntryKind::Enum = item.kind {
+ item.children.decode(self).map(|index| {
+ let (variant, struct_ctor) = self.get_variant(&self.entry(index), index);
+ assert_eq!(struct_ctor, None);
+ variant
+ }).collect()
+ } else{
+ let (variant, struct_ctor) = self.get_variant(&item, item_id);
+ ctor_index = struct_ctor;
+ vec![variant]
+ };
+ let kind = match item.kind {
+ EntryKind::Enum => ty::AdtKind::Enum,
+ EntryKind::Struct(_) => ty::AdtKind::Struct,
+ EntryKind::Union(_) => ty::AdtKind::Union,
+ _ => bug!("get_adt_def called on a non-ADT {:?}", did)
};
- def_id_and_span_translator.visit_mir(&mut mir);
- for promoted in &mut mir.promoted {
- def_id_and_span_translator.visit_mir(promoted);
+ let adt = tcx.intern_adt_def(did, kind, variants);
+ if let Some(ctor_index) = ctor_index {
+ // Make adt definition available through constructor id as well.
+ tcx.insert_adt_def(self.local_def_id(ctor_index), adt);
}
- mir
- });
+ // this needs to be done *after* the variant is interned,
+ // to support recursive structures
+ for variant in &adt.variants {
+ for field in &variant.fields {
+ debug!("evaluating the type of {:?}::{:?}", variant.name, field.name);
+ let ty = self.get_type(field.did.index, tcx);
+ field.fulfill_ty(ty);
+ debug!("evaluating the type of {:?}::{:?}: {:?}",
+ variant.name, field.name, ty);
+ }
+ }
- struct MirDefIdAndSpanTranslator<'cdata, 'codemap> {
- crate_metadata: Cmd<'cdata>,
- codemap: &'codemap codemap::CodeMap,
- last_filemap_index_hint: Cell<usize>
+ adt
}
- impl<'v, 'cdata, 'codemap> mir::visit::MutVisitor<'v>
- for MirDefIdAndSpanTranslator<'cdata, 'codemap>
- {
- fn visit_def_id(&mut self, def_id: &mut DefId, _: Location) {
- *def_id = translate_def_id(self.crate_metadata, *def_id);
- }
-
- fn visit_span(&mut self, span: &mut Span) {
- *span = translate_span(self.crate_metadata,
- self.codemap,
- &self.last_filemap_index_hint,
- *span);
- }
+ pub fn get_predicates(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> ty::GenericPredicates<'tcx> {
+ self.entry(item_id).predicates.unwrap().decode((self, tcx))
}
-}
-fn get_explicit_self<'a, 'tcx>(item: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::ExplicitSelfCategory<'tcx> {
- fn get_mutability(ch: u8) -> hir::Mutability {
- match ch as char {
- 'i' => hir::MutImmutable,
- 'm' => hir::MutMutable,
- _ => bug!("unknown mutability character: `{}`", ch as char),
+ pub fn get_super_predicates(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> ty::GenericPredicates<'tcx> {
+ match self.entry(item_id).kind {
+ EntryKind::Trait(data) => {
+ data.decode(self).super_predicates.decode((self, tcx))
+ }
+ _ => bug!()
}
}
- let explicit_self_doc = reader::get_doc(item, tag_item_trait_method_explicit_self);
- let string = explicit_self_doc.as_str();
+ pub fn get_generics(&self, item_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> ty::Generics<'tcx> {
+ self.entry(item_id).generics.unwrap().decode((self, tcx))
+ }
- let explicit_self_kind = string.as_bytes()[0];
- match explicit_self_kind as char {
- 's' => ty::ExplicitSelfCategory::Static,
- 'v' => ty::ExplicitSelfCategory::ByValue,
- '~' => ty::ExplicitSelfCategory::ByBox,
- // FIXME(#4846) expl. region
- '&' => {
- ty::ExplicitSelfCategory::ByReference(
- tcx.mk_region(ty::ReEmpty),
- get_mutability(string.as_bytes()[1]))
- }
- _ => bug!("unknown self type code: `{}`", explicit_self_kind as char)
+ pub fn get_type(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
+ self.entry(id).ty.unwrap().decode((self, tcx))
}
-}
-/// Returns the def IDs of all the items in the given implementation.
-pub fn get_impl_items(cdata: Cmd, impl_id: DefIndex)
- -> Vec<ty::ImplOrTraitItemId> {
- reader::tagged_docs(cdata.lookup_item(impl_id), tag_item_impl_item).map(|doc| {
- let def_id = item_def_id(doc, cdata);
- match item_sort(doc) {
- Some('C') | Some('c') => ty::ConstTraitItemId(def_id),
- Some('r') | Some('p') => ty::MethodTraitItemId(def_id),
- Some('t') => ty::TypeTraitItemId(def_id),
- _ => bug!("unknown impl item sort"),
- }
- }).collect()
-}
+ pub fn get_stability(&self, id: DefIndex) -> Option<attr::Stability> {
+ self.entry(id).stability.map(|stab| stab.decode(self))
+ }
-pub fn get_trait_name(cdata: Cmd, id: DefIndex) -> ast::Name {
- let doc = cdata.lookup_item(id);
- item_name(doc)
-}
+ pub fn get_deprecation(&self, id: DefIndex) -> Option<attr::Deprecation> {
+ self.entry(id).deprecation.map(|depr| depr.decode(self))
+ }
-pub fn get_impl_or_trait_item<'a, 'tcx>(cdata: Cmd, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Option<ty::ImplOrTraitItem<'tcx>> {
- let item_doc = cdata.lookup_item(id);
-
- let def_id = item_def_id(item_doc, cdata);
-
- let container_id = if let Some(id) = item_parent_item(cdata, item_doc) {
- id
- } else {
- return None;
- };
- let container_doc = cdata.lookup_item(container_id.index);
- let container = match item_family(container_doc) {
- Trait => TraitContainer(container_id),
- _ => ImplContainer(container_id),
- };
-
- let name = item_name(item_doc);
- let vis = item_visibility(item_doc);
- let defaultness = item_defaultness(item_doc);
-
- Some(match item_sort(item_doc) {
- sort @ Some('C') | sort @ Some('c') => {
- let ty = doc_type(item_doc, tcx, cdata);
- ty::ConstTraitItem(Rc::new(ty::AssociatedConst {
- name: name,
- ty: ty,
- vis: vis,
- defaultness: defaultness,
- def_id: def_id,
- container: container,
- has_value: sort == Some('C')
- }))
- }
- Some('r') | Some('p') => {
- let generics = doc_generics(item_doc, tcx, cdata);
- let predicates = doc_predicates(item_doc, tcx, cdata, tag_item_predicates);
- let ity = tcx.lookup_item_type(def_id).ty;
- let fty = match ity.sty {
- ty::TyFnDef(.., fty) => fty,
- _ => bug!(
- "the type {:?} of the method {:?} is not a function?",
- ity, name)
- };
- let explicit_self = get_explicit_self(item_doc, tcx);
-
- ty::MethodTraitItem(Rc::new(ty::Method::new(name,
- generics,
- predicates,
- fty,
- explicit_self,
- vis,
- defaultness,
- def_id,
- container)))
- }
- Some('t') => {
- let ty = maybe_doc_type(item_doc, tcx, cdata);
- ty::TypeTraitItem(Rc::new(ty::AssociatedType {
- name: name,
- ty: ty,
- vis: vis,
- defaultness: defaultness,
- def_id: def_id,
- container: container,
- }))
- }
- _ => return None
- })
-}
+ pub fn get_visibility(&self, id: DefIndex) -> ty::Visibility {
+ self.entry(id).visibility
+ }
-pub fn get_trait_item_def_ids(cdata: Cmd, id: DefIndex)
- -> Vec<ty::ImplOrTraitItemId> {
- let item = cdata.lookup_item(id);
- reader::tagged_docs(item, tag_item_trait_item).map(|mth| {
- let def_id = item_def_id(mth, cdata);
- match item_sort(mth) {
- Some('C') | Some('c') => ty::ConstTraitItemId(def_id),
- Some('r') | Some('p') => ty::MethodTraitItemId(def_id),
- Some('t') => ty::TypeTraitItemId(def_id),
- _ => bug!("unknown trait item sort"),
+ fn get_impl_data(&self, id: DefIndex) -> ImplData<'tcx> {
+ match self.entry(id).kind {
+ EntryKind::Impl(data) => data.decode(self),
+ _ => bug!()
}
- }).collect()
-}
+ }
-pub fn get_item_variances(cdata: Cmd, id: DefIndex) -> Vec<ty::Variance> {
- let item_doc = cdata.lookup_item(id);
- let variance_doc = reader::get_doc(item_doc, tag_item_variances);
- let mut decoder = reader::Decoder::new(variance_doc);
- Decodable::decode(&mut decoder).unwrap()
-}
+ pub fn get_parent_impl(&self, id: DefIndex) -> Option<DefId> {
+ self.get_impl_data(id).parent_impl
+ }
-pub fn get_provided_trait_methods<'a, 'tcx>(cdata: Cmd,
- id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Vec<Rc<ty::Method<'tcx>>> {
- let item = cdata.lookup_item(id);
+ pub fn get_impl_polarity(&self, id: DefIndex) -> hir::ImplPolarity {
+ self.get_impl_data(id).polarity
+ }
- reader::tagged_docs(item, tag_item_trait_item).filter_map(|mth_id| {
- let did = item_def_id(mth_id, cdata);
- let mth = cdata.lookup_item(did.index);
+ pub fn get_custom_coerce_unsized_kind(&self, id: DefIndex)
+ -> Option<ty::adjustment::CustomCoerceUnsized> {
+ self.get_impl_data(id).coerce_unsized_kind
+ }
- if item_sort(mth) == Some('p') {
- let trait_item = get_impl_or_trait_item(cdata, did.index, tcx);
- if let Some(ty::MethodTraitItem(ref method)) = trait_item {
- Some((*method).clone())
- } else {
- None
- }
- } else {
- None
- }
- }).collect()
-}
+ pub fn get_impl_trait(&self,
+ id: DefIndex,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> Option<ty::TraitRef<'tcx>> {
+ self.get_impl_data(id).trait_ref.map(|tr| tr.decode((self, tcx)))
+ }
-pub fn get_associated_consts<'a, 'tcx>(cdata: Cmd, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Vec<Rc<ty::AssociatedConst<'tcx>>> {
- let item = cdata.lookup_item(id);
+ /// Iterates over the language items in the given crate.
+ pub fn get_lang_items(&self) -> Vec<(DefIndex, usize)> {
+ self.root.lang_items.decode(self).collect()
+ }
- [tag_item_trait_item, tag_item_impl_item].iter().flat_map(|&tag| {
- reader::tagged_docs(item, tag).filter_map(|ac_id| {
- let did = item_def_id(ac_id, cdata);
- let ac_doc = cdata.lookup_item(did.index);
+ /// Iterates over each child of the given item.
+ pub fn each_child_of_item<F>(&self, id: DefIndex, mut callback: F)
+ where F: FnMut(def::Export)
+ {
+ // Find the item.
+ let item = match self.maybe_entry(id) {
+ None => return,
+ Some(item) => item.decode(self),
+ };
- match item_sort(ac_doc) {
- Some('C') | Some('c') => {
- let trait_item = get_impl_or_trait_item(cdata, did.index, tcx);
- if let Some(ty::ConstTraitItem(ref ac)) = trait_item {
- Some((*ac).clone())
- } else {
- None
+ // Iterate over all children.
+ for child_index in item.children.decode(self) {
+ // Get the item.
+ if let Some(child) = self.maybe_entry(child_index) {
+ let child = child.decode(self);
+ // Hand off the item to the callback.
+ match child.kind {
+ // FIXME(eddyb) Don't encode these in children.
+ EntryKind::ForeignMod => {
+ for child_index in child.children.decode(self) {
+ callback(def::Export {
+ def_id: self.local_def_id(child_index),
+ name: self.item_name(&self.entry(child_index))
+ });
+ }
+ continue;
}
+ EntryKind::Impl(_) | EntryKind::DefaultImpl(_) => continue,
+
+ _ => {}
+ }
+
+ let def_key = child.def_key.decode(self);
+ if let Some(name) = def_key.disambiguated_data.data.get_opt_name() {
+ callback(def::Export {
+ def_id: self.local_def_id(child_index),
+ name: name
+ });
}
- _ => None
}
- })
- }).collect()
-}
+ }
-pub fn get_variant_kind(cdata: Cmd, node_id: DefIndex) -> Option<VariantKind>
-{
- let item = cdata.lookup_item(node_id);
- family_to_variant_kind(item_family(item))
-}
+ if let EntryKind::Mod(data) = item.kind {
+ for exp in data.decode(self).reexports.decode(self) {
+ callback(exp);
+ }
+ }
+ }
-pub fn get_struct_ctor_def_id(cdata: Cmd, node_id: DefIndex) -> Option<DefId>
-{
- let item = cdata.lookup_item(node_id);
- reader::maybe_get_doc(item, tag_items_data_item_struct_ctor).
- map(|ctor_doc| translated_def_id(cdata, ctor_doc))
-}
+ pub fn maybe_get_item_ast(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex)
+ -> Option<&'tcx InlinedItem> {
+ debug!("Looking up item: {:?}", id);
+ let item_doc = self.entry(id);
+ let item_did = self.local_def_id(id);
+ let parent_def_id = self.local_def_id(self.def_key(id).parent.unwrap());
+ let mut parent_def_path = self.def_path(id).unwrap();
+ parent_def_path.data.pop();
+ item_doc.ast.map(|ast| {
+ let ast = ast.decode(self);
+ decode_inlined_item(self, tcx, parent_def_path, parent_def_id, ast, item_did)
+ })
+ }
-/// If node_id is the constructor of a tuple struct, retrieve the NodeId of
-/// the actual type definition, otherwise, return None
-pub fn get_tuple_struct_definition_if_ctor(cdata: Cmd,
- node_id: DefIndex)
- -> Option<DefId>
-{
- let item = cdata.lookup_item(node_id);
- reader::tagged_docs(item, tag_items_data_item_is_tuple_struct_ctor).next().map(|_| {
- item_require_parent_item(cdata, item)
- })
-}
+ pub fn is_item_mir_available(&self, id: DefIndex) -> bool {
+ self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some()
+ }
-pub fn get_item_attrs(cdata: Cmd,
- orig_node_id: DefIndex)
- -> Vec<ast::Attribute> {
- // The attributes for a tuple struct are attached to the definition, not the ctor;
- // we assume that someone passing in a tuple struct ctor is actually wanting to
- // look at the definition
- let node_id = get_tuple_struct_definition_if_ctor(cdata, orig_node_id);
- let node_id = node_id.map(|x| x.index).unwrap_or(orig_node_id);
- let item = cdata.lookup_item(node_id);
- get_attributes(item)
-}
+ pub fn maybe_get_item_mir(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex)
+ -> Option<Mir<'tcx>> {
+ self.entry(id).mir.map(|mir| mir.decode((self, tcx)))
+ }
-pub fn get_struct_field_attrs(cdata: Cmd) -> FnvHashMap<DefId, Vec<ast::Attribute>> {
- let data = rbml::Doc::new(cdata.data());
- let fields = reader::get_doc(data, tag_struct_fields);
- reader::tagged_docs(fields, tag_struct_field).map(|field| {
- let def_id = translated_def_id(cdata, reader::get_doc(field, tag_def_id));
- let attrs = get_attributes(field);
- (def_id, attrs)
- }).collect()
-}
+ pub fn get_impl_or_trait_item(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> Option<ty::ImplOrTraitItem<'tcx>> {
+ let item = self.entry(id);
+ let parent_and_name = || {
+ let def_key = item.def_key.decode(self);
+ (self.local_def_id(def_key.parent.unwrap()),
+ def_key.disambiguated_data.data.get_opt_name().unwrap())
+ };
-fn struct_field_family_to_visibility(family: Family) -> ty::Visibility {
- match family {
- PublicField => ty::Visibility::Public,
- InheritedField => ty::Visibility::PrivateExternal,
- _ => bug!()
+ Some(match item.kind {
+ EntryKind::AssociatedConst(container) => {
+ let (parent, name) = parent_and_name();
+ ty::ConstTraitItem(Rc::new(ty::AssociatedConst {
+ name: name,
+ ty: item.ty.unwrap().decode((self, tcx)),
+ vis: item.visibility,
+ defaultness: container.defaultness(),
+ def_id: self.local_def_id(id),
+ container: container.with_def_id(parent),
+ has_value: container.has_body(),
+ }))
+ }
+ EntryKind::Method(data) => {
+ let (parent, name) = parent_and_name();
+ let ity = item.ty.unwrap().decode((self, tcx));
+ let fty = match ity.sty {
+ ty::TyFnDef(.., fty) => fty,
+ _ => bug!(
+ "the type {:?} of the method {:?} is not a function?",
+ ity, name)
+ };
+
+ let data = data.decode(self);
+ ty::MethodTraitItem(Rc::new(ty::Method {
+ name: name,
+ generics: tcx.lookup_generics(self.local_def_id(id)),
+ predicates: item.predicates.unwrap().decode((self, tcx)),
+ fty: fty,
+ explicit_self: data.explicit_self.decode((self, tcx)),
+ vis: item.visibility,
+ defaultness: data.container.defaultness(),
+ has_body: data.container.has_body(),
+ def_id: self.local_def_id(id),
+ container: data.container.with_def_id(parent),
+ }))
+ }
+ EntryKind::AssociatedType(container) => {
+ let (parent, name) = parent_and_name();
+ ty::TypeTraitItem(Rc::new(ty::AssociatedType {
+ name: name,
+ ty: item.ty.map(|ty| ty.decode((self, tcx))),
+ vis: item.visibility,
+ defaultness: container.defaultness(),
+ def_id: self.local_def_id(id),
+ container: container.with_def_id(parent),
+ }))
+ }
+ _ => return None
+ })
}
-}
-
-pub fn get_struct_field_names(cdata: Cmd, id: DefIndex) -> Vec<ast::Name> {
- let item = cdata.lookup_item(id);
- let mut index = 0;
- reader::tagged_docs(item, tag_item_field).map(|an_item| {
- item_name(an_item)
- }).chain(reader::tagged_docs(item, tag_item_unnamed_field).map(|_| {
- let name = token::with_ident_interner(|interner| interner.intern(index.to_string()));
- index += 1;
- name
- })).collect()
-}
-fn get_attributes(md: rbml::Doc) -> Vec<ast::Attribute> {
- reader::maybe_get_doc(md, tag_attributes).map_or(vec![], |attrs_doc| {
- let mut decoder = reader::Decoder::new(attrs_doc);
- let mut attrs: Vec<ast::Attribute> = decoder.read_opaque(|opaque_decoder, _| {
- Decodable::decode(opaque_decoder)
- }).unwrap();
+ pub fn get_item_variances(&self, id: DefIndex) -> Vec<ty::Variance> {
+ self.entry(id).variances.decode(self).collect()
+ }
- // Need new unique IDs: old thread-local IDs won't map to new threads.
- for attr in attrs.iter_mut() {
- attr.node.id = attr::mk_attr_id();
+ pub fn get_variant_kind(&self, node_id: DefIndex) -> Option<ty::VariantKind> {
+ match self.entry(node_id).kind {
+ EntryKind::Struct(data) |
+ EntryKind::Union(data) |
+ EntryKind::Variant(data) => Some(data.decode(self).kind),
+ _ => None
}
-
- attrs
- })
-}
-
-fn list_crate_attributes(md: rbml::Doc, hash: &Svh,
- out: &mut io::Write) -> io::Result<()> {
- write!(out, "=Crate Attributes ({})=\n", *hash)?;
-
- let r = get_attributes(md);
- for attr in &r {
- write!(out, "{}\n", pprust::attribute_to_string(attr))?;
}
- write!(out, "\n\n")
-}
-
-pub fn get_crate_attributes(data: &[u8]) -> Vec<ast::Attribute> {
- get_attributes(rbml::Doc::new(data))
-}
-
-#[derive(Clone)]
-pub struct CrateDep {
- pub cnum: ast::CrateNum,
- pub name: String,
- pub hash: Svh,
- pub explicitly_linked: bool,
-}
-
-pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
- let cratedoc = rbml::Doc::new(data);
- let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
-
- fn docstr(doc: rbml::Doc, tag_: usize) -> String {
- let d = reader::get_doc(doc, tag_);
- d.as_str().to_string()
- }
-
- reader::tagged_docs(depsdoc, tag_crate_dep).enumerate().map(|(crate_num, depdoc)| {
- let name = docstr(depdoc, tag_crate_dep_crate_name);
- let hash = Svh::new(reader::doc_as_u64(reader::get_doc(depdoc, tag_crate_dep_hash)));
- let doc = reader::get_doc(depdoc, tag_crate_dep_explicitly_linked);
- let explicitly_linked = reader::doc_as_u8(doc) != 0;
- CrateDep {
- cnum: crate_num as u32 + 1,
- name: name,
- hash: hash,
- explicitly_linked: explicitly_linked,
+ pub fn get_struct_ctor_def_id(&self, node_id: DefIndex) -> Option<DefId> {
+ match self.entry(node_id).kind {
+ EntryKind::Struct(data) => {
+ data.decode(self).struct_ctor.map(|index| self.local_def_id(index))
+ }
+ _ => None
}
- }).collect()
-}
-
-fn list_crate_deps(data: &[u8], out: &mut io::Write) -> io::Result<()> {
- write!(out, "=External Dependencies=\n")?;
- for dep in &get_crate_deps(data) {
- write!(out, "{} {}-{}\n", dep.cnum, dep.name, dep.hash)?;
}
- write!(out, "\n")?;
- Ok(())
-}
-
-pub fn maybe_get_crate_hash(data: &[u8]) -> Option<Svh> {
- let cratedoc = rbml::Doc::new(data);
- reader::maybe_get_doc(cratedoc, tag_crate_hash).map(|doc| {
- Svh::new(reader::doc_as_u64(doc))
- })
-}
-
-pub fn get_crate_hash(data: &[u8]) -> Svh {
- let cratedoc = rbml::Doc::new(data);
- let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
- Svh::new(reader::doc_as_u64(hashdoc))
-}
-pub fn maybe_get_crate_name(data: &[u8]) -> Option<&str> {
- let cratedoc = rbml::Doc::new(data);
- reader::maybe_get_doc(cratedoc, tag_crate_crate_name).map(|doc| {
- doc.as_str()
- })
-}
-
-pub fn get_crate_disambiguator<'a>(data: &'a [u8]) -> &'a str {
- let crate_doc = rbml::Doc::new(data);
- let disambiguator_doc = reader::get_doc(crate_doc, tag_crate_disambiguator);
- let slice: &'a str = disambiguator_doc.as_str();
- slice
-}
-
-pub fn get_crate_triple(data: &[u8]) -> Option<String> {
- let cratedoc = rbml::Doc::new(data);
- let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple);
- triple_doc.map(|s| s.as_str().to_string())
-}
-
-pub fn get_crate_name(data: &[u8]) -> &str {
- maybe_get_crate_name(data).expect("no crate name in crate")
-}
-
-pub fn list_crate_metadata(bytes: &[u8], out: &mut io::Write) -> io::Result<()> {
- let hash = get_crate_hash(bytes);
- let md = rbml::Doc::new(bytes);
- list_crate_attributes(md, &hash, out)?;
- list_crate_deps(bytes, out)
-}
-
-// Translates a def_id from an external crate to a def_id for the current
-// compilation environment. We use this when trying to load types from
-// external crates - if those types further refer to types in other crates
-// then we must translate the crate number from that encoded in the external
-// crate to the correct local crate number.
-pub fn translate_def_id(cdata: Cmd, did: DefId) -> DefId {
- if did.is_local() {
- return DefId { krate: cdata.cnum, index: did.index };
+ pub fn get_item_attrs(&self, node_id: DefIndex) -> Vec<ast::Attribute> {
+ // The attributes for a tuple struct are attached to the definition, not the ctor;
+ // we assume that someone passing in a tuple struct ctor is actually wanting to
+ // look at the definition
+ let mut item = self.entry(node_id);
+ let def_key = item.def_key.decode(self);
+ if def_key.disambiguated_data.data == DefPathData::StructCtor {
+ item = self.entry(def_key.parent.unwrap());
+ }
+ self.get_attributes(&item)
}
- DefId {
- krate: cdata.cnum_map.borrow()[did.krate],
- index: did.index
+ pub fn get_struct_field_names(&self, id: DefIndex) -> Vec<ast::Name> {
+ self.entry(id).children.decode(self).map(|index| {
+ self.item_name(&self.entry(index))
+ }).collect()
}
-}
-// Translate a DefId from the current compilation environment to a DefId
-// for an external crate.
-fn reverse_translate_def_id(cdata: Cmd, did: DefId) -> Option<DefId> {
- for (local, &global) in cdata.cnum_map.borrow().iter_enumerated() {
- if global == did.krate {
- return Some(DefId { krate: local, index: did.index });
- }
+ fn get_attributes(&self, item: &Entry<'tcx>) -> Vec<ast::Attribute> {
+ item.attributes.decode(self).map(|mut attr| {
+ // Need new unique IDs: old thread-local IDs won't map to new threads.
+ attr.node.id = attr::mk_attr_id();
+ attr
+ }).collect()
}
- None
-}
-
-/// Translates a `Span` from an extern crate to the corresponding `Span`
-/// within the local crate's codemap.
-pub fn translate_span(cdata: Cmd,
- codemap: &codemap::CodeMap,
- last_filemap_index_hint: &Cell<usize>,
- span: syntax_pos::Span)
- -> syntax_pos::Span {
- let span = if span.lo > span.hi {
- // Currently macro expansion sometimes produces invalid Span values
- // where lo > hi. In order not to crash the compiler when trying to
- // translate these values, let's transform them into something we
- // can handle (and which will produce useful debug locations at
- // least some of the time).
- // This workaround is only necessary as long as macro expansion is
- // not fixed. FIXME(#23480)
- syntax_pos::mk_sp(span.lo, span.lo)
- } else {
- span
- };
-
- let imported_filemaps = cdata.imported_filemaps(&codemap);
- let filemap = {
- // Optimize for the case that most spans within a translated item
- // originate from the same filemap.
- let last_filemap_index = last_filemap_index_hint.get();
- let last_filemap = &imported_filemaps[last_filemap_index];
-
- if span.lo >= last_filemap.original_start_pos &&
- span.lo <= last_filemap.original_end_pos &&
- span.hi >= last_filemap.original_start_pos &&
- span.hi <= last_filemap.original_end_pos {
- last_filemap
- } else {
- let mut a = 0;
- let mut b = imported_filemaps.len();
-
- while b - a > 1 {
- let m = (a + b) / 2;
- if imported_filemaps[m].original_start_pos > span.lo {
- b = m;
- } else {
- a = m;
- }
+ // Translate a DefId from the current compilation environment to a DefId
+ // for an external crate.
+ fn reverse_translate_def_id(&self, did: DefId) -> Option<DefId> {
+ for (local, &global) in self.cnum_map.borrow().iter_enumerated() {
+ if global == did.krate {
+ return Some(DefId { krate: local, index: did.index });
}
-
- last_filemap_index_hint.set(a);
- &imported_filemaps[a]
}
- };
-
- let lo = (span.lo - filemap.original_start_pos) +
- filemap.translated_filemap.start_pos;
- let hi = (span.hi - filemap.original_start_pos) +
- filemap.translated_filemap.start_pos;
- syntax_pos::mk_sp(lo, hi)
-}
+ None
+ }
-pub fn each_inherent_implementation_for_type<F>(cdata: Cmd,
- id: DefIndex,
- mut callback: F)
- where F: FnMut(DefId),
-{
- let item_doc = cdata.lookup_item(id);
- for impl_doc in reader::tagged_docs(item_doc, tag_items_data_item_inherent_impl) {
- if reader::maybe_get_doc(impl_doc, tag_item_trait_ref).is_none() {
- callback(item_def_id(impl_doc, cdata));
- }
+ pub fn get_inherent_implementations_for_type(&self, id: DefIndex) -> Vec<DefId> {
+ self.entry(id).inherent_impls.decode(self).map(|index| {
+ self.local_def_id(index)
+ }).collect()
}
-}
-pub fn each_implementation_for_trait<F>(cdata: Cmd,
- def_id: DefId,
- mut callback: F) where
- F: FnMut(DefId),
-{
- // Do a reverse lookup beforehand to avoid touching the crate_num
- // hash map in the loop below.
- if let Some(crate_local_did) = reverse_translate_def_id(cdata, def_id) {
- let def_id_u64 = def_to_u64(crate_local_did);
-
- let impls_doc = reader::get_doc(rbml::Doc::new(cdata.data()), tag_impls);
- for trait_doc in reader::tagged_docs(impls_doc, tag_impls_trait) {
- let trait_def_id = reader::get_doc(trait_doc, tag_def_id);
- if reader::doc_as_u64(trait_def_id) != def_id_u64 {
+ pub fn get_implementations_for_trait(&self, filter: Option<DefId>, result: &mut Vec<DefId>) {
+ // Do a reverse lookup beforehand to avoid touching the crate_num
+ // hash map in the loop below.
+ let filter = match filter.map(|def_id| self.reverse_translate_def_id(def_id)) {
+ Some(Some(def_id)) => Some((def_id.krate.as_u32(), def_id.index)),
+ Some(None) => return,
+ None => None
+ };
+
+ // FIXME(eddyb) Make this O(1) instead of O(n).
+ for trait_impls in self.root.impls.decode(self) {
+ if filter.is_some() && filter != Some(trait_impls.trait_id) {
continue;
}
- for impl_doc in reader::tagged_docs(trait_doc, tag_impls_trait_impl) {
- callback(translated_def_id(cdata, impl_doc));
+
+ result.extend(trait_impls.impls.decode(self).map(|index| {
+ self.local_def_id(index)
+ }));
+
+ if filter.is_some() {
+ break;
}
}
}
-}
-pub fn get_trait_of_item(cdata: Cmd, id: DefIndex) -> Option<DefId> {
- let item_doc = cdata.lookup_item(id);
- let parent_item_id = match item_parent_item(cdata, item_doc) {
- None => return None,
- Some(item_id) => item_id,
- };
- let parent_item_doc = cdata.lookup_item(parent_item_id.index);
- match item_family(parent_item_doc) {
- Trait => Some(item_def_id(parent_item_doc, cdata)),
- _ => None
+ pub fn get_trait_of_item(&self, id: DefIndex) -> Option<DefId> {
+ self.entry(id).def_key.decode(self).parent.and_then(|parent_index| {
+ match self.entry(parent_index).kind {
+ EntryKind::Trait(_) => Some(self.local_def_id(parent_index)),
+ _ => None
+ }
+ })
}
-}
-pub fn get_native_libraries(cdata: Cmd)
- -> Vec<(cstore::NativeLibraryKind, String)> {
- let libraries = reader::get_doc(rbml::Doc::new(cdata.data()),
- tag_native_libraries);
- reader::tagged_docs(libraries, tag_native_libraries_lib).map(|lib_doc| {
- let kind_doc = reader::get_doc(lib_doc, tag_native_libraries_kind);
- let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name);
- let kind: cstore::NativeLibraryKind =
- cstore::NativeLibraryKind::from_u32(reader::doc_as_u32(kind_doc)).unwrap();
- let name = name_doc.as_str().to_string();
- (kind, name)
- }).collect()
-}
+ pub fn get_native_libraries(&self) -> Vec<(NativeLibraryKind, String)> {
+ self.root.native_libraries.decode(self).collect()
+ }
-pub fn get_plugin_registrar_fn(data: &[u8]) -> Option<DefIndex> {
- reader::maybe_get_doc(rbml::Doc::new(data), tag_plugin_registrar_fn)
- .map(|doc| DefIndex::from_u32(reader::doc_as_u32(doc)))
-}
+ pub fn get_dylib_dependency_formats(&self) -> Vec<(CrateNum, LinkagePreference)> {
+ self.root.dylib_dependency_formats.decode(self).enumerate().flat_map(|(i, link)| {
+ let cnum = CrateNum::new(i + 1);
+ link.map(|link| (self.cnum_map.borrow()[cnum], link))
+ }).collect()
+ }
-pub fn each_exported_macro<F>(data: &[u8], mut f: F) where
- F: FnMut(ast::Name, Vec<ast::Attribute>, Span, String) -> bool,
-{
- let macros = reader::get_doc(rbml::Doc::new(data), tag_macro_defs);
- for macro_doc in reader::tagged_docs(macros, tag_macro_def) {
- let name = item_name(macro_doc);
- let attrs = get_attributes(macro_doc);
- let span = get_macro_span(macro_doc);
- let body = reader::get_doc(macro_doc, tag_macro_def_body);
- if !f(name, attrs, span, body.as_str().to_string()) {
- break;
- }
+ pub fn get_missing_lang_items(&self) -> Vec<lang_items::LangItem> {
+ self.root.lang_items_missing.decode(self).collect()
}
-}
-pub fn get_derive_registrar_fn(data: &[u8]) -> Option<DefIndex> {
- reader::maybe_get_doc(rbml::Doc::new(data), tag_macro_derive_registrar)
- .map(|doc| DefIndex::from_u32(reader::doc_as_u32(doc)))
-}
+ pub fn get_fn_arg_names(&self, id: DefIndex) -> Vec<ast::Name> {
+ let arg_names = match self.entry(id).kind {
+ EntryKind::Fn(data) |
+ EntryKind::ForeignFn(data) => data.decode(self).arg_names,
+ EntryKind::Method(data) => data.decode(self).fn_data.arg_names,
+ _ => LazySeq::empty()
+ };
+ arg_names.decode(self).collect()
+ }
-pub fn get_macro_span(doc: rbml::Doc) -> Span {
- let lo_doc = reader::get_doc(doc, tag_macro_def_span_lo);
- let lo = BytePos(reader::doc_as_u32(lo_doc));
- let hi_doc = reader::get_doc(doc, tag_macro_def_span_hi);
- let hi = BytePos(reader::doc_as_u32(hi_doc));
- return Span { lo: lo, hi: hi, expn_id: NO_EXPANSION };
-}
+ pub fn get_reachable_ids(&self) -> Vec<DefId> {
+ self.root.reachable_ids.decode(self).map(|index| self.local_def_id(index)).collect()
+ }
-pub fn get_dylib_dependency_formats(cdata: Cmd)
- -> Vec<(ast::CrateNum, LinkagePreference)>
-{
- let formats = reader::get_doc(rbml::Doc::new(cdata.data()),
- tag_dylib_dependency_formats);
- let mut result = Vec::new();
-
- debug!("found dylib deps: {}", formats.as_str());
- for spec in formats.as_str().split(',') {
- if spec.is_empty() { continue }
- let mut split = spec.split(':');
- let cnum = split.next().unwrap();
- let link = split.next().unwrap();
- let cnum: ast::CrateNum = cnum.parse().unwrap();
- let cnum = cdata.cnum_map.borrow()[cnum];
- result.push((cnum, if link == "d" {
- LinkagePreference::RequireDynamic
- } else {
- LinkagePreference::RequireStatic
- }));
+ pub fn is_const_fn(&self, id: DefIndex) -> bool {
+ let constness = match self.entry(id).kind {
+ EntryKind::Method(data) => data.decode(self).fn_data.constness,
+ EntryKind::Fn(data) => data.decode(self).constness,
+ _ => hir::Constness::NotConst
+ };
+ constness == hir::Constness::Const
}
- return result;
-}
-pub fn get_missing_lang_items(cdata: Cmd)
- -> Vec<lang_items::LangItem>
-{
- let items = reader::get_doc(rbml::Doc::new(cdata.data()), tag_lang_items);
- reader::tagged_docs(items, tag_lang_items_missing).map(|missing_docs| {
- lang_items::LangItem::from_u32(reader::doc_as_u32(missing_docs)).unwrap()
- }).collect()
-}
+ pub fn is_extern_item(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+ let item = match self.maybe_entry(id) {
+ Some(item) => item.decode(self),
+ None => return false,
+ };
+ let applicable = match item.kind {
+ EntryKind::ImmStatic |
+ EntryKind::MutStatic |
+ EntryKind::ForeignImmStatic |
+ EntryKind::ForeignMutStatic => true,
+
+ EntryKind::Fn(_) | EntryKind::ForeignFn(_) => {
+ self.get_generics(id, tcx).types.is_empty()
+ }
-pub fn get_method_arg_names(cdata: Cmd, id: DefIndex) -> Vec<String> {
- let method_doc = cdata.lookup_item(id);
- match reader::maybe_get_doc(method_doc, tag_method_argument_names) {
- Some(args_doc) => {
- reader::tagged_docs(args_doc, tag_method_argument_name).map(|name_doc| {
- name_doc.as_str().to_string()
- }).collect()
- },
- None => vec![],
- }
-}
+ _ => false,
+ };
-pub fn get_reachable_ids(cdata: Cmd) -> Vec<DefId> {
- let items = reader::get_doc(rbml::Doc::new(cdata.data()),
- tag_reachable_ids);
- reader::tagged_docs(items, tag_reachable_id).map(|doc| {
- DefId {
- krate: cdata.cnum,
- index: DefIndex::from_u32(reader::doc_as_u32(doc)),
+ if applicable {
+ attr::contains_extern_indicator(tcx.sess.diagnostic(),
+ &self.get_attributes(&item))
+ } else {
+ false
}
- }).collect()
-}
-
-pub fn is_typedef(cdata: Cmd, id: DefIndex) -> bool {
- let item_doc = cdata.lookup_item(id);
- match item_family(item_doc) {
- Type => true,
- _ => false,
}
-}
-pub fn is_const_fn(cdata: Cmd, id: DefIndex) -> bool {
- let item_doc = cdata.lookup_item(id);
- match fn_constness(item_doc) {
- hir::Constness::Const => true,
- hir::Constness::NotConst => false,
+ pub fn is_foreign_item(&self, id: DefIndex) -> bool {
+ match self.entry(id).kind {
+ EntryKind::ForeignImmStatic |
+ EntryKind::ForeignMutStatic |
+ EntryKind::ForeignFn(_) => true,
+ _ => false
+ }
}
-}
-pub fn is_extern_item<'a, 'tcx>(cdata: Cmd,
- id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> bool {
- let item_doc = match cdata.get_item(id) {
- Some(doc) => doc,
- None => return false,
- };
- let applicable = match item_family(item_doc) {
- ImmStatic | MutStatic => true,
- Fn => get_generics(cdata, id, tcx).types.is_empty(),
- _ => false,
- };
-
- if applicable {
- attr::contains_extern_indicator(tcx.sess.diagnostic(),
- &get_attributes(item_doc))
- } else {
- false
+ pub fn is_defaulted_trait(&self, trait_id: DefIndex) -> bool {
+ match self.entry(trait_id).kind {
+ EntryKind::Trait(data) => data.decode(self).has_default_impl,
+ _ => bug!()
+ }
}
-}
-pub fn is_foreign_item(cdata: Cmd, id: DefIndex) -> bool {
- let item_doc = cdata.lookup_item(id);
- let parent_item_id = match item_parent_item(cdata, item_doc) {
- None => return false,
- Some(item_id) => item_id,
- };
- let parent_item_doc = cdata.lookup_item(parent_item_id.index);
- item_family(parent_item_doc) == ForeignMod
-}
+ pub fn is_default_impl(&self, impl_id: DefIndex) -> bool {
+ match self.entry(impl_id).kind {
+ EntryKind::DefaultImpl(_) => true,
+ _ => false
+ }
+ }
-pub fn is_impl(cdata: Cmd, id: DefIndex) -> bool {
- let item_doc = cdata.lookup_item(id);
- match item_family(item_doc) {
- Impl => true,
- _ => false,
+ pub fn closure_kind(&self, closure_id: DefIndex) -> ty::ClosureKind {
+ match self.entry(closure_id).kind {
+ EntryKind::Closure(data) => data.decode(self).kind,
+ _ => bug!()
+ }
}
-}
-fn doc_generics<'a, 'tcx>(base_doc: rbml::Doc,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cdata: Cmd)
- -> &'tcx ty::Generics<'tcx>
-{
- let doc = reader::get_doc(base_doc, tag_item_generics);
- TyDecoder::with_doc(tcx, cdata.cnum, doc,
- &mut |did| translate_def_id(cdata, did))
- .parse_generics()
-}
+ pub fn closure_ty(&self, closure_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> ty::ClosureTy<'tcx> {
+ match self.entry(closure_id).kind {
+ EntryKind::Closure(data) => data.decode(self).ty.decode((self, tcx)),
+ _ => bug!()
+ }
+ }
-fn doc_predicate<'a, 'tcx>(cdata: Cmd,
- doc: rbml::Doc,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::Predicate<'tcx>
-{
- let predicate_pos = cdata.xref_index.lookup(
- cdata.data(), reader::doc_as_u32(doc)).unwrap() as usize;
- TyDecoder::new(
- cdata.data(), cdata.cnum, predicate_pos, tcx,
- &mut |did| translate_def_id(cdata, did)
- ).parse_predicate()
-}
+ pub fn def_key(&self, id: DefIndex) -> hir_map::DefKey {
+ debug!("def_key: id={:?}", id);
+ self.entry(id).def_key.decode(self)
+ }
-fn doc_predicates<'a, 'tcx>(base_doc: rbml::Doc,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cdata: Cmd,
- tag: usize)
- -> ty::GenericPredicates<'tcx>
-{
- let doc = reader::get_doc(base_doc, tag);
-
- ty::GenericPredicates {
- parent: item_parent_item(cdata, doc),
- predicates: reader::tagged_docs(doc, tag_predicate).map(|predicate_doc| {
- doc_predicate(cdata, predicate_doc, tcx)
- }).collect()
+ // Returns the path leading to the thing with this `id`. Note that
+ // some def-ids don't wind up in the metadata, so `def_path` sometimes
+ // returns `None`
+ pub fn def_path(&self, id: DefIndex) -> Option<hir_map::DefPath> {
+ debug!("def_path(id={:?})", id);
+ if self.maybe_entry(id).is_some() {
+ Some(hir_map::DefPath::make(self.cnum, id, |parent| self.def_key(parent)))
+ } else {
+ None
+ }
}
-}
-pub fn is_defaulted_trait(cdata: Cmd, trait_id: DefIndex) -> bool {
- let trait_doc = cdata.lookup_item(trait_id);
- assert!(item_family(trait_doc) == Family::Trait);
- let defaulted_doc = reader::get_doc(trait_doc, tag_defaulted_trait);
- reader::doc_as_u8(defaulted_doc) != 0
-}
+ /// Imports the codemap from an external crate into the codemap of the crate
+ /// currently being compiled (the "local crate").
+ ///
+ /// The import algorithm works analogously to how AST items are inlined from an
+ /// external crate's metadata:
+ /// For every FileMap in the external codemap an 'inline' copy is created in the
+ /// local codemap. The correspondence relation between external and local
+ /// FileMaps is recorded in the `ImportedFileMap` objects returned from this
+ /// function. When an item from an external crate is later inlined into this
+ /// crate, this correspondence information is used to translate the span
+ /// information of the inlined item so that it refers to the correct positions in
+ /// the local codemap (see `<decoder::DecodeContext as SpecializedDecoder<Span>>`).
+ ///
+ /// The import algorithm in the function below will reuse FileMaps already
+ /// existing in the local codemap. For example, even if the FileMap of some
+ /// source file of libstd gets imported many times, there will only ever be
+ /// one FileMap object for the corresponding file in the local codemap.
+ ///
+ /// Note that imported FileMaps do not actually contain the source code of the
+ /// file they represent, just information about length, line breaks, and
+ /// multibyte characters. This information is enough to generate valid debuginfo
+ /// for items inlined from other crates.
+ pub fn imported_filemaps(&'a self, local_codemap: &codemap::CodeMap)
+ -> Ref<'a, Vec<cstore::ImportedFileMap>> {
+ {
+ let filemaps = self.codemap_import_info.borrow();
+ if !filemaps.is_empty() {
+ return filemaps;
+ }
+ }
-pub fn is_default_impl(cdata: Cmd, impl_id: DefIndex) -> bool {
- let impl_doc = cdata.lookup_item(impl_id);
- item_family(impl_doc) == Family::DefaultImpl
-}
+ let external_codemap = self.root.codemap.decode(self);
+
+ let imported_filemaps = external_codemap.map(|filemap_to_import| {
+ // Try to find an existing FileMap that can be reused for the filemap to
+ // be imported. A FileMap is reusable if it is exactly the same, just
+ // positioned at a different offset within the codemap.
+ let reusable_filemap = {
+ local_codemap.files
+ .borrow()
+ .iter()
+ .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import))
+ .map(|rc| rc.clone())
+ };
-pub fn get_imported_filemaps(metadata: &[u8]) -> Vec<syntax_pos::FileMap> {
- let crate_doc = rbml::Doc::new(metadata);
- let cm_doc = reader::get_doc(crate_doc, tag_codemap);
+ match reusable_filemap {
+ Some(fm) => {
+ cstore::ImportedFileMap {
+ original_start_pos: filemap_to_import.start_pos,
+ original_end_pos: filemap_to_import.end_pos,
+ translated_filemap: fm
+ }
+ }
+ None => {
+ // We can't reuse an existing FileMap, so allocate a new one
+ // containing the information we need.
+ let syntax_pos::FileMap {
+ name,
+ abs_path,
+ start_pos,
+ end_pos,
+ lines,
+ multibyte_chars,
+ ..
+ } = filemap_to_import;
+
+ let source_length = (end_pos - start_pos).to_usize();
+
+ // Translate line-start positions and multibyte character
+ // position into frame of reference local to file.
+ // `CodeMap::new_imported_filemap()` will then translate those
+ // coordinates to their new global frame of reference when the
+ // offset of the FileMap is known.
+ let mut lines = lines.into_inner();
+ for pos in &mut lines {
+ *pos = *pos - start_pos;
+ }
+ let mut multibyte_chars = multibyte_chars.into_inner();
+ for mbc in &mut multibyte_chars {
+ mbc.pos = mbc.pos - start_pos;
+ }
- reader::tagged_docs(cm_doc, tag_codemap_filemap).map(|filemap_doc| {
- let mut decoder = reader::Decoder::new(filemap_doc);
- decoder.read_opaque(|opaque_decoder, _| {
- Decodable::decode(opaque_decoder)
- }).unwrap()
- }).collect()
-}
+ let local_version = local_codemap.new_imported_filemap(name,
+ abs_path,
+ source_length,
+ lines,
+ multibyte_chars);
+ cstore::ImportedFileMap {
+ original_start_pos: start_pos,
+ original_end_pos: end_pos,
+ translated_filemap: local_version
+ }
+ }
+ }
+ }).collect();
-pub fn closure_kind(cdata: Cmd, closure_id: DefIndex) -> ty::ClosureKind {
- let closure_doc = cdata.lookup_item(closure_id);
- let closure_kind_doc = reader::get_doc(closure_doc, tag_items_closure_kind);
- let mut decoder = reader::Decoder::new(closure_kind_doc);
- ty::ClosureKind::decode(&mut decoder).unwrap()
+ // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref.
+ *self.codemap_import_info.borrow_mut() = imported_filemaps;
+ self.codemap_import_info.borrow()
+ }
}
-pub fn closure_ty<'a, 'tcx>(cdata: Cmd, closure_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::ClosureTy<'tcx> {
- let closure_doc = cdata.lookup_item(closure_id);
- let closure_ty_doc = reader::get_doc(closure_doc, tag_items_closure_ty);
- TyDecoder::with_doc(tcx, cdata.cnum, closure_ty_doc, &mut |did| translate_def_id(cdata, did))
- .parse_closure_ty()
-}
+fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap, fm2: &syntax_pos::FileMap) -> bool {
+ if fm1.name != fm2.name {
+ return false;
+ }
-pub fn def_key(cdata: Cmd, id: DefIndex) -> hir_map::DefKey {
- debug!("def_key: id={:?}", id);
- let item_doc = cdata.lookup_item(id);
- item_def_key(item_doc)
-}
+ let lines1 = fm1.lines.borrow();
+ let lines2 = fm2.lines.borrow();
-fn item_def_key(item_doc: rbml::Doc) -> hir_map::DefKey {
- match reader::maybe_get_doc(item_doc, tag_def_key) {
- Some(def_key_doc) => {
- let mut decoder = reader::Decoder::new(def_key_doc);
- let simple_key = def_key::DefKey::decode(&mut decoder).unwrap();
- let name = reader::maybe_get_doc(item_doc, tag_paths_data_name).map(|name| {
- token::intern(name.as_str()).as_str()
- });
- def_key::recover_def_key(simple_key, name)
- }
- None => {
- bug!("failed to find block with tag {:?} for item with family {:?}",
- tag_def_key,
- item_family(item_doc))
+ if lines1.len() != lines2.len() {
+ return false;
+ }
+
+ for (&line1, &line2) in lines1.iter().zip(lines2.iter()) {
+ if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) {
+ return false;
}
}
-}
-// Returns the path leading to the thing with this `id`. Note that
-// some def-ids don't wind up in the metadata, so `def_path` sometimes
-// returns `None`
-pub fn def_path(cdata: Cmd, id: DefIndex) -> Option<hir_map::DefPath> {
- debug!("def_path(id={:?})", id);
- if cdata.get_item(id).is_some() {
- Some(hir_map::DefPath::make(cdata.cnum, id, |parent| def_key(cdata, parent)))
- } else {
- None
+ let multibytes1 = fm1.multibyte_chars.borrow();
+ let multibytes2 = fm2.multibyte_chars.borrow();
+
+ if multibytes1.len() != multibytes2.len() {
+ return false;
}
-}
-pub fn get_panic_strategy(data: &[u8]) -> PanicStrategy {
- let crate_doc = rbml::Doc::new(data);
- let strat_doc = reader::get_doc(crate_doc, tag_panic_strategy);
- match reader::doc_as_u8(strat_doc) {
- b'U' => PanicStrategy::Unwind,
- b'A' => PanicStrategy::Abort,
- b => panic!("unknown panic strategy in metadata: {}", b),
+ for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) {
+ if (mb1.bytes != mb2.bytes) ||
+ ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) {
+ return false;
+ }
}
+
+ true
}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::hir::def_id::DefIndex;
-use rustc::hir::map as hir_map;
-use syntax::parse::token::InternedString;
-
-#[derive(RustcEncodable, RustcDecodable)]
-pub struct DefKey {
- pub parent: Option<DefIndex>,
- pub disambiguated_data: DisambiguatedDefPathData,
-}
-
-#[derive(RustcEncodable, RustcDecodable)]
-pub struct DisambiguatedDefPathData {
- pub data: DefPathData,
- pub disambiguator: u32,
-}
-
-#[derive(RustcEncodable, RustcDecodable)]
-pub enum DefPathData {
- CrateRoot,
- Misc,
- Impl,
- TypeNs,
- ValueNs,
- Module,
- MacroDef,
- ClosureExpr,
- TypeParam,
- LifetimeDef,
- EnumVariant,
- Field,
- StructCtor,
- Initializer,
- Binding,
- ImplTrait,
-}
-
-pub fn simplify_def_key(key: hir_map::DefKey) -> DefKey {
- let data = DisambiguatedDefPathData {
- data: simplify_def_path_data(key.disambiguated_data.data),
- disambiguator: key.disambiguated_data.disambiguator,
- };
- DefKey {
- parent: key.parent,
- disambiguated_data: data,
- }
-}
-
-fn simplify_def_path_data(data: hir_map::DefPathData) -> DefPathData {
- match data {
- hir_map::DefPathData::CrateRoot => DefPathData::CrateRoot,
- hir_map::DefPathData::InlinedRoot(_) => bug!("unexpected DefPathData"),
- hir_map::DefPathData::Misc => DefPathData::Misc,
- hir_map::DefPathData::Impl => DefPathData::Impl,
- hir_map::DefPathData::TypeNs(_) => DefPathData::TypeNs,
- hir_map::DefPathData::ValueNs(_) => DefPathData::ValueNs,
- hir_map::DefPathData::Module(_) => DefPathData::Module,
- hir_map::DefPathData::MacroDef(_) => DefPathData::MacroDef,
- hir_map::DefPathData::ClosureExpr => DefPathData::ClosureExpr,
- hir_map::DefPathData::TypeParam(_) => DefPathData::TypeParam,
- hir_map::DefPathData::LifetimeDef(_) => DefPathData::LifetimeDef,
- hir_map::DefPathData::EnumVariant(_) => DefPathData::EnumVariant,
- hir_map::DefPathData::Field(_) => DefPathData::Field,
- hir_map::DefPathData::StructCtor => DefPathData::StructCtor,
- hir_map::DefPathData::Initializer => DefPathData::Initializer,
- hir_map::DefPathData::Binding(_) => DefPathData::Binding,
- hir_map::DefPathData::ImplTrait => DefPathData::ImplTrait,
- }
-}
-
-pub fn recover_def_key(key: DefKey, name: Option<InternedString>) -> hir_map::DefKey {
- let data = hir_map::DisambiguatedDefPathData {
- data: recover_def_path_data(key.disambiguated_data.data, name),
- disambiguator: key.disambiguated_data.disambiguator,
- };
- hir_map::DefKey {
- parent: key.parent,
- disambiguated_data: data,
- }
-}
-
-fn recover_def_path_data(data: DefPathData, name: Option<InternedString>) -> hir_map::DefPathData {
- match data {
- DefPathData::CrateRoot => hir_map::DefPathData::CrateRoot,
- DefPathData::Misc => hir_map::DefPathData::Misc,
- DefPathData::Impl => hir_map::DefPathData::Impl,
- DefPathData::TypeNs => hir_map::DefPathData::TypeNs(name.unwrap()),
- DefPathData::ValueNs => hir_map::DefPathData::ValueNs(name.unwrap()),
- DefPathData::Module => hir_map::DefPathData::Module(name.unwrap()),
- DefPathData::MacroDef => hir_map::DefPathData::MacroDef(name.unwrap()),
- DefPathData::ClosureExpr => hir_map::DefPathData::ClosureExpr,
- DefPathData::TypeParam => hir_map::DefPathData::TypeParam(name.unwrap()),
- DefPathData::LifetimeDef => hir_map::DefPathData::LifetimeDef(name.unwrap()),
- DefPathData::EnumVariant => hir_map::DefPathData::EnumVariant(name.unwrap()),
- DefPathData::Field => hir_map::DefPathData::Field(name.unwrap()),
- DefPathData::StructCtor => hir_map::DefPathData::StructCtor,
- DefPathData::Initializer => hir_map::DefPathData::Initializer,
- DefPathData::Binding => hir_map::DefPathData::Binding(name.unwrap()),
- DefPathData::ImplTrait => hir_map::DefPathData::ImplTrait,
- }
-}
The rust compiler cannot link to an external library if you don't give it its
name. Example:
-```
+```ignore
#[link(name = "some_lib")] extern {} // ok!
```
"##,
Erroneous code example:
-```compile_fail,E0455
-#[link(name = "FooCoreServices", kind = "framework")] extern {}
+```ignore
+#[link(name = "FooCoreServices", kind = "framework")] extern {}
// OS used to compile is Linux for example
```
Please add the name parameter to allow the rust compiler to find the library
you want. Example:
-```
+```ignore
#[link(kind = "dylib", name = "some_lib")] extern {} // ok!
```
"##,
well, and you link to them the same way.
"##,
+E0466: r##"
+Macro import declarations were malformed.
+
+Erroneous code examples:
+
+```compile_fail,E0466
+#[macro_use(a_macro(another_macro))] // error: invalid import declaration
+extern crate core as some_crate;
+
+#[macro_use(i_want = "some_macros")] // error: invalid import declaration
+extern crate core as another_crate;
+```
+
+This is a syntax error at the level of attribute declarations. The proper
+syntax for macro imports is the following:
+
+```ignore
+// In some_crate:
+#[macro_export]
+macro_rules! get_tacos {
+ ...
+}
+
+#[macro_export]
+macro_rules! get_pimientos {
+ ...
+}
+
+// In your crate:
+#[macro_use(get_tacos, get_pimientos)] // It imports `get_tacos` and
+extern crate some_crate; // `get_pimientos` macros from some_crate
+```
+
+If you would like to import all exported macros, write `macro_use` with no
+arguments.
+"##,
+
+E0467: r##"
+Macro reexport declarations were empty or malformed.
+
+Erroneous code examples:
+
+```compile_fail,E0467
+#[macro_reexport] // error: no macros listed for export
+extern crate core as macros_for_good;
+
+#[macro_reexport(fun_macro = "foo")] // error: not a macro identifier
+extern crate core as other_macros_for_good;
+```
+
+This is a syntax error at the level of attribute declarations.
+
+Currently, `macro_reexport` requires at least one macro name to be listed.
+Unlike `macro_use`, listing no names does not reexport all macros from the
+given crate.
+
+Decide which macros you would like to export and list them properly.
+
+These are proper reexport declarations:
+
+```ignore
+#[macro_reexport(some_macro, another_macro)]
+extern crate macros_for_good;
+```
+"##,
+
+E0468: r##"
+A non-root module attempts to import macros from another crate.
+
+Example of erroneous code:
+
+```compile_fail,E0468
+mod foo {
+ #[macro_use(helpful_macro)] // error: must be at crate root to import
+ extern crate core; // macros from another crate
+ helpful_macro!(...);
+}
+```
+
+Only `extern crate` imports at the crate root level are allowed to import
+macros.
+
+Either move the macro import to crate root or do without the foreign macros.
+This will work:
+
+```ignore
+#[macro_use(helpful_macro)]
+extern crate some_crate;
+
+mod foo {
+ helpful_macro!(...)
+}
+```
+"##,
+
+E0469: r##"
+A macro listed for import was not found.
+
+Erroneous code example:
+
+```compile_fail,E0469
+#[macro_use(drink, be_merry)] // error: imported macro not found
+extern crate collections;
+
+fn main() {
+ // ...
+}
+```
+
+Either the listed macro is not contained in the imported crate, or it is not
+exported from the given crate.
+
+This could be caused by a typo. Did you misspell the macro's name?
+
+Double-check the names of the macros listed for import, and that the crate
+in question exports them.
+
+A working version would be:
+
+```ignore
+// In some_crate crate:
+#[macro_export]
+macro_rules! eat {
+ ...
+}
+
+#[macro_export]
+macro_rules! drink {
+ ...
+}
+
+// In your crate:
+#[macro_use(eat, drink)]
+extern crate some_crate; //ok!
+```
+"##,
+
+E0470: r##"
+A macro listed for reexport was not found.
+
+Erroneous code example:
+
+```compile_fail,E0470
+#[macro_reexport(drink, be_merry)]
+extern crate collections;
+
+fn main() {
+ // ...
+}
+```
+
+Either the listed macro is not contained in the imported crate, or it is not
+exported from the given crate.
+
+This could be caused by a typo. Did you misspell the macro's name?
+
+Double-check the names of the macros listed for reexport, and that the crate
+in question exports them.
+
+A working version:
+
+```ignore
+// In some_crate crate:
+#[macro_export]
+macro_rules! eat {
+ ...
+}
+
+#[macro_export]
+macro_rules! drink {
+ ...
+}
+
+// In your_crate:
+#[macro_reexport(eat, drink)]
+extern crate some_crate;
+```
+"##,
+
}
register_diagnostics! {
E0462, // found staticlib `..` instead of rlib or dylib
E0464, // multiple matching crates for `..`
E0465, // multiple .. candidates for `..` found
- E0466, // bad macro import
- E0467, // bad macro reexport
- E0468, // an `extern crate` loading macros must be at the crate root
- E0469, // imported macro not found
- E0470, // reexported macro not found
E0519, // local crate and dependency have same (crate-name, disambiguator)
E0523, // two dependencies have same (crate-name, disambiguator) but different SVH
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// Metadata encoding
-
-#![allow(unused_must_use)] // everything is just a MemWriter, can't fail
-#![allow(non_camel_case_types)]
-
-use astencode::encode_inlined_item;
-use common::*;
use cstore;
-use decoder;
-use def_key;
-use tyencode;
-use index::{self, IndexData};
+use index::Index;
+use schema::*;
-use middle::cstore::{InlinedItemRef, LinkMeta, tls};
+use rustc::middle::cstore::{InlinedItemRef, LinkMeta};
+use rustc::middle::cstore::{LinkagePreference, NativeLibraryKind};
use rustc::hir::def;
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
-use middle::dependency_format::Linkage;
-use rustc::dep_graph::DepNode;
+use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId};
+use rustc::middle::dependency_format::Linkage;
+use rustc::middle::lang_items;
+use rustc::mir;
use rustc::traits::specialization_graph;
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::hir::svh::Svh;
use rustc::mir::mir_map::MirMap;
-use rustc::session::config::{self, PanicStrategy, CrateTypeRustcMacro};
+use rustc::session::config::{self, CrateTypeRustcMacro};
use rustc::util::nodemap::{FnvHashMap, NodeSet};
-use rustc_serialize::Encodable;
-use std::cell::RefCell;
+use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
+use std::hash::Hash;
+use std::intrinsics;
use std::io::prelude::*;
-use std::io::{Cursor, SeekFrom};
+use std::io::Cursor;
use std::rc::Rc;
use std::u32;
-use syntax::ast::{self, NodeId, Name, CRATE_NODE_ID, CrateNum};
+use syntax::ast::{self, CRATE_NODE_ID};
use syntax::attr;
-use errors::Handler;
use syntax;
-use syntax_pos::BytePos;
-use rbml::writer::Encoder;
+use syntax_pos;
use rustc::hir::{self, PatKind};
use rustc::hir::intravisit::Visitor;
use rustc::hir::intravisit;
-use rustc::hir::map::DefKey;
-use super::index_builder::{FromId, IndexBuilder, ItemContentBuilder, Untracked, XRef};
+use super::index_builder::{FromId, IndexBuilder, Untracked};
pub struct EncodeContext<'a, 'tcx: 'a> {
- pub diag: &'a Handler,
+ opaque: opaque::Encoder<'a>,
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pub reexports: &'a def::ExportMap,
- pub link_meta: &'a LinkMeta,
- pub cstore: &'a cstore::CStore,
- pub type_abbrevs: tyencode::abbrev_map<'tcx>,
- pub reachable: &'a NodeSet,
- pub mir_map: &'a MirMap<'tcx>,
+ reexports: &'a def::ExportMap,
+ link_meta: &'a LinkMeta,
+ cstore: &'a cstore::CStore,
+ reachable: &'a NodeSet,
+ mir_map: &'a MirMap<'tcx>,
+
+ lazy_state: LazyState,
+ type_shorthands: FnvHashMap<Ty<'tcx>, usize>,
+ predicate_shorthands: FnvHashMap<ty::Predicate<'tcx>, usize>,
}
-impl<'a, 'tcx> EncodeContext<'a,'tcx> {
- fn local_id(&self, def_id: DefId) -> NodeId {
- self.tcx.map.as_local_node_id(def_id).unwrap()
+macro_rules! encoder_methods {
+ ($($name:ident($ty:ty);)*) => {
+ $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> {
+ self.opaque.$name(value)
+ })*
}
}
-fn encode_name(rbml_w: &mut Encoder, name: Name) {
- rbml_w.wr_tagged_str(tag_paths_data_name, &name.as_str());
-}
+impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
+ type Error = <opaque::Encoder<'a> as Encoder>::Error;
-fn encode_def_id(rbml_w: &mut Encoder, id: DefId) {
- rbml_w.wr_tagged_u64(tag_def_id, def_to_u64(id));
-}
+ fn emit_nil(&mut self) -> Result<(), Self::Error> {
+ Ok(())
+ }
-fn encode_def_key(rbml_w: &mut Encoder, key: DefKey) {
- let simple_key = def_key::simplify_def_key(key);
- rbml_w.start_tag(tag_def_key);
- simple_key.encode(rbml_w);
- rbml_w.end_tag();
-}
+ encoder_methods! {
+ emit_usize(usize);
+ emit_u64(u64);
+ emit_u32(u32);
+ emit_u16(u16);
+ emit_u8(u8);
-/// For every DefId that we create a metadata item for, we include a
-/// serialized copy of its DefKey, which allows us to recreate a path.
-fn encode_def_id_and_key(ecx: &EncodeContext,
- rbml_w: &mut Encoder,
- def_id: DefId)
-{
- encode_def_id(rbml_w, def_id);
- let def_key = ecx.tcx.map.def_key(def_id);
- encode_def_key(rbml_w, def_key);
-}
+ emit_isize(isize);
+ emit_i64(i64);
+ emit_i32(i32);
+ emit_i16(i16);
+ emit_i8(i8);
-fn encode_trait_ref<'a, 'tcx>(rbml_w: &mut Encoder,
- ecx: &EncodeContext<'a, 'tcx>,
- trait_ref: ty::TraitRef<'tcx>,
- tag: usize) {
- rbml_w.start_tag(tag);
- tyencode::enc_trait_ref(rbml_w.writer, &ecx.ty_str_ctxt(), trait_ref);
- rbml_w.mark_stable_position();
- rbml_w.end_tag();
+ emit_bool(bool);
+ emit_f64(f64);
+ emit_f32(f32);
+ emit_char(char);
+ emit_str(&str);
+ }
}
-// Item info table encoding
-fn encode_family(rbml_w: &mut Encoder, c: char) {
- rbml_w.wr_tagged_u8(tag_items_data_item_family, c as u8);
+impl<'a, 'tcx, T> SpecializedEncoder<Lazy<T>> for EncodeContext<'a, 'tcx> {
+ fn specialized_encode(&mut self, lazy: &Lazy<T>) -> Result<(), Self::Error> {
+ self.emit_lazy_distance(lazy.position, Lazy::<T>::min_size())
+ }
}
-pub fn def_to_u64(did: DefId) -> u64 {
- assert!(did.index.as_u32() < u32::MAX);
- (did.krate as u64) << 32 | (did.index.as_usize() as u64)
+impl<'a, 'tcx, T> SpecializedEncoder<LazySeq<T>> for EncodeContext<'a, 'tcx> {
+ fn specialized_encode(&mut self, seq: &LazySeq<T>) -> Result<(), Self::Error> {
+ self.emit_usize(seq.len)?;
+ if seq.len == 0 {
+ return Ok(());
+ }
+ self.emit_lazy_distance(seq.position, LazySeq::<T>::min_size(seq.len))
+ }
}
-pub fn def_to_string(_tcx: TyCtxt, did: DefId) -> String {
- format!("{}:{}", did.krate, did.index.as_usize())
+impl<'a, 'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'a, 'tcx> {
+ fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
+ self.encode_with_shorthand(ty, &ty.sty, |ecx| &mut ecx.type_shorthands)
+ }
}
-fn encode_item_variances(rbml_w: &mut Encoder,
- ecx: &EncodeContext,
- id: NodeId) {
- let v = ecx.tcx.item_variances(ecx.tcx.map.local_def_id(id));
- rbml_w.start_tag(tag_item_variances);
- v.encode(rbml_w);
- rbml_w.end_tag();
+impl<'a, 'tcx> SpecializedEncoder<ty::GenericPredicates<'tcx>> for EncodeContext<'a, 'tcx> {
+ fn specialized_encode(&mut self, predicates: &ty::GenericPredicates<'tcx>)
+ -> Result<(), Self::Error> {
+ predicates.parent.encode(self)?;
+ predicates.predicates.len().encode(self)?;
+ for predicate in &predicates.predicates {
+ self.encode_with_shorthand(predicate, predicate, |ecx| &mut ecx.predicate_shorthands)?
+ }
+ Ok(())
+ }
}
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
- fn encode_bounds_and_type_for_item(&mut self,
- id: NodeId) {
- let ecx = self.ecx();
- self.encode_bounds_and_type(&ecx.tcx.lookup_item_type(ecx.tcx.map.local_def_id(id)),
- &ecx.tcx.lookup_predicates(ecx.tcx.map.local_def_id(id)));
+impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
+ pub fn position(&self) -> usize {
+ self.opaque.position()
}
- fn encode_bounds_and_type(&mut self,
- scheme: &ty::TypeScheme<'tcx>,
- predicates: &ty::GenericPredicates<'tcx>) {
- self.encode_generics(&scheme.generics, &predicates);
- self.encode_type(scheme.ty);
+ fn emit_node<F: FnOnce(&mut Self, usize) -> R, R>(&mut self, f: F) -> R {
+ assert_eq!(self.lazy_state, LazyState::NoNode);
+ let pos = self.position();
+ self.lazy_state = LazyState::NodeStart(pos);
+ let r = f(self, pos);
+ self.lazy_state = LazyState::NoNode;
+ r
}
-}
-fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) {
- let id = def_to_u64(vid);
- rbml_w.wr_tagged_u64(tag_items_data_item_variant, id);
- rbml_w.wr_tagged_u64(tag_mod_child, id);
-}
+ fn emit_lazy_distance(&mut self, position: usize, min_size: usize)
+ -> Result<(), <Self as Encoder>::Error> {
+ let min_end = position + min_size;
+ let distance = match self.lazy_state {
+ LazyState::NoNode => {
+ bug!("emit_lazy_distance: outside of a metadata node")
+ }
+ LazyState::NodeStart(start) => {
+ assert!(min_end <= start);
+ start - min_end
+ }
+ LazyState::Previous(last_min_end) => {
+ assert!(last_min_end <= position);
+ position - last_min_end
+ }
+ };
+ self.lazy_state = LazyState::Previous(min_end);
+ self.emit_usize(distance)
+ }
-fn write_closure_type<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
- rbml_w: &mut Encoder,
- closure_type: &ty::ClosureTy<'tcx>) {
- tyencode::enc_closure_ty(rbml_w.writer, &ecx.ty_str_ctxt(), closure_type);
- rbml_w.mark_stable_position();
-}
+ pub fn lazy<T: Encodable>(&mut self, value: &T) -> Lazy<T> {
+ self.emit_node(|ecx, pos| {
+ value.encode(ecx).unwrap();
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
- fn encode_type(&mut self,
- typ: Ty<'tcx>) {
- let ecx = self.ecx;
- self.rbml_w.start_tag(tag_items_data_item_type);
- tyencode::enc_ty(self.rbml_w.writer, &ecx.ty_str_ctxt(), typ);
- self.rbml_w.mark_stable_position();
- self.rbml_w.end_tag();
+ assert!(pos + Lazy::<T>::min_size() <= ecx.position());
+ Lazy::with_position(pos)
+ })
}
- fn encode_disr_val(&mut self,
- disr_val: ty::Disr) {
- // convert to u64 so just the number is printed, without any type info
- self.rbml_w.wr_tagged_str(tag_disr_val, &disr_val.to_u64_unchecked().to_string());
+ fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
+ where I: IntoIterator<Item=T>, T: Encodable {
+ self.emit_node(|ecx, pos| {
+ let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count();
+
+ assert!(pos + LazySeq::<T>::min_size(len) <= ecx.position());
+ LazySeq::with_position_and_length(pos, len)
+ })
}
- fn encode_parent_item(&mut self, id: DefId) {
- self.rbml_w.wr_tagged_u64(tag_items_data_parent_item, def_to_u64(id));
+ fn lazy_seq_ref<'b, I, T>(&mut self, iter: I) -> LazySeq<T>
+ where I: IntoIterator<Item=&'b T>, T: 'b + Encodable {
+ self.emit_node(|ecx, pos| {
+ let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count();
+
+ assert!(pos + LazySeq::<T>::min_size(len) <= ecx.position());
+ LazySeq::with_position_and_length(pos, len)
+ })
}
- fn encode_struct_fields(&mut self,
- variant: ty::VariantDef) {
- for f in &variant.fields {
- if variant.kind == ty::VariantKind::Tuple {
- self.rbml_w.start_tag(tag_item_unnamed_field);
- } else {
- self.rbml_w.start_tag(tag_item_field);
- encode_name(self.rbml_w, f.name);
- }
- self.encode_struct_field_family(f.vis);
- encode_def_id(self.rbml_w, f.did);
- self.rbml_w.end_tag();
+ /// Encode the given value or a previously cached shorthand.
+ fn encode_with_shorthand<T, U, M>(&mut self, value: &T, variant: &U, map: M)
+ -> Result<(), <Self as Encoder>::Error>
+ where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap<T, usize>,
+ T: Clone + Eq + Hash,
+ U: Encodable {
+ let existing_shorthand = map(self).get(value).cloned();
+ if let Some(shorthand) = existing_shorthand {
+ return self.emit_usize(shorthand);
}
- }
-}
-impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> {
- fn encode_enum_variant_infos(&mut self,
- enum_did: DefId) {
- debug!("encode_enum_variant_info(enum_did={:?})", enum_did);
- let ecx = self.ecx();
- let def = ecx.tcx.lookup_adt_def(enum_did);
- self.encode_fields(enum_did);
- for (i, variant) in def.variants.iter().enumerate() {
- self.record(variant.did,
- ItemContentBuilder::encode_enum_variant_info,
- (enum_did, Untracked(i)));
+ let start = self.position();
+ variant.encode(self)?;
+ let len = self.position() - start;
+
+ // The shorthand encoding uses the same usize as the
+ // discriminant, with an offset so they can't conflict.
+ let discriminant = unsafe {
+ intrinsics::discriminant_value(variant)
+ };
+ assert!(discriminant < SHORTHAND_OFFSET as u64);
+ let shorthand = start + SHORTHAND_OFFSET;
+
+ // Get the number of bits that leb128 could fit
+ // in the same space as the fully encoded type.
+ let leb128_bits = len * 7;
+
+ // Check that the shorthand is a not longer than the
+ // full encoding itself, i.e. it's an obvious win.
+ if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) {
+ map(self).insert(value.clone(), shorthand);
}
+
+ Ok(())
+ }
+
+ /// For every DefId that we create a metadata item for, we include a
+ /// serialized copy of its DefKey, which allows us to recreate a path.
+ fn encode_def_key(&mut self, def_id: DefId) -> Lazy<hir::map::DefKey> {
+ let tcx = self.tcx;
+ self.lazy(&tcx.map.def_key(def_id))
+ }
+
+ fn encode_item_variances(&mut self, def_id: DefId) -> LazySeq<ty::Variance> {
+ let tcx = self.tcx;
+ self.lazy_seq(tcx.item_variances(def_id).iter().cloned())
+ }
+
+ fn encode_item_type(&mut self, def_id: DefId) -> Lazy<Ty<'tcx>> {
+ let tcx = self.tcx;
+ self.lazy(&tcx.lookup_item_type(def_id).ty)
}
-}
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
/// Encode data for the given variant of the given ADT. The
/// index of the variant is untracked: this is ok because we
/// will have to lookup the adt-def by its id, and that gives us
/// e.g., the length of the various vectors).
fn encode_enum_variant_info(&mut self,
(enum_did, Untracked(index)):
- (DefId, Untracked<usize>)) {
- let ecx = self.ecx;
- let def = ecx.tcx.lookup_adt_def(enum_did);
+ (DefId, Untracked<usize>)) -> Entry<'tcx> {
+ let tcx = self.tcx;
+ let def = tcx.lookup_adt_def(enum_did);
let variant = &def.variants[index];
- let vid = variant.did;
- let variant_node_id = ecx.local_id(vid);
- encode_def_id_and_key(ecx, self.rbml_w, vid);
- encode_family(self.rbml_w, match variant.kind {
- ty::VariantKind::Struct => 'V',
- ty::VariantKind::Tuple => 'v',
- ty::VariantKind::Unit => 'w',
- });
- encode_name(self.rbml_w, variant.name);
- self.encode_parent_item(enum_did);
-
- let enum_id = ecx.tcx.map.as_local_node_id(enum_did).unwrap();
- let enum_vis = &ecx.tcx.map.expect_item(enum_id).vis;
- self.encode_visibility(enum_vis);
-
- let attrs = ecx.tcx.get_attrs(vid);
- encode_attributes(self.rbml_w, &attrs);
- self.encode_repr_attrs(&attrs);
-
- let stab = ecx.tcx.lookup_stability(vid);
- let depr = ecx.tcx.lookup_deprecation(vid);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
-
- self.encode_struct_fields(variant);
- self.encode_disr_val(variant.disr_val);
- self.encode_bounds_and_type_for_item(variant_node_id);
- }
-}
-
-fn encode_reexports(ecx: &EncodeContext,
- rbml_w: &mut Encoder,
- id: NodeId) {
- debug!("(encoding info for module) encoding reexports for {}", id);
- match ecx.reexports.get(&id) {
- Some(exports) => {
- debug!("(encoding info for module) found reexports for {}", id);
- for exp in exports {
- debug!("(encoding info for module) reexport '{}' ({:?}) for \
- {}",
- exp.name,
- exp.def_id,
- id);
- rbml_w.start_tag(tag_items_data_item_reexport);
- rbml_w.wr_tagged_u64(tag_items_data_item_reexport_def_id,
- def_to_u64(exp.def_id));
- rbml_w.wr_tagged_str(tag_items_data_item_reexport_name,
- &exp.name.as_str());
- rbml_w.end_tag();
- }
- },
- None => debug!("(encoding info for module) found no reexports for {}", id),
+ let def_id = variant.did;
+
+ let data = VariantData {
+ kind: variant.kind,
+ disr: variant.disr_val.to_u64_unchecked(),
+ struct_ctor: None
+ };
+
+ let enum_id = tcx.map.as_local_node_id(enum_did).unwrap();
+ let enum_vis = &tcx.map.expect_item(enum_id).vis;
+
+ Entry {
+ kind: EntryKind::Variant(self.lazy(&data)),
+ visibility: enum_vis.simplify(),
+ def_key: self.encode_def_key(def_id),
+ attributes: self.encode_attributes(&tcx.get_attrs(def_id)),
+ children: self.lazy_seq(variant.fields.iter().map(|f| {
+ assert!(f.did.is_local());
+ f.did.index
+ })),
+ stability: self.encode_stability(def_id),
+ deprecation: self.encode_deprecation(def_id),
+
+ ty: Some(self.encode_item_type(def_id)),
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: Some(self.encode_generics(def_id)),
+ predicates: Some(self.encode_predicates(def_id)),
+
+ ast: None,
+ mir: None
+ }
}
-}
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
fn encode_info_for_mod(&mut self,
- FromId(id, (md, attrs, name, vis)):
- FromId<(&hir::Mod, &[ast::Attribute], Name, &hir::Visibility)>) {
- let ecx = self.ecx();
-
- encode_def_id_and_key(ecx, self.rbml_w, ecx.tcx.map.local_def_id(id));
- encode_family(self.rbml_w, 'm');
- encode_name(self.rbml_w, name);
- debug!("(encoding info for module) encoding info for module ID {}", id);
-
- // Encode info about all the module children.
- for item_id in &md.item_ids {
- self.rbml_w.wr_tagged_u64(tag_mod_child,
- def_to_u64(ecx.tcx.map.local_def_id(item_id.id)));
- }
-
- self.encode_visibility(vis);
-
- let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(id));
- let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(id));
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
-
- // Encode the reexports of this module, if this module is public.
- if *vis == hir::Public {
- debug!("(encoding info for module) encoding reexports for {}", id);
- encode_reexports(ecx, self.rbml_w, id);
+ FromId(id, (md, attrs, vis)):
+ FromId<(&hir::Mod, &[ast::Attribute], &hir::Visibility)>)
+ -> Entry<'tcx> {
+ let tcx = self.tcx;
+ let def_id = tcx.map.local_def_id(id);
+
+ let data = ModData {
+ reexports: match self.reexports.get(&id) {
+ Some(exports) if *vis == hir::Public => {
+ self.lazy_seq_ref(exports)
+ }
+ _ => LazySeq::empty()
+ }
+ };
+
+ Entry {
+ kind: EntryKind::Mod(self.lazy(&data)),
+ visibility: vis.simplify(),
+ def_key: self.encode_def_key(def_id),
+ attributes: self.encode_attributes(attrs),
+ children: self.lazy_seq(md.item_ids.iter().map(|item_id| {
+ tcx.map.local_def_id(item_id.id).index
+ })),
+ stability: self.encode_stability(def_id),
+ deprecation: self.encode_deprecation(def_id),
+
+ ty: None,
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: None,
+ predicates: None,
+
+ ast: None,
+ mir: None
}
- encode_attributes(self.rbml_w, attrs);
- }
-
- fn encode_struct_field_family(&mut self,
- visibility: ty::Visibility) {
- encode_family(self.rbml_w, if visibility.is_public() { 'g' } else { 'N' });
- }
-
- fn encode_visibility<T: HasVisibility>(&mut self, visibility: T) {
- let ch = if visibility.is_public() { 'y' } else { 'i' };
- self.rbml_w.wr_tagged_u8(tag_items_data_item_visibility, ch as u8);
}
}
-trait HasVisibility: Sized {
- fn is_public(self) -> bool;
+trait Visibility {
+ fn simplify(&self) -> ty::Visibility;
}
-impl<'a> HasVisibility for &'a hir::Visibility {
- fn is_public(self) -> bool {
- *self == hir::Public
- }
-}
-
-impl HasVisibility for ty::Visibility {
- fn is_public(self) -> bool {
- self == ty::Visibility::Public
- }
-}
-
-fn encode_constness(rbml_w: &mut Encoder, constness: hir::Constness) {
- rbml_w.start_tag(tag_items_data_item_constness);
- let ch = match constness {
- hir::Constness::Const => 'c',
- hir::Constness::NotConst => 'n',
- };
- rbml_w.wr_str(&ch.to_string());
- rbml_w.end_tag();
-}
-
-fn encode_defaultness(rbml_w: &mut Encoder, defaultness: hir::Defaultness) {
- let ch = match defaultness {
- hir::Defaultness::Default => 'd',
- hir::Defaultness::Final => 'f',
- };
- rbml_w.wr_tagged_u8(tag_items_data_item_defaultness, ch as u8);
-}
-
-fn encode_explicit_self(rbml_w: &mut Encoder,
- explicit_self: &ty::ExplicitSelfCategory) {
- let tag = tag_item_trait_method_explicit_self;
-
- // Encode the base self type.
- match *explicit_self {
- ty::ExplicitSelfCategory::Static => {
- rbml_w.wr_tagged_bytes(tag, &['s' as u8]);
- }
- ty::ExplicitSelfCategory::ByValue => {
- rbml_w.wr_tagged_bytes(tag, &['v' as u8]);
- }
- ty::ExplicitSelfCategory::ByBox => {
- rbml_w.wr_tagged_bytes(tag, &['~' as u8]);
- }
- ty::ExplicitSelfCategory::ByReference(_, m) => {
- // FIXME(#4846) encode custom lifetime
- let ch = encode_mutability(m);
- rbml_w.wr_tagged_bytes(tag, &['&' as u8, ch]);
+impl Visibility for hir::Visibility {
+ fn simplify(&self) -> ty::Visibility {
+ if *self == hir::Public {
+ ty::Visibility::Public
+ } else {
+ ty::Visibility::PrivateExternal
}
}
+}
- fn encode_mutability(m: hir::Mutability) -> u8 {
- match m {
- hir::MutImmutable => 'i' as u8,
- hir::MutMutable => 'm' as u8,
+impl Visibility for ty::Visibility {
+ fn simplify(&self) -> ty::Visibility {
+ if *self == ty::Visibility::Public {
+ ty::Visibility::Public
+ } else {
+ ty::Visibility::PrivateExternal
}
}
}
-fn encode_item_sort(rbml_w: &mut Encoder, sort: char) {
- rbml_w.wr_tagged_u8(tag_item_trait_item_sort, sort as u8);
-}
-
-impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> {
+impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
fn encode_fields(&mut self,
adt_def_id: DefId) {
- let def = self.ecx().tcx.lookup_adt_def(adt_def_id);
+ let def = self.tcx.lookup_adt_def(adt_def_id);
for (variant_index, variant) in def.variants.iter().enumerate() {
for (field_index, field) in variant.fields.iter().enumerate() {
self.record(field.did,
- ItemContentBuilder::encode_field,
+ EncodeContext::encode_field,
(adt_def_id, Untracked((variant_index, field_index))));
}
}
}
}
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
+impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
/// Encode data for the given field of the given variant of the
/// given ADT. The indices of the variant/field are untracked:
/// this is ok because we will have to lookup the adt-def by its
/// vectors).
fn encode_field(&mut self,
(adt_def_id, Untracked((variant_index, field_index))):
- (DefId, Untracked<(usize, usize)>)) {
- let ecx = self.ecx();
- let def = ecx.tcx.lookup_adt_def(adt_def_id);
- let variant = &def.variants[variant_index];
+ (DefId, Untracked<(usize, usize)>)) -> Entry<'tcx> {
+ let tcx = self.tcx;
+ let variant = &tcx.lookup_adt_def(adt_def_id).variants[variant_index];
let field = &variant.fields[field_index];
- let nm = field.name;
- let id = ecx.local_id(field.did);
- debug!("encode_field: encoding {} {}", nm, id);
-
- self.encode_struct_field_family(field.vis);
- encode_name(self.rbml_w, nm);
- self.encode_bounds_and_type_for_item(id);
- encode_def_id_and_key(ecx, self.rbml_w, field.did);
-
- let stab = ecx.tcx.lookup_stability(field.did);
- let depr = ecx.tcx.lookup_deprecation(field.did);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- }
-}
-
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
- fn encode_struct_ctor(&mut self,
- (struct_def_id, struct_node_id, ctor_node_id):
- (DefId, ast::NodeId, ast::NodeId)) {
- let ecx = self.ecx();
- let def = ecx.tcx.lookup_adt_def(struct_def_id);
- let variant = def.struct_variant();
- let item = ecx.tcx.map.expect_item(struct_node_id);
- let ctor_def_id = ecx.tcx.map.local_def_id(ctor_node_id);
- encode_def_id_and_key(ecx, self.rbml_w, ctor_def_id);
- encode_family(self.rbml_w, match variant.kind {
- ty::VariantKind::Struct => 'S',
- ty::VariantKind::Tuple => 's',
- ty::VariantKind::Unit => 'u',
- });
- self.encode_bounds_and_type_for_item(ctor_node_id);
- encode_name(self.rbml_w, item.name);
- self.encode_parent_item(struct_def_id);
-
- let stab = ecx.tcx.lookup_stability(ctor_def_id);
- let depr = ecx.tcx.lookup_deprecation(ctor_def_id);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
-
- // indicate that this is a tuple struct ctor, because
- // downstream users will normally want the tuple struct
- // definition, but without this there is no way for them
- // to tell that they actually have a ctor rather than a
- // normal function
- self.rbml_w.wr_tagged_bytes(tag_items_data_item_is_tuple_struct_ctor, &[]);
- }
-}
-
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
- fn encode_generics(&mut self,
- generics: &ty::Generics<'tcx>,
- predicates: &ty::GenericPredicates<'tcx>)
- {
- let ecx = self.ecx();
- self.rbml_w.start_tag(tag_item_generics);
- tyencode::enc_generics(self.rbml_w.writer, &ecx.ty_str_ctxt(), generics);
- self.rbml_w.mark_stable_position();
- self.rbml_w.end_tag();
- self.encode_predicates(predicates, tag_item_predicates);
- }
-
- fn encode_predicates(&mut self,
- predicates: &ty::GenericPredicates<'tcx>,
- tag: usize) {
- self.rbml_w.start_tag(tag);
- if let Some(def_id) = predicates.parent {
- self.rbml_w.wr_tagged_u64(tag_items_data_parent_item, def_to_u64(def_id));
+ let def_id = field.did;
+ let variant_id = tcx.map.as_local_node_id(variant.did).unwrap();
+ let variant_data = tcx.map.expect_variant_data(variant_id);
+
+ Entry {
+ kind: EntryKind::Field,
+ visibility: field.vis.simplify(),
+ def_key: self.encode_def_key(def_id),
+ attributes: self.encode_attributes(&variant_data.fields()[field_index].attrs),
+ children: LazySeq::empty(),
+ stability: self.encode_stability(def_id),
+ deprecation: self.encode_deprecation(def_id),
+
+ ty: Some(self.encode_item_type(def_id)),
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: Some(self.encode_generics(def_id)),
+ predicates: Some(self.encode_predicates(def_id)),
+
+ ast: None,
+ mir: None
}
- for predicate in &predicates.predicates {
- let xref = self.add_xref(XRef::Predicate(predicate.clone()));
- self.rbml_w.wr_tagged_u32(tag_predicate, xref);
- }
- self.rbml_w.end_tag();
}
- fn encode_method_ty_fields(&mut self,
- method_ty: &ty::Method<'tcx>) {
- let ecx = self.ecx();
- encode_def_id_and_key(ecx, self.rbml_w, method_ty.def_id);
- encode_name(self.rbml_w, method_ty.name);
- self.encode_generics(&method_ty.generics, &method_ty.predicates);
- self.encode_visibility(method_ty.vis);
- encode_explicit_self(self.rbml_w, &method_ty.explicit_self);
- match method_ty.explicit_self {
- ty::ExplicitSelfCategory::Static => {
- encode_family(self.rbml_w, STATIC_METHOD_FAMILY);
- }
- _ => encode_family(self.rbml_w, METHOD_FAMILY)
+ fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId))
+ -> Entry<'tcx> {
+ let variant = self.tcx.lookup_adt_def(adt_def_id).struct_variant();
+
+ let data = VariantData {
+ kind: variant.kind,
+ disr: variant.disr_val.to_u64_unchecked(),
+ struct_ctor: Some(def_id.index)
+ };
+
+ Entry {
+ kind: EntryKind::Struct(self.lazy(&data)),
+ visibility: ty::Visibility::Public,
+ def_key: self.encode_def_key(def_id),
+ attributes: LazySeq::empty(),
+ children: LazySeq::empty(),
+ stability: self.encode_stability(def_id),
+ deprecation: self.encode_deprecation(def_id),
+
+ ty: Some(self.encode_item_type(def_id)),
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: Some(self.encode_generics(def_id)),
+ predicates: Some(self.encode_predicates(def_id)),
+
+ ast: None,
+ mir: None
}
}
-}
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
- fn encode_info_for_trait_item(&mut self,
- (trait_def_id, item_def_id, trait_item):
- (DefId, DefId, &hir::TraitItem)) {
- let ecx = self.ecx;
- let tcx = ecx.tcx;
-
- self.encode_parent_item(trait_def_id);
+ fn encode_generics(&mut self, def_id: DefId) -> Lazy<ty::Generics<'tcx>> {
+ let tcx = self.tcx;
+ self.lazy(tcx.lookup_generics(def_id))
+ }
- let stab = tcx.lookup_stability(item_def_id);
- let depr = tcx.lookup_deprecation(item_def_id);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
+ fn encode_predicates(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
+ let tcx = self.tcx;
+ self.lazy(&tcx.lookup_predicates(def_id))
+ }
- let trait_item_type =
- tcx.impl_or_trait_item(item_def_id);
- let is_nonstatic_method;
- match trait_item_type {
- ty::ConstTraitItem(associated_const) => {
- encode_name(self.rbml_w, associated_const.name);
- encode_def_id_and_key(ecx, self.rbml_w, associated_const.def_id);
- self.encode_visibility(associated_const.vis);
+ fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
+ let tcx = self.tcx;
- encode_family(self.rbml_w, 'C');
+ let node_id = tcx.map.as_local_node_id(def_id).unwrap();
+ let ast_item = tcx.map.expect_trait_item(node_id);
+ let trait_item = tcx.impl_or_trait_item(def_id);
- self.encode_bounds_and_type_for_item(
- ecx.local_id(associated_const.def_id));
+ let container = |has_body| if has_body {
+ AssociatedContainer::TraitWithDefault
+ } else {
+ AssociatedContainer::TraitRequired
+ };
- is_nonstatic_method = false;
+ let kind = match trait_item {
+ ty::ConstTraitItem(ref associated_const) => {
+ EntryKind::AssociatedConst(container(associated_const.has_value))
}
- ty::MethodTraitItem(method_ty) => {
- let method_def_id = item_def_id;
-
- self.encode_method_ty_fields(&method_ty);
-
- match method_ty.explicit_self {
- ty::ExplicitSelfCategory::Static => {
- encode_family(self.rbml_w,
- STATIC_METHOD_FAMILY);
- }
- _ => {
- encode_family(self.rbml_w,
- METHOD_FAMILY);
+ ty::MethodTraitItem(ref method_ty) => {
+ let fn_data = if let hir::MethodTraitItem(ref sig, _) = ast_item.node {
+ FnData {
+ constness: hir::Constness::NotConst,
+ arg_names: self.encode_fn_arg_names(&sig.decl)
}
- }
- self.encode_bounds_and_type_for_item(ecx.local_id(method_def_id));
-
- is_nonstatic_method = method_ty.explicit_self !=
- ty::ExplicitSelfCategory::Static;
- }
- ty::TypeTraitItem(associated_type) => {
- encode_name(self.rbml_w, associated_type.name);
- encode_def_id_and_key(ecx, self.rbml_w, associated_type.def_id);
- encode_item_sort(self.rbml_w, 't');
- encode_family(self.rbml_w, 'y');
-
- if let Some(ty) = associated_type.ty {
- self.encode_type(ty);
- }
-
- is_nonstatic_method = false;
- }
- }
-
- encode_attributes(self.rbml_w, &trait_item.attrs);
- match trait_item.node {
- hir::ConstTraitItem(_, ref default) => {
- if default.is_some() {
- encode_item_sort(self.rbml_w, 'C');
} else {
- encode_item_sort(self.rbml_w, 'c');
- }
-
- encode_inlined_item(ecx, self.rbml_w,
- InlinedItemRef::TraitItem(trait_def_id, trait_item));
- self.encode_mir(trait_item.id);
+ bug!()
+ };
+ let data = MethodData {
+ fn_data: fn_data,
+ container: container(method_ty.has_body),
+ explicit_self: self.lazy(&method_ty.explicit_self)
+ };
+ EntryKind::Method(self.lazy(&data))
+ }
+ ty::TypeTraitItem(_) => {
+ EntryKind::AssociatedType(container(false))
}
- hir::MethodTraitItem(ref sig, ref body) => {
- // If this is a static method, we've already
- // encoded self.
- if is_nonstatic_method {
- self.encode_bounds_and_type_for_item(
- ecx.local_id(item_def_id));
+ };
+
+ Entry {
+ kind: kind,
+ visibility: trait_item.vis().simplify(),
+ def_key: self.encode_def_key(def_id),
+ attributes: self.encode_attributes(&ast_item.attrs),
+ children: LazySeq::empty(),
+ stability: self.encode_stability(def_id),
+ deprecation: self.encode_deprecation(def_id),
+
+ ty: match trait_item {
+ ty::ConstTraitItem(_) |
+ ty::MethodTraitItem(_) => {
+ Some(self.encode_item_type(def_id))
}
-
- if body.is_some() {
- encode_item_sort(self.rbml_w, 'p');
- self.encode_mir(trait_item.id);
- } else {
- encode_item_sort(self.rbml_w, 'r');
+ ty::TypeTraitItem(ref associated_type) => {
+ associated_type.ty.map(|ty| self.lazy(&ty))
}
- self.encode_method_argument_names(&sig.decl);
- }
-
- hir::TypeTraitItem(..) => {}
+ },
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: Some(self.encode_generics(def_id)),
+ predicates: Some(self.encode_predicates(def_id)),
+
+ ast: if let ty::ConstTraitItem(_) = trait_item {
+ let trait_def_id = trait_item.container().id();
+ Some(self.encode_inlined_item(InlinedItemRef::TraitItem(trait_def_id, ast_item)))
+ } else {
+ None
+ },
+ mir: self.encode_mir(def_id)
}
}
- fn encode_info_for_impl_item(&mut self,
- (impl_id, impl_item_def_id, ast_item):
- (NodeId, DefId, Option<&hir::ImplItem>)) {
- match self.ecx.tcx.impl_or_trait_item(impl_item_def_id) {
- ty::ConstTraitItem(ref associated_const) => {
- self.encode_info_for_associated_const(&associated_const,
- impl_id,
- ast_item)
+ fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
+ let node_id = self.tcx.map.as_local_node_id(def_id).unwrap();
+ let ast_item = self.tcx.map.expect_impl_item(node_id);
+ let impl_item = self.tcx.impl_or_trait_item(def_id);
+ let impl_def_id = impl_item.container().id();
+
+ let container = match ast_item.defaultness {
+ hir::Defaultness::Default => AssociatedContainer::ImplDefault,
+ hir::Defaultness::Final => AssociatedContainer::ImplFinal
+ };
+
+ let kind = match impl_item {
+ ty::ConstTraitItem(_) => {
+ EntryKind::AssociatedConst(container)
}
- ty::MethodTraitItem(ref method_type) => {
- self.encode_info_for_method(&method_type,
- false,
- impl_id,
- ast_item)
+ ty::MethodTraitItem(ref method_ty) => {
+ let fn_data = if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node {
+ FnData {
+ constness: sig.constness,
+ arg_names: self.encode_fn_arg_names(&sig.decl)
+ }
+ } else {
+ bug!()
+ };
+ let data = MethodData {
+ fn_data: fn_data,
+ container: container,
+ explicit_self: self.lazy(&method_ty.explicit_self)
+ };
+ EntryKind::Method(self.lazy(&data))
}
- ty::TypeTraitItem(ref associated_type) => {
- self.encode_info_for_associated_type(&associated_type,
- impl_id,
- ast_item)
+ ty::TypeTraitItem(_) => {
+ EntryKind::AssociatedType(container)
}
- }
- }
-
- fn encode_info_for_associated_const(&mut self,
- associated_const: &ty::AssociatedConst,
- parent_id: NodeId,
- impl_item_opt: Option<&hir::ImplItem>) {
- let ecx = self.ecx();
- debug!("encode_info_for_associated_const({:?},{:?})",
- associated_const.def_id,
- associated_const.name);
-
- encode_def_id_and_key(ecx, self.rbml_w, associated_const.def_id);
- encode_name(self.rbml_w, associated_const.name);
- self.encode_visibility(associated_const.vis);
- encode_family(self.rbml_w, 'C');
-
- self.encode_parent_item(ecx.tcx.map.local_def_id(parent_id));
- encode_item_sort(self.rbml_w, 'C');
-
- self.encode_bounds_and_type_for_item(ecx.local_id(associated_const.def_id));
-
- let stab = ecx.tcx.lookup_stability(associated_const.def_id);
- let depr = ecx.tcx.lookup_deprecation(associated_const.def_id);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
-
- if let Some(ii) = impl_item_opt {
- encode_attributes(self.rbml_w, &ii.attrs);
- encode_defaultness(self.rbml_w, ii.defaultness);
- encode_inlined_item(ecx,
- self.rbml_w,
- InlinedItemRef::ImplItem(ecx.tcx.map.local_def_id(parent_id),
- ii));
- self.encode_mir(ii.id);
- }
- }
-
- fn encode_info_for_method(&mut self,
- m: &ty::Method<'tcx>,
- is_default_impl: bool,
- parent_id: NodeId,
- impl_item_opt: Option<&hir::ImplItem>) {
- let ecx = self.ecx();
-
- debug!("encode_info_for_method: {:?} {:?}", m.def_id,
- m.name);
- self.encode_method_ty_fields(m);
- self.encode_parent_item(ecx.tcx.map.local_def_id(parent_id));
- encode_item_sort(self.rbml_w, 'r');
-
- let stab = ecx.tcx.lookup_stability(m.def_id);
- let depr = ecx.tcx.lookup_deprecation(m.def_id);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
-
- let m_node_id = ecx.local_id(m.def_id);
- self.encode_bounds_and_type_for_item(m_node_id);
-
- if let Some(impl_item) = impl_item_opt {
- if let hir::ImplItemKind::Method(ref sig, _) = impl_item.node {
- encode_attributes(self.rbml_w, &impl_item.attrs);
- let generics = ecx.tcx.lookup_generics(m.def_id);
- let types = generics.parent_types as usize + generics.types.len();
- let needs_inline = types > 0 || is_default_impl ||
- attr::requests_inline(&impl_item.attrs);
- if sig.constness == hir::Constness::Const {
- encode_inlined_item(
- ecx,
- self.rbml_w,
- InlinedItemRef::ImplItem(ecx.tcx.map.local_def_id(parent_id),
- impl_item));
+ };
+
+ let (ast, mir) = if let ty::ConstTraitItem(_) = impl_item {
+ (true, true)
+ } else if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node {
+ let generics = self.tcx.lookup_generics(def_id);
+ let types = generics.parent_types as usize + generics.types.len();
+ let needs_inline = types > 0 || attr::requests_inline(&ast_item.attrs);
+ let is_const_fn = sig.constness == hir::Constness::Const;
+ (is_const_fn, needs_inline || is_const_fn)
+ } else {
+ (false, false)
+ };
+
+ Entry {
+ kind: kind,
+ visibility: impl_item.vis().simplify(),
+ def_key: self.encode_def_key(def_id),
+ attributes: self.encode_attributes(&ast_item.attrs),
+ children: LazySeq::empty(),
+ stability: self.encode_stability(def_id),
+ deprecation: self.encode_deprecation(def_id),
+
+ ty: match impl_item {
+ ty::ConstTraitItem(_) |
+ ty::MethodTraitItem(_) => {
+ Some(self.encode_item_type(def_id))
}
- if needs_inline || sig.constness == hir::Constness::Const {
- self.encode_mir(impl_item.id);
+ ty::TypeTraitItem(ref associated_type) => {
+ associated_type.ty.map(|ty| self.lazy(&ty))
}
- encode_constness(self.rbml_w, sig.constness);
- encode_defaultness(self.rbml_w, impl_item.defaultness);
- self.encode_method_argument_names(&sig.decl);
+ },
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: Some(self.encode_generics(def_id)),
+ predicates: Some(self.encode_predicates(def_id)),
+
+ ast: if ast {
+ Some(self.encode_inlined_item(InlinedItemRef::ImplItem(impl_def_id, ast_item)))
+ } else {
+ None
+ },
+ mir: if mir {
+ self.encode_mir(def_id)
+ } else {
+ None
}
}
}
- fn encode_info_for_associated_type(&mut self,
- associated_type: &ty::AssociatedType<'tcx>,
- parent_id: NodeId,
- impl_item_opt: Option<&hir::ImplItem>) {
- let ecx = self.ecx();
- debug!("encode_info_for_associated_type({:?},{:?})",
- associated_type.def_id,
- associated_type.name);
-
- encode_def_id_and_key(ecx, self.rbml_w, associated_type.def_id);
- encode_name(self.rbml_w, associated_type.name);
- self.encode_visibility(associated_type.vis);
- encode_family(self.rbml_w, 'y');
- self.encode_parent_item(ecx.tcx.map.local_def_id(parent_id));
- encode_item_sort(self.rbml_w, 't');
-
- let stab = ecx.tcx.lookup_stability(associated_type.def_id);
- let depr = ecx.tcx.lookup_deprecation(associated_type.def_id);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
-
- if let Some(ii) = impl_item_opt {
- encode_attributes(self.rbml_w, &ii.attrs);
- encode_defaultness(self.rbml_w, ii.defaultness);
- }
-
- if let Some(ty) = associated_type.ty {
- self.encode_type(ty);
- }
- }
-}
-
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
- fn encode_method_argument_names(&mut self,
- decl: &hir::FnDecl) {
- self.rbml_w.start_tag(tag_method_argument_names);
- for arg in &decl.inputs {
- let tag = tag_method_argument_name;
+ fn encode_fn_arg_names(&mut self, decl: &hir::FnDecl) -> LazySeq<ast::Name> {
+ self.lazy_seq(decl.inputs.iter().map(|arg| {
if let PatKind::Binding(_, ref path1, _) = arg.pat.node {
- let name = path1.node.as_str();
- self.rbml_w.wr_tagged_bytes(tag, name.as_bytes());
+ path1.node
} else {
- self.rbml_w.wr_tagged_bytes(tag, &[]);
+ syntax::parse::token::intern("")
}
- }
- self.rbml_w.end_tag();
- }
-
- fn encode_repr_attrs(&mut self,
- attrs: &[ast::Attribute]) {
- let ecx = self.ecx();
- let mut repr_attrs = Vec::new();
- for attr in attrs {
- repr_attrs.extend(attr::find_repr_attrs(ecx.tcx.sess.diagnostic(),
- attr));
- }
- self.rbml_w.start_tag(tag_items_data_item_repr);
- repr_attrs.encode(self.rbml_w);
- self.rbml_w.end_tag();
+ }))
}
- fn encode_mir(&mut self, node_id: NodeId) {
- let ecx = self.ecx();
- let def_id = ecx.tcx.map.local_def_id(node_id);
- if let Some(mir) = ecx.mir_map.map.get(&def_id) {
- self.rbml_w.start_tag(tag_mir as usize);
- self.rbml_w.emit_opaque(|opaque_encoder| {
- tls::enter_encoding_context(ecx, opaque_encoder, |_, opaque_encoder| {
- Encodable::encode(mir, opaque_encoder)
- })
- }).unwrap();
- self.rbml_w.end_tag();
- }
+ fn encode_mir(&mut self, def_id: DefId) -> Option<Lazy<mir::repr::Mir<'tcx>>> {
+ self.mir_map.map.get(&def_id).map(|mir| self.lazy(mir))
}
-}
-const FN_FAMILY: char = 'f';
-const STATIC_METHOD_FAMILY: char = 'F';
-const METHOD_FAMILY: char = 'h';
-
-// Encodes the inherent implementations of a structure, enumeration, or trait.
-fn encode_inherent_implementations(ecx: &EncodeContext,
- rbml_w: &mut Encoder,
- def_id: DefId) {
- match ecx.tcx.inherent_impls.borrow().get(&def_id) {
- None => {}
- Some(implementations) => {
- for &impl_def_id in implementations.iter() {
- rbml_w.start_tag(tag_items_data_item_inherent_impl);
- encode_def_id(rbml_w, impl_def_id);
- rbml_w.end_tag();
+ // Encodes the inherent implementations of a structure, enumeration, or trait.
+ fn encode_inherent_implementations(&mut self, def_id: DefId) -> LazySeq<DefIndex> {
+ match self.tcx.inherent_impls.borrow().get(&def_id) {
+ None => LazySeq::empty(),
+ Some(implementations) => {
+ self.lazy_seq(implementations.iter().map(|&def_id| {
+ assert!(def_id.is_local());
+ def_id.index
+ }))
}
}
}
-}
-
-fn encode_stability(rbml_w: &mut Encoder, stab_opt: Option<&attr::Stability>) {
- stab_opt.map(|stab| {
- rbml_w.start_tag(tag_items_data_item_stability);
- stab.encode(rbml_w).unwrap();
- rbml_w.end_tag();
- });
-}
-
-fn encode_deprecation(rbml_w: &mut Encoder, depr_opt: Option<attr::Deprecation>) {
- depr_opt.map(|depr| {
- rbml_w.start_tag(tag_items_data_item_deprecation);
- depr.encode(rbml_w).unwrap();
- rbml_w.end_tag();
- });
-}
-fn encode_parent_impl(rbml_w: &mut Encoder, parent_opt: Option<DefId>) {
- parent_opt.map(|parent| {
- rbml_w.wr_tagged_u64(tag_items_data_parent_impl, def_to_u64(parent));
- });
-}
-
-fn encode_xrefs<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
- rbml_w: &mut Encoder,
- xrefs: FnvHashMap<XRef<'tcx>, u32>)
-{
- let mut xref_positions = vec![0; xrefs.len()];
-
- // Encode XRefs sorted by their ID
- let mut sorted_xrefs: Vec<_> = xrefs.into_iter().collect();
- sorted_xrefs.sort_by_key(|&(_, id)| id);
-
- rbml_w.start_tag(tag_xref_data);
- for (xref, id) in sorted_xrefs.into_iter() {
- xref_positions[id as usize] = rbml_w.mark_stable_position() as u32;
- match xref {
- XRef::Predicate(p) => {
- tyencode::enc_predicate(rbml_w.writer, &ecx.ty_str_ctxt(), &p)
- }
- }
+ fn encode_stability(&mut self, def_id: DefId) -> Option<Lazy<attr::Stability>> {
+ self.tcx.lookup_stability(def_id).map(|stab| self.lazy(stab))
}
- rbml_w.mark_stable_position();
- rbml_w.end_tag();
- rbml_w.start_tag(tag_xref_index);
- index::write_dense_index(xref_positions, rbml_w.writer);
- rbml_w.end_tag();
-}
+ fn encode_deprecation(&mut self, def_id: DefId) -> Option<Lazy<attr::Deprecation>> {
+ self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr))
+ }
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
fn encode_info_for_item(&mut self,
- (def_id, item): (DefId, &hir::Item)) {
- let ecx = self.ecx();
- let tcx = ecx.tcx;
+ (def_id, item): (DefId, &hir::Item)) -> Entry<'tcx> {
+ let tcx = self.tcx;
debug!("encoding info for item at {}",
tcx.sess.codemap().span_to_string(item.span));
- let vis = &item.vis;
-
- let (stab, depr) = tcx.dep_graph.with_task(DepNode::MetaData(def_id), || {
- (tcx.lookup_stability(ecx.tcx.map.local_def_id(item.id)),
- tcx.lookup_deprecation(ecx.tcx.map.local_def_id(item.id)))
- });
+ let kind = match item.node {
+ hir::ItemStatic(_, hir::MutMutable, _) => EntryKind::MutStatic,
+ hir::ItemStatic(_, hir::MutImmutable, _) => EntryKind::ImmStatic,
+ hir::ItemConst(..) => EntryKind::Const,
+ hir::ItemFn(ref decl, _, constness, ..) => {
+ let data = FnData {
+ constness: constness,
+ arg_names: self.encode_fn_arg_names(&decl)
+ };
- match item.node {
- hir::ItemStatic(_, m, _) => {
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- if m == hir::MutMutable {
- encode_family(self.rbml_w, 'b');
- } else {
- encode_family(self.rbml_w, 'c');
- }
- self.encode_bounds_and_type_for_item(item.id);
- encode_name(self.rbml_w, item.name);
- self.encode_visibility(vis);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- encode_attributes(self.rbml_w, &item.attrs);
- }
- hir::ItemConst(..) => {
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 'C');
- self.encode_bounds_and_type_for_item(item.id);
- encode_name(self.rbml_w, item.name);
- encode_attributes(self.rbml_w, &item.attrs);
- encode_inlined_item(ecx, self.rbml_w, InlinedItemRef::Item(def_id, item));
- self.encode_mir(item.id);
- self.encode_visibility(vis);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- }
- hir::ItemFn(ref decl, _, constness, _, ref generics, _) => {
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, FN_FAMILY);
- let tps_len = generics.ty_params.len();
- self.encode_bounds_and_type_for_item(item.id);
- encode_name(self.rbml_w, item.name);
- encode_attributes(self.rbml_w, &item.attrs);
- let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs);
- if constness == hir::Constness::Const {
- encode_inlined_item(ecx, self.rbml_w, InlinedItemRef::Item(def_id, item));
- }
- if needs_inline || constness == hir::Constness::Const {
- self.encode_mir(item.id);
- }
- encode_constness(self.rbml_w, constness);
- self.encode_visibility(vis);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- self.encode_method_argument_names(&decl);
+ EntryKind::Fn(self.lazy(&data))
}
hir::ItemMod(ref m) => {
- self.encode_info_for_mod(FromId(item.id, (m, &item.attrs, item.name, &item.vis)));
- }
- hir::ItemForeignMod(ref fm) => {
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 'n');
- encode_name(self.rbml_w, item.name);
-
- // Encode all the items in self module.
- for foreign_item in &fm.items {
- self.rbml_w.wr_tagged_u64(
- tag_mod_child,
- def_to_u64(ecx.tcx.map.local_def_id(foreign_item.id)));
- }
- self.encode_visibility(vis);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- }
- hir::ItemTy(..) => {
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 'y');
- self.encode_bounds_and_type_for_item(item.id);
- encode_name(self.rbml_w, item.name);
- self.encode_visibility(vis);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- }
- hir::ItemEnum(ref enum_definition, _) => {
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 't');
- encode_item_variances(self.rbml_w, ecx, item.id);
- self.encode_bounds_and_type_for_item(item.id);
- encode_name(self.rbml_w, item.name);
- encode_attributes(self.rbml_w, &item.attrs);
- self.encode_repr_attrs(&item.attrs);
- for v in &enum_definition.variants {
- encode_variant_id(self.rbml_w, ecx.tcx.map.local_def_id(v.node.data.id()));
- }
-
- // Encode inherent implementations for self enumeration.
- encode_inherent_implementations(ecx, self.rbml_w, def_id);
-
- self.encode_visibility(vis);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
+ return self.encode_info_for_mod(FromId(item.id, (m, &item.attrs, &item.vis)));
}
+ hir::ItemForeignMod(_) => EntryKind::ForeignMod,
+ hir::ItemTy(..) => EntryKind::Type,
+ hir::ItemEnum(..) => EntryKind::Enum,
hir::ItemStruct(ref struct_def, _) => {
- /* Index the class*/
- let def = ecx.tcx.lookup_adt_def(def_id);
- let variant = def.struct_variant();
-
- /* Now, make an item for the class itself */
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, match *struct_def {
- hir::VariantData::Struct(..) => 'S',
- hir::VariantData::Tuple(..) => 's',
- hir::VariantData::Unit(..) => 'u',
- });
- self.encode_bounds_and_type_for_item(item.id);
-
- encode_item_variances(self.rbml_w, ecx, item.id);
- encode_name(self.rbml_w, item.name);
- encode_attributes(self.rbml_w, &item.attrs);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- self.encode_visibility(vis);
- self.encode_repr_attrs(&item.attrs);
+ let variant = tcx.lookup_adt_def(def_id).struct_variant();
/* Encode def_ids for each field and method
for methods, write all the stuff get_trait_method
needs to know*/
- self.encode_struct_fields(variant);
-
- // Encode inherent implementations for self structure.
- encode_inherent_implementations(ecx, self.rbml_w, def_id);
-
- if !struct_def.is_struct() {
- let ctor_did = ecx.tcx.map.local_def_id(struct_def.id());
- self.rbml_w.wr_tagged_u64(tag_items_data_item_struct_ctor,
- def_to_u64(ctor_did));
- }
+ let struct_ctor = if !struct_def.is_struct() {
+ Some(tcx.map.local_def_id(struct_def.id()).index)
+ } else {
+ None
+ };
+ EntryKind::Struct(self.lazy(&VariantData {
+ kind: variant.kind,
+ disr: variant.disr_val.to_u64_unchecked(),
+ struct_ctor: struct_ctor
+ }))
}
hir::ItemUnion(..) => {
- let def = ecx.tcx.lookup_adt_def(def_id);
- let variant = def.struct_variant();
-
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 'U');
- self.encode_bounds_and_type_for_item(item.id);
-
- encode_item_variances(self.rbml_w, ecx, item.id);
- encode_name(self.rbml_w, item.name);
- encode_attributes(self.rbml_w, &item.attrs);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- self.encode_visibility(vis);
- self.encode_repr_attrs(&item.attrs);
-
- /* Encode def_ids for each field and method
- for methods, write all the stuff get_trait_method
- needs to know*/
- self.encode_struct_fields(variant);
-
- encode_inlined_item(ecx, self.rbml_w, InlinedItemRef::Item(def_id, item));
- self.encode_mir(item.id);
+ let variant = tcx.lookup_adt_def(def_id).struct_variant();
- // Encode inherent implementations for self union.
- encode_inherent_implementations(ecx, self.rbml_w, def_id);
+ EntryKind::Union(self.lazy(&VariantData {
+ kind: variant.kind,
+ disr: variant.disr_val.to_u64_unchecked(),
+ struct_ctor: None
+ }))
}
- hir::ItemDefaultImpl(unsafety, _) => {
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 'd');
- encode_name(self.rbml_w, item.name);
- encode_unsafety(self.rbml_w, unsafety);
-
- let trait_ref = tcx.impl_trait_ref(ecx.tcx.map.local_def_id(item.id)).unwrap();
- encode_trait_ref(self.rbml_w, ecx, trait_ref, tag_item_trait_ref);
- }
- hir::ItemImpl(unsafety, polarity, ..) => {
- // We need to encode information about the default methods we
- // have inherited, so we drive self based on the impl structure.
- let impl_items = tcx.impl_items.borrow();
- let items = &impl_items[&def_id];
-
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 'i');
- self.encode_bounds_and_type_for_item(item.id);
- encode_name(self.rbml_w, item.name);
- encode_attributes(self.rbml_w, &item.attrs);
- encode_unsafety(self.rbml_w, unsafety);
- encode_polarity(self.rbml_w, polarity);
-
- match
- tcx.custom_coerce_unsized_kinds
- .borrow()
- .get(&ecx.tcx.map.local_def_id(item.id))
- {
- Some(&kind) => {
- self.rbml_w.start_tag(tag_impl_coerce_unsized_kind);
- kind.encode(self.rbml_w);
- self.rbml_w.end_tag();
- }
- None => {}
- }
+ hir::ItemDefaultImpl(..) => {
+ let data = ImplData {
+ polarity: hir::ImplPolarity::Positive,
+ parent_impl: None,
+ coerce_unsized_kind: None,
+ trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref))
+ };
- for &item_def_id in items {
- self.rbml_w.start_tag(tag_item_impl_item);
- match item_def_id {
- ty::ConstTraitItemId(item_def_id) => {
- encode_def_id(self.rbml_w, item_def_id);
- encode_item_sort(self.rbml_w, 'C');
- }
- ty::MethodTraitItemId(item_def_id) => {
- encode_def_id(self.rbml_w, item_def_id);
- encode_item_sort(self.rbml_w, 'r');
- }
- ty::TypeTraitItemId(item_def_id) => {
- encode_def_id(self.rbml_w, item_def_id);
- encode_item_sort(self.rbml_w, 't');
+ EntryKind::DefaultImpl(self.lazy(&data))
+ }
+ hir::ItemImpl(_, polarity, ..) => {
+ let trait_ref = tcx.impl_trait_ref(def_id);
+ let parent = if let Some(trait_ref) = trait_ref {
+ let trait_def = tcx.lookup_trait_def(trait_ref.def_id);
+ trait_def.ancestors(def_id).skip(1).next().and_then(|node| {
+ match node {
+ specialization_graph::Node::Impl(parent) => Some(parent),
+ _ => None,
}
- }
- self.rbml_w.end_tag();
- }
+ })
+ } else {
+ None
+ };
- let did = ecx.tcx.map.local_def_id(item.id);
- if let Some(trait_ref) = tcx.impl_trait_ref(did) {
- encode_trait_ref(self.rbml_w, ecx, trait_ref, tag_item_trait_ref);
+ let data = ImplData {
+ polarity: polarity,
+ parent_impl: parent,
+ coerce_unsized_kind: tcx.custom_coerce_unsized_kinds.borrow()
+ .get(&def_id).cloned(),
+ trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref))
+ };
- let trait_def = tcx.lookup_trait_def(trait_ref.def_id);
- let parent = trait_def.ancestors(did)
- .skip(1)
- .next()
- .and_then(|node| match node {
- specialization_graph::Node::Impl(parent) =>
- Some(parent),
- _ => None,
- });
- encode_parent_impl(self.rbml_w, parent);
- }
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
+ EntryKind::Impl(self.lazy(&data))
}
hir::ItemTrait(..) => {
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 'I');
- encode_item_variances(self.rbml_w, ecx, item.id);
let trait_def = tcx.lookup_trait_def(def_id);
- let trait_predicates = tcx.lookup_predicates(def_id);
- encode_unsafety(self.rbml_w, trait_def.unsafety);
- encode_paren_sugar(self.rbml_w, trait_def.paren_sugar);
- encode_defaulted(self.rbml_w, tcx.trait_has_default_impl(def_id));
- encode_associated_type_names(self.rbml_w, &trait_def.associated_type_names);
- self.encode_generics(&trait_def.generics, &trait_predicates);
- self.encode_predicates(&tcx.lookup_super_predicates(def_id),
- tag_item_super_predicates);
- encode_trait_ref(self.rbml_w, ecx, trait_def.trait_ref, tag_item_trait_ref);
- encode_name(self.rbml_w, item.name);
- encode_attributes(self.rbml_w, &item.attrs);
- self.encode_visibility(vis);
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- for &method_def_id in tcx.trait_item_def_ids(def_id).iter() {
- self.rbml_w.start_tag(tag_item_trait_item);
- match method_def_id {
- ty::ConstTraitItemId(const_def_id) => {
- encode_def_id(self.rbml_w, const_def_id);
- encode_item_sort(self.rbml_w, 'C');
- }
- ty::MethodTraitItemId(method_def_id) => {
- encode_def_id(self.rbml_w, method_def_id);
- encode_item_sort(self.rbml_w, 'r');
- }
- ty::TypeTraitItemId(type_def_id) => {
- encode_def_id(self.rbml_w, type_def_id);
- encode_item_sort(self.rbml_w, 't');
- }
- }
- self.rbml_w.end_tag();
-
- self.rbml_w.wr_tagged_u64(tag_mod_child,
- def_to_u64(method_def_id.def_id()));
- }
+ let data = TraitData {
+ unsafety: trait_def.unsafety,
+ paren_sugar: trait_def.paren_sugar,
+ has_default_impl: tcx.trait_has_default_impl(def_id),
+ trait_ref: self.lazy(&trait_def.trait_ref),
+ super_predicates: self.lazy(&tcx.lookup_super_predicates(def_id))
+ };
- // Encode inherent implementations for self trait.
- encode_inherent_implementations(ecx, self.rbml_w, def_id);
+ EntryKind::Trait(self.lazy(&data))
}
hir::ItemExternCrate(_) | hir::ItemUse(_) => {
bug!("cannot encode info for item {:?}", item)
}
+ };
+
+ Entry {
+ kind: kind,
+ visibility: item.vis.simplify(),
+ def_key: self.encode_def_key(def_id),
+ attributes: self.encode_attributes(&item.attrs),
+ children: match item.node {
+ hir::ItemForeignMod(ref fm) => {
+ self.lazy_seq(fm.items.iter().map(|foreign_item| {
+ tcx.map.local_def_id(foreign_item.id).index
+ }))
+ }
+ hir::ItemEnum(..) => {
+ let def = self.tcx.lookup_adt_def(def_id);
+ self.lazy_seq(def.variants.iter().map(|v| {
+ assert!(v.did.is_local());
+ v.did.index
+ }))
+ }
+ hir::ItemStruct(..) |
+ hir::ItemUnion(..) => {
+ let def = self.tcx.lookup_adt_def(def_id);
+ self.lazy_seq(def.struct_variant().fields.iter().map(|f| {
+ assert!(f.did.is_local());
+ f.did.index
+ }))
+ }
+ hir::ItemImpl(..) |
+ hir::ItemTrait(..) => {
+ self.lazy_seq(tcx.impl_or_trait_items(def_id).iter().map(|&def_id| {
+ assert!(def_id.is_local());
+ def_id.index
+ }))
+ }
+ _ => LazySeq::empty()
+ },
+ stability: self.encode_stability(def_id),
+ deprecation: self.encode_deprecation(def_id),
+
+ ty: match item.node {
+ hir::ItemStatic(..) |
+ hir::ItemConst(..) |
+ hir::ItemFn(..) |
+ hir::ItemTy(..) |
+ hir::ItemEnum(..) |
+ hir::ItemStruct(..) |
+ hir::ItemUnion(..) |
+ hir::ItemImpl(..) => {
+ Some(self.encode_item_type(def_id))
+ }
+ _ => None
+ },
+ inherent_impls: self.encode_inherent_implementations(def_id),
+ variances: match item.node {
+ hir::ItemEnum(..) |
+ hir::ItemStruct(..) |
+ hir::ItemUnion(..) |
+ hir::ItemTrait(..) => {
+ self.encode_item_variances(def_id)
+ }
+ _ => LazySeq::empty()
+ },
+ generics: match item.node {
+ hir::ItemStatic(..) |
+ hir::ItemConst(..) |
+ hir::ItemFn(..) |
+ hir::ItemTy(..) |
+ hir::ItemEnum(..) |
+ hir::ItemStruct(..) |
+ hir::ItemUnion(..) |
+ hir::ItemImpl(..) |
+ hir::ItemTrait(..) => {
+ Some(self.encode_generics(def_id))
+ }
+ _ => None
+ },
+ predicates: match item.node {
+ hir::ItemStatic(..) |
+ hir::ItemConst(..) |
+ hir::ItemFn(..) |
+ hir::ItemTy(..) |
+ hir::ItemEnum(..) |
+ hir::ItemStruct(..) |
+ hir::ItemUnion(..) |
+ hir::ItemImpl(..) |
+ hir::ItemTrait(..) => {
+ Some(self.encode_predicates(def_id))
+ }
+ _ => None
+ },
+
+ ast: match item.node {
+ hir::ItemConst(..) |
+ hir::ItemFn(_, _, hir::Constness::Const, ..) => {
+ Some(self.encode_inlined_item(InlinedItemRef::Item(def_id, item)))
+ }
+ _ => None
+ },
+ mir: match item.node {
+ hir::ItemConst(..) => {
+ self.encode_mir(def_id)
+ }
+ hir::ItemFn(_, _, constness, _, ref generics, _) => {
+ let tps_len = generics.ty_params.len();
+ let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs);
+ if needs_inline || constness == hir::Constness::Const {
+ self.encode_mir(def_id)
+ } else {
+ None
+ }
+ }
+ _ => None
+ }
}
}
}
-impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> {
+impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
/// In some cases, along with the item itself, we also
/// encode some sub-items. Usually we want some info from the item
/// so it's easier to do that here then to wait until we would encounter
/// normally in the visitor walk.
fn encode_addl_info_for_item(&mut self,
item: &hir::Item) {
- let def_id = self.ecx().tcx.map.local_def_id(item.id);
+ let def_id = self.tcx.map.local_def_id(item.id);
match item.node {
hir::ItemStatic(..) |
hir::ItemConst(..) |
// no sub-item recording needed in these cases
}
hir::ItemEnum(..) => {
- self.encode_enum_variant_infos(def_id);
+ self.encode_fields(def_id);
+
+ let def = self.tcx.lookup_adt_def(def_id);
+ for (i, variant) in def.variants.iter().enumerate() {
+ self.record(variant.did,
+ EncodeContext::encode_enum_variant_info,
+ (def_id, Untracked(i)));
+ }
}
hir::ItemStruct(ref struct_def, _) => {
- self.encode_addl_struct_info(def_id, struct_def.id(), item);
+ self.encode_fields(def_id);
+
+ // If this is a tuple-like struct, encode the type of the constructor.
+ match self.tcx.lookup_adt_def(def_id).struct_variant().kind {
+ ty::VariantKind::Struct => {
+ // no value for structs like struct Foo { ... }
+ }
+ ty::VariantKind::Tuple | ty::VariantKind::Unit => {
+ // there is a value for structs like `struct
+ // Foo()` and `struct Foo`
+ let ctor_def_id = self.tcx.map.local_def_id(struct_def.id());
+ self.record(ctor_def_id,
+ EncodeContext::encode_struct_ctor,
+ (def_id, ctor_def_id));
+ }
+ }
}
hir::ItemUnion(..) => {
- self.encode_addl_union_info(def_id);
- }
- hir::ItemImpl(.., ref ast_items) => {
- self.encode_addl_impl_info(def_id, item.id, ast_items);
+ self.encode_fields(def_id);
}
- hir::ItemTrait(.., ref trait_items) => {
- self.encode_addl_trait_info(def_id, trait_items);
- }
- }
- }
-
- fn encode_addl_struct_info(&mut self,
- def_id: DefId,
- struct_node_id: ast::NodeId,
- item: &hir::Item) {
- let ecx = self.ecx();
- let def = ecx.tcx.lookup_adt_def(def_id);
- let variant = def.struct_variant();
-
- self.encode_fields(def_id);
-
- // If this is a tuple-like struct, encode the type of the constructor.
- match variant.kind {
- ty::VariantKind::Struct => {
- // no value for structs like struct Foo { ... }
+ hir::ItemImpl(..) => {
+ for &trait_item_def_id in &self.tcx.impl_or_trait_items(def_id)[..] {
+ self.record(trait_item_def_id,
+ EncodeContext::encode_info_for_impl_item,
+ trait_item_def_id);
+ }
}
- ty::VariantKind::Tuple | ty::VariantKind::Unit => {
- // there is a value for structs like `struct
- // Foo()` and `struct Foo`
- let ctor_def_id = ecx.tcx.map.local_def_id(struct_node_id);
- self.record(ctor_def_id,
- ItemContentBuilder::encode_struct_ctor,
- (def_id, item.id, struct_node_id));
+ hir::ItemTrait(..) => {
+ for &item_def_id in &self.tcx.impl_or_trait_items(def_id)[..] {
+ self.record(item_def_id,
+ EncodeContext::encode_info_for_trait_item,
+ item_def_id);
+ }
}
}
}
-
- fn encode_addl_union_info(&mut self, def_id: DefId) {
- self.encode_fields(def_id);
- }
-
- fn encode_addl_impl_info(&mut self,
- def_id: DefId,
- impl_id: ast::NodeId,
- ast_items: &[hir::ImplItem]) {
- let ecx = self.ecx();
- let impl_items = ecx.tcx.impl_items.borrow();
- let items = &impl_items[&def_id];
-
- // Iterate down the trait items, emitting them. We rely on the
- // assumption that all of the actually implemented trait items
- // appear first in the impl structure, in the same order they do
- // in the ast. This is a little sketchy.
- let num_implemented_methods = ast_items.len();
- for (i, &trait_item_def_id) in items.iter().enumerate() {
- let ast_item = if i < num_implemented_methods {
- Some(&ast_items[i])
- } else {
- None
- };
-
- let trait_item_def_id = trait_item_def_id.def_id();
- self.record(trait_item_def_id,
- ItemContentBuilder::encode_info_for_impl_item,
- (impl_id, trait_item_def_id, ast_item));
- }
- }
-
- fn encode_addl_trait_info(&mut self,
- def_id: DefId,
- trait_items: &[hir::TraitItem]) {
- // Now output the trait item info for each trait item.
- let tcx = self.ecx().tcx;
- let r = tcx.trait_item_def_ids(def_id);
- for (item_def_id, trait_item) in r.iter().zip(trait_items) {
- let item_def_id = item_def_id.def_id();
- assert!(item_def_id.is_local());
- self.record(item_def_id,
- ItemContentBuilder::encode_info_for_trait_item,
- (def_id, item_def_id, trait_item));
- }
- }
}
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
+impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
fn encode_info_for_foreign_item(&mut self,
- (def_id, nitem): (DefId, &hir::ForeignItem)) {
- let ecx = self.ecx();
+ (def_id, nitem): (DefId, &hir::ForeignItem))
+ -> Entry<'tcx> {
+ let tcx = self.tcx;
- debug!("writing foreign item {}", ecx.tcx.node_path_str(nitem.id));
+ debug!("writing foreign item {}", tcx.node_path_str(nitem.id));
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- let parent_id = ecx.tcx.map.get_parent(nitem.id);
- self.encode_parent_item(ecx.tcx.map.local_def_id(parent_id));
- self.encode_visibility(&nitem.vis);
- match nitem.node {
+ let kind = match nitem.node {
hir::ForeignItemFn(ref fndecl, _) => {
- encode_family(self.rbml_w, FN_FAMILY);
- self.encode_bounds_and_type_for_item(nitem.id);
- encode_name(self.rbml_w, nitem.name);
- encode_attributes(self.rbml_w, &nitem.attrs);
- let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(nitem.id));
- let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(nitem.id));
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- self.encode_method_argument_names(&fndecl);
- }
- hir::ForeignItemStatic(_, mutbl) => {
- if mutbl {
- encode_family(self.rbml_w, 'b');
- } else {
- encode_family(self.rbml_w, 'c');
- }
- self.encode_bounds_and_type_for_item(nitem.id);
- encode_attributes(self.rbml_w, &nitem.attrs);
- let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(nitem.id));
- let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(nitem.id));
- encode_stability(self.rbml_w, stab);
- encode_deprecation(self.rbml_w, depr);
- encode_name(self.rbml_w, nitem.name);
+ let data = FnData {
+ constness: hir::Constness::NotConst,
+ arg_names: self.encode_fn_arg_names(&fndecl)
+ };
+ EntryKind::ForeignFn(self.lazy(&data))
}
+ hir::ForeignItemStatic(_, true) => EntryKind::ForeignMutStatic,
+ hir::ForeignItemStatic(_, false) => EntryKind::ForeignImmStatic
+ };
+
+ Entry {
+ kind: kind,
+ visibility: nitem.vis.simplify(),
+ def_key: self.encode_def_key(def_id),
+ attributes: self.encode_attributes(&nitem.attrs),
+ children: LazySeq::empty(),
+ stability: self.encode_stability(def_id),
+ deprecation: self.encode_deprecation(def_id),
+
+ ty: Some(self.encode_item_type(def_id)),
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: Some(self.encode_generics(def_id)),
+ predicates: Some(self.encode_predicates(def_id)),
+
+ ast: None,
+ mir: None
}
}
}
-struct EncodeVisitor<'a, 'ecx: 'a, 'tcx: 'ecx, 'encoder: 'ecx> {
- index: &'a mut IndexBuilder<'ecx, 'tcx, 'encoder>,
+struct EncodeVisitor<'a, 'b: 'a, 'tcx: 'b> {
+ index: IndexBuilder<'a, 'b, 'tcx>,
}
-impl<'a, 'ecx, 'tcx, 'encoder> Visitor<'tcx> for EncodeVisitor<'a, 'ecx, 'tcx, 'encoder> {
+impl<'a, 'b, 'tcx> Visitor<'tcx> for EncodeVisitor<'a, 'b, 'tcx> {
fn visit_expr(&mut self, ex: &'tcx hir::Expr) {
intravisit::walk_expr(self, ex);
self.index.encode_info_for_expr(ex);
}
fn visit_item(&mut self, item: &'tcx hir::Item) {
intravisit::walk_item(self, item);
- let def_id = self.index.ecx().tcx.map.local_def_id(item.id);
+ let def_id = self.index.tcx.map.local_def_id(item.id);
match item.node {
hir::ItemExternCrate(_) | hir::ItemUse(_) => (), // ignore these
_ => self.index.record(def_id,
- ItemContentBuilder::encode_info_for_item,
+ EncodeContext::encode_info_for_item,
(def_id, item)),
}
self.index.encode_addl_info_for_item(item);
}
fn visit_foreign_item(&mut self, ni: &'tcx hir::ForeignItem) {
intravisit::walk_foreign_item(self, ni);
- let def_id = self.index.ecx().tcx.map.local_def_id(ni.id);
+ let def_id = self.index.tcx.map.local_def_id(ni.id);
self.index.record(def_id,
- ItemContentBuilder::encode_info_for_foreign_item,
+ EncodeContext::encode_info_for_foreign_item,
(def_id, ni));
}
fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
}
}
-impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> {
+impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
fn encode_info_for_ty(&mut self, ty: &hir::Ty) {
- let ecx = self.ecx();
if let hir::TyImplTrait(_) = ty.node {
- let def_id = ecx.tcx.map.local_def_id(ty.id);
+ let def_id = self.tcx.map.local_def_id(ty.id);
self.record(def_id,
- ItemContentBuilder::encode_info_for_anon_ty,
- (def_id, ty.id));
+ EncodeContext::encode_info_for_anon_ty,
+ def_id);
}
}
fn encode_info_for_expr(&mut self, expr: &hir::Expr) {
- let ecx = self.ecx();
-
match expr.node {
hir::ExprClosure(..) => {
- let def_id = ecx.tcx.map.local_def_id(expr.id);
+ let def_id = self.tcx.map.local_def_id(expr.id);
self.record(def_id,
- ItemContentBuilder::encode_info_for_closure,
- (def_id, expr.id));
+ EncodeContext::encode_info_for_closure,
+ def_id);
}
_ => { }
}
}
}
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
- fn encode_info_for_anon_ty(&mut self, (def_id, ty_id): (DefId, NodeId)) {
- let ecx = self.ecx;
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_family(self.rbml_w, 'y');
- self.encode_bounds_and_type_for_item(ty_id);
- }
-
- fn encode_info_for_closure(&mut self, (def_id, expr_id): (DefId, NodeId)) {
- let ecx = self.ecx;
- encode_def_id_and_key(ecx, self.rbml_w, def_id);
- encode_name(self.rbml_w, syntax::parse::token::intern("<closure>"));
-
- self.rbml_w.start_tag(tag_items_closure_ty);
- write_closure_type(ecx,
- self.rbml_w,
- &ecx.tcx.tables.borrow().closure_tys[&def_id]);
- self.rbml_w.end_tag();
+impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
+ fn encode_info_for_anon_ty(&mut self, def_id: DefId) -> Entry<'tcx> {
+ Entry {
+ kind: EntryKind::Type,
+ visibility: ty::Visibility::Public,
+ def_key: self.encode_def_key(def_id),
+ attributes: LazySeq::empty(),
+ children: LazySeq::empty(),
+ stability: None,
+ deprecation: None,
- self.rbml_w.start_tag(tag_items_closure_kind);
- ecx.tcx.closure_kind(def_id).encode(self.rbml_w).unwrap();
- self.rbml_w.end_tag();
+ ty: Some(self.encode_item_type(def_id)),
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: Some(self.encode_generics(def_id)),
+ predicates: Some(self.encode_predicates(def_id)),
- assert!(ecx.mir_map.map.contains_key(&def_id));
- self.encode_mir(expr_id);
+ ast: None,
+ mir: None
+ }
}
-}
-
-fn encode_info_for_items<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
- rbml_w: &mut Encoder)
- -> (IndexData, FnvHashMap<XRef<'tcx>, u32>) {
- let krate = ecx.tcx.map.krate();
- rbml_w.start_tag(tag_items_data);
+ fn encode_info_for_closure(&mut self, def_id: DefId) -> Entry<'tcx> {
+ let tcx = self.tcx;
- let fields = {
- let mut index = IndexBuilder::new(ecx, rbml_w);
- index.record(DefId::local(CRATE_DEF_INDEX),
- ItemContentBuilder::encode_info_for_mod,
- FromId(CRATE_NODE_ID, (&krate.module,
- &[],
- syntax::parse::token::intern(&ecx.link_meta.crate_name),
- &hir::Public)));
- krate.visit_all_items(&mut EncodeVisitor {
- index: &mut index,
- });
- index.into_fields()
- };
+ let data = ClosureData {
+ kind: tcx.closure_kind(def_id),
+ ty: self.lazy(&tcx.tables.borrow().closure_tys[&def_id])
+ };
- rbml_w.end_tag();
+ Entry {
+ kind: EntryKind::Closure(self.lazy(&data)),
+ visibility: ty::Visibility::Public,
+ def_key: self.encode_def_key(def_id),
+ attributes: self.encode_attributes(&tcx.get_attrs(def_id)),
+ children: LazySeq::empty(),
+ stability: None,
+ deprecation: None,
- fields
-}
+ ty: None,
+ inherent_impls: LazySeq::empty(),
+ variances: LazySeq::empty(),
+ generics: None,
+ predicates: None,
-fn encode_item_index(rbml_w: &mut Encoder, index: IndexData) {
- rbml_w.start_tag(tag_index);
- index.write_index(rbml_w.writer);
- rbml_w.end_tag();
-}
-
-fn encode_attributes(rbml_w: &mut Encoder, attrs: &[ast::Attribute]) {
- rbml_w.start_tag(tag_attributes);
- rbml_w.emit_opaque(|opaque_encoder| {
- attrs.encode(opaque_encoder)
- }).unwrap();
- rbml_w.end_tag();
-}
-
-fn encode_unsafety(rbml_w: &mut Encoder, unsafety: hir::Unsafety) {
- let byte: u8 = match unsafety {
- hir::Unsafety::Normal => 0,
- hir::Unsafety::Unsafe => 1,
- };
- rbml_w.wr_tagged_u8(tag_unsafety, byte);
-}
-
-fn encode_paren_sugar(rbml_w: &mut Encoder, paren_sugar: bool) {
- let byte: u8 = if paren_sugar {1} else {0};
- rbml_w.wr_tagged_u8(tag_paren_sugar, byte);
-}
-
-fn encode_defaulted(rbml_w: &mut Encoder, is_defaulted: bool) {
- let byte: u8 = if is_defaulted {1} else {0};
- rbml_w.wr_tagged_u8(tag_defaulted_trait, byte);
-}
-
-fn encode_associated_type_names(rbml_w: &mut Encoder, names: &[Name]) {
- rbml_w.start_tag(tag_associated_type_names);
- for &name in names {
- rbml_w.wr_tagged_str(tag_associated_type_name, &name.as_str());
- }
- rbml_w.end_tag();
-}
-
-fn encode_polarity(rbml_w: &mut Encoder, polarity: hir::ImplPolarity) {
- let byte: u8 = match polarity {
- hir::ImplPolarity::Positive => 0,
- hir::ImplPolarity::Negative => 1,
- };
- rbml_w.wr_tagged_u8(tag_polarity, byte);
-}
-
-fn encode_crate_deps(rbml_w: &mut Encoder, cstore: &cstore::CStore) {
- fn get_ordered_deps(cstore: &cstore::CStore)
- -> Vec<(CrateNum, Rc<cstore::CrateMetadata>)> {
- // Pull the cnums and name,vers,hash out of cstore
- let mut deps = Vec::new();
- cstore.iter_crate_data(|cnum, val| {
- deps.push((cnum, val.clone()));
- });
-
- // Sort by cnum
- deps.sort_by(|kv1, kv2| kv1.0.cmp(&kv2.0));
-
- // Sanity-check the crate numbers
- let mut expected_cnum = 1;
- for &(n, _) in &deps {
- assert_eq!(n, expected_cnum);
- expected_cnum += 1;
+ ast: None,
+ mir: self.encode_mir(def_id)
}
-
- deps
- }
-
- // We're just going to write a list of crate 'name-hash-version's, with
- // the assumption that they are numbered 1 to n.
- // FIXME (#2166): This is not nearly enough to support correct versioning
- // but is enough to get transitive crate dependencies working.
- rbml_w.start_tag(tag_crate_deps);
- for (_cnum, dep) in get_ordered_deps(cstore) {
- encode_crate_dep(rbml_w, &dep);
}
- rbml_w.end_tag();
-}
-
-fn encode_lang_items(ecx: &EncodeContext, rbml_w: &mut Encoder) {
- rbml_w.start_tag(tag_lang_items);
- for (i, &opt_def_id) in ecx.tcx.lang_items.items().iter().enumerate() {
- if let Some(def_id) = opt_def_id {
- if def_id.is_local() {
- rbml_w.start_tag(tag_lang_items_item);
- rbml_w.wr_tagged_u32(tag_lang_items_item_id, i as u32);
- rbml_w.wr_tagged_u32(tag_lang_items_item_index, def_id.index.as_u32());
- rbml_w.end_tag();
+ fn encode_info_for_items(&mut self) -> Index {
+ let krate = self.tcx.map.krate();
+ let mut index = IndexBuilder::new(self);
+ index.record(DefId::local(CRATE_DEF_INDEX),
+ EncodeContext::encode_info_for_mod,
+ FromId(CRATE_NODE_ID, (&krate.module, &krate.attrs, &hir::Public)));
+ let mut visitor = EncodeVisitor {
+ index: index,
+ };
+ krate.visit_all_items(&mut visitor);
+ visitor.index.into_items()
+ }
+
+ fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> LazySeq<ast::Attribute> {
+ self.lazy_seq_ref(attrs)
+ }
+
+ fn encode_crate_deps(&mut self) -> LazySeq<CrateDep> {
+ fn get_ordered_deps(cstore: &cstore::CStore)
+ -> Vec<(CrateNum, Rc<cstore::CrateMetadata>)> {
+ // Pull the cnums and name,vers,hash out of cstore
+ let mut deps = Vec::new();
+ cstore.iter_crate_data(|cnum, val| {
+ deps.push((cnum, val.clone()));
+ });
+
+ // Sort by cnum
+ deps.sort_by(|kv1, kv2| kv1.0.cmp(&kv2.0));
+
+ // Sanity-check the crate numbers
+ let mut expected_cnum = 1;
+ for &(n, _) in &deps {
+ assert_eq!(n, CrateNum::new(expected_cnum));
+ expected_cnum += 1;
}
+
+ deps
}
- }
- for i in &ecx.tcx.lang_items.missing {
- rbml_w.wr_tagged_u32(tag_lang_items_missing, *i as u32);
+ // We're just going to write a list of crate 'name-hash-version's, with
+ // the assumption that they are numbered 1 to n.
+ // FIXME (#2166): This is not nearly enough to support correct versioning
+ // but is enough to get transitive crate dependencies working.
+ let deps = get_ordered_deps(self.cstore);
+ self.lazy_seq(deps.iter().map(|&(_, ref dep)| {
+ CrateDep {
+ name: syntax::parse::token::intern(dep.name()),
+ hash: dep.hash(),
+ explicitly_linked: dep.explicitly_linked.get()
+ }
+ }))
}
- rbml_w.end_tag(); // tag_lang_items
-}
-
-fn encode_native_libraries(ecx: &EncodeContext, rbml_w: &mut Encoder) {
- rbml_w.start_tag(tag_native_libraries);
-
- for &(ref lib, kind) in ecx.tcx.sess.cstore.used_libraries().iter() {
- match kind {
- cstore::NativeStatic => {} // these libraries are not propagated
- cstore::NativeFramework | cstore::NativeUnknown => {
- rbml_w.start_tag(tag_native_libraries_lib);
- rbml_w.wr_tagged_u32(tag_native_libraries_kind, kind as u32);
- rbml_w.wr_tagged_str(tag_native_libraries_name, lib);
- rbml_w.end_tag();
+ fn encode_lang_items(&mut self)
+ -> (LazySeq<(DefIndex, usize)>, LazySeq<lang_items::LangItem>) {
+ let tcx = self.tcx;
+ let lang_items = tcx.lang_items.items().iter();
+ (self.lazy_seq(lang_items.enumerate().filter_map(|(i, &opt_def_id)| {
+ if let Some(def_id) = opt_def_id {
+ if def_id.is_local() {
+ return Some((def_id.index, i));
+ }
}
- }
+ None
+ })), self.lazy_seq_ref(&tcx.lang_items.missing))
}
- rbml_w.end_tag();
-}
-
-fn encode_plugin_registrar_fn(ecx: &EncodeContext, rbml_w: &mut Encoder) {
- match ecx.tcx.sess.plugin_registrar_fn.get() {
- Some(id) => {
- let def_id = ecx.tcx.map.local_def_id(id);
- rbml_w.wr_tagged_u32(tag_plugin_registrar_fn, def_id.index.as_u32());
- }
- None => {}
+ fn encode_native_libraries(&mut self) -> LazySeq<(NativeLibraryKind, String)> {
+ let used_libraries = self.tcx.sess.cstore.used_libraries();
+ self.lazy_seq(used_libraries.into_iter().filter_map(|(lib, kind)| {
+ match kind {
+ cstore::NativeStatic => None, // these libraries are not propagated
+ cstore::NativeFramework | cstore::NativeUnknown => {
+ Some((kind, lib))
+ }
+ }
+ }))
}
-}
-
-fn encode_codemap(ecx: &EncodeContext, rbml_w: &mut Encoder) {
- rbml_w.start_tag(tag_codemap);
- let codemap = ecx.tcx.sess.codemap();
- for filemap in &codemap.files.borrow()[..] {
-
- if filemap.lines.borrow().is_empty() || filemap.is_imported() {
+ fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> {
+ let codemap = self.tcx.sess.codemap();
+ let all_filemaps = codemap.files.borrow();
+ self.lazy_seq_ref(all_filemaps.iter().filter(|filemap| {
// No need to export empty filemaps, as they can't contain spans
// that need translation.
// Also no need to re-export imported filemaps, as any downstream
// crate will import them from their original source.
- continue;
- }
-
- rbml_w.start_tag(tag_codemap_filemap);
- rbml_w.emit_opaque(|opaque_encoder| {
- filemap.encode(opaque_encoder)
- }).unwrap();
- rbml_w.end_tag();
- }
-
- rbml_w.end_tag();
-}
-
-/// Serialize the text of the exported macros
-fn encode_macro_defs(rbml_w: &mut Encoder,
- krate: &hir::Crate,
- tcx: TyCtxt) {
- rbml_w.start_tag(tag_macro_defs);
- for def in &krate.exported_macros {
- rbml_w.start_tag(tag_macro_def);
-
- encode_name(rbml_w, def.name);
- encode_attributes(rbml_w, &def.attrs);
- let &BytePos(lo) = &def.span.lo;
- let &BytePos(hi) = &def.span.hi;
- rbml_w.wr_tagged_u32(tag_macro_def_span_lo, lo);
- rbml_w.wr_tagged_u32(tag_macro_def_span_hi, hi);
-
- rbml_w.wr_tagged_str(tag_macro_def_body,
- &::syntax::print::pprust::tts_to_string(&def.body));
-
- rbml_w.end_tag();
- }
- rbml_w.end_tag();
-
- if tcx.sess.crate_types.borrow().contains(&CrateTypeRustcMacro) {
- let id = tcx.sess.derive_registrar_fn.get().unwrap();
- let did = tcx.map.local_def_id(id);
- rbml_w.wr_tagged_u32(tag_macro_derive_registrar, did.index.as_u32());
- }
-}
-
-fn encode_struct_field_attrs(ecx: &EncodeContext,
- rbml_w: &mut Encoder,
- krate: &hir::Crate) {
- struct StructFieldVisitor<'a, 'b:'a, 'c:'a, 'tcx:'b> {
- ecx: &'a EncodeContext<'b, 'tcx>,
- rbml_w: &'a mut Encoder<'c>,
- }
-
- impl<'a, 'b, 'c, 'tcx, 'v> Visitor<'v> for StructFieldVisitor<'a, 'b, 'c, 'tcx> {
- fn visit_struct_field(&mut self, field: &hir::StructField) {
- self.rbml_w.start_tag(tag_struct_field);
- let def_id = self.ecx.tcx.map.local_def_id(field.id);
- encode_def_id(self.rbml_w, def_id);
- encode_attributes(self.rbml_w, &field.attrs);
- self.rbml_w.end_tag();
- }
+ !filemap.lines.borrow().is_empty() && !filemap.is_imported()
+ }).map(|filemap| &**filemap))
+ }
+
+ /// Serialize the text of the exported macros
+ fn encode_macro_defs(&mut self) -> LazySeq<MacroDef> {
+ let tcx = self.tcx;
+ self.lazy_seq(tcx.map.krate().exported_macros.iter().map(|def| {
+ MacroDef {
+ name: def.name,
+ attrs: def.attrs.to_vec(),
+ span: def.span,
+ body: ::syntax::print::pprust::tts_to_string(&def.body)
+ }
+ }))
}
-
- rbml_w.start_tag(tag_struct_fields);
- krate.visit_all_items(&mut StructFieldVisitor { ecx: ecx, rbml_w: rbml_w });
- rbml_w.end_tag();
}
-
-
struct ImplVisitor<'a, 'tcx:'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impls: FnvHashMap<DefId, Vec<DefId>>
+ impls: FnvHashMap<DefId, Vec<DefIndex>>
}
impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> {
if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) {
self.impls.entry(trait_ref.def_id)
.or_insert(vec![])
- .push(impl_id);
+ .push(impl_id.index);
}
}
}
}
-/// Encodes an index, mapping each trait to its (local) implementations.
-fn encode_impls<'a>(ecx: &'a EncodeContext,
- krate: &hir::Crate,
- rbml_w: &'a mut Encoder) {
- let mut visitor = ImplVisitor {
- tcx: ecx.tcx,
- impls: FnvHashMap()
- };
- krate.visit_all_items(&mut visitor);
-
- rbml_w.start_tag(tag_impls);
- for (trait_, trait_impls) in visitor.impls {
- rbml_w.start_tag(tag_impls_trait);
- encode_def_id(rbml_w, trait_);
- for impl_ in trait_impls {
- rbml_w.wr_tagged_u64(tag_impls_trait_impl, def_to_u64(impl_));
- }
- rbml_w.end_tag();
- }
- rbml_w.end_tag();
-}
-
-// Encodes all reachable symbols in this crate into the metadata.
-//
-// This pass is seeded off the reachability list calculated in the
-// middle::reachable module but filters out items that either don't have a
-// symbol associated with them (they weren't translated) or if they're an FFI
-// definition (as that's not defined in this crate).
-fn encode_reachable(ecx: &EncodeContext, rbml_w: &mut Encoder) {
- rbml_w.start_tag(tag_reachable_ids);
- for &id in ecx.reachable {
- let def_id = ecx.tcx.map.local_def_id(id);
- rbml_w.wr_tagged_u32(tag_reachable_id, def_id.index.as_u32());
- }
- rbml_w.end_tag();
-}
-
-fn encode_crate_dep(rbml_w: &mut Encoder,
- dep: &cstore::CrateMetadata) {
- rbml_w.start_tag(tag_crate_dep);
- rbml_w.wr_tagged_str(tag_crate_dep_crate_name, &dep.name());
- let hash = decoder::get_crate_hash(dep.data());
- rbml_w.wr_tagged_u64(tag_crate_dep_hash, hash.as_u64());
- rbml_w.wr_tagged_u8(tag_crate_dep_explicitly_linked,
- dep.explicitly_linked.get() as u8);
- rbml_w.end_tag();
-}
-
-fn encode_hash(rbml_w: &mut Encoder, hash: &Svh) {
- rbml_w.wr_tagged_u64(tag_crate_hash, hash.as_u64());
-}
-
-fn encode_rustc_version(rbml_w: &mut Encoder) {
- rbml_w.wr_tagged_str(tag_rustc_version, &rustc_version());
-}
+impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
+ /// Encodes an index, mapping each trait to its (local) implementations.
+ fn encode_impls(&mut self) -> LazySeq<TraitImpls> {
+ let mut visitor = ImplVisitor {
+ tcx: self.tcx,
+ impls: FnvHashMap()
+ };
+ self.tcx.map.krate().visit_all_items(&mut visitor);
-fn encode_crate_name(rbml_w: &mut Encoder, crate_name: &str) {
- rbml_w.wr_tagged_str(tag_crate_crate_name, crate_name);
-}
-
-fn encode_crate_disambiguator(rbml_w: &mut Encoder, crate_disambiguator: &str) {
- rbml_w.wr_tagged_str(tag_crate_disambiguator, crate_disambiguator);
-}
-
-fn encode_crate_triple(rbml_w: &mut Encoder, triple: &str) {
- rbml_w.wr_tagged_str(tag_crate_triple, triple);
-}
+ let all_impls: Vec<_> = visitor.impls.into_iter().map(|(trait_def_id, impls)| {
+ TraitImpls {
+ trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
+ impls: self.lazy_seq(impls)
+ }
+ }).collect();
-fn encode_dylib_dependency_formats(rbml_w: &mut Encoder, ecx: &EncodeContext) {
- let tag = tag_dylib_dependency_formats;
- match ecx.tcx.sess.dependency_formats.borrow().get(&config::CrateTypeDylib) {
- Some(arr) => {
- let s = arr.iter().enumerate().filter_map(|(i, slot)| {
- let kind = match *slot {
- Linkage::NotLinked |
- Linkage::IncludedFromDylib => return None,
- Linkage::Dynamic => "d",
- Linkage::Static => "s",
- };
- Some(format!("{}:{}", i + 1, kind))
- }).collect::<Vec<String>>();
- rbml_w.wr_tagged_str(tag, &s.join(","));
- }
- None => {
- rbml_w.wr_tagged_str(tag, "");
- }
+ self.lazy_seq(all_impls)
}
-}
-fn encode_panic_strategy(rbml_w: &mut Encoder, ecx: &EncodeContext) {
- match ecx.tcx.sess.opts.cg.panic {
- PanicStrategy::Unwind => {
- rbml_w.wr_tagged_u8(tag_panic_strategy, b'U');
- }
- PanicStrategy::Abort => {
- rbml_w.wr_tagged_u8(tag_panic_strategy, b'A');
+ // Encodes all reachable symbols in this crate into the metadata.
+ //
+ // This pass is seeded off the reachability list calculated in the
+ // middle::reachable module but filters out items that either don't have a
+ // symbol associated with them (they weren't translated) or if they're an FFI
+ // definition (as that's not defined in this crate).
+ fn encode_reachable(&mut self) -> LazySeq<DefIndex> {
+ let reachable = self.reachable;
+ let tcx = self.tcx;
+ self.lazy_seq(reachable.iter().map(|&id| tcx.map.local_def_id(id).index))
+ }
+
+ fn encode_dylib_dependency_formats(&mut self) -> LazySeq<Option<LinkagePreference>> {
+ match self.tcx.sess.dependency_formats.borrow().get(&config::CrateTypeDylib) {
+ Some(arr) => {
+ self.lazy_seq(arr.iter().map(|slot| {
+ match *slot {
+ Linkage::NotLinked |
+ Linkage::IncludedFromDylib => None,
+
+ Linkage::Dynamic => Some(LinkagePreference::RequireDynamic),
+ Linkage::Static => Some(LinkagePreference::RequireStatic),
+ }
+ }))
+ }
+ None => LazySeq::empty()
}
}
-}
-
-pub fn encode_metadata(ecx: EncodeContext, krate: &hir::Crate) -> Vec<u8> {
- let mut wr = Cursor::new(Vec::new());
- {
- let mut rbml_w = Encoder::new(&mut wr);
- encode_metadata_inner(&mut rbml_w, &ecx, krate)
- }
+ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
+ let mut i = self.position();
+ let crate_deps = self.encode_crate_deps();
+ let dylib_dependency_formats = self.encode_dylib_dependency_formats();
+ let dep_bytes = self.position() - i;
+
+ // Encode the language items.
+ i = self.position();
+ let (lang_items, lang_items_missing) = self.encode_lang_items();
+ let lang_item_bytes = self.position() - i;
+
+ // Encode the native libraries used
+ i = self.position();
+ let native_libraries = self.encode_native_libraries();
+ let native_lib_bytes = self.position() - i;
+
+ // Encode codemap
+ i = self.position();
+ let codemap = self.encode_codemap();
+ let codemap_bytes = self.position() - i;
+
+ // Encode macro definitions
+ i = self.position();
+ let macro_defs = self.encode_macro_defs();
+ let macro_defs_bytes = self.position() - i;
+
+ // Encode the def IDs of impls, for coherence checking.
+ i = self.position();
+ let impls = self.encode_impls();
+ let impl_bytes = self.position() - i;
+
+ // Encode reachability info.
+ i = self.position();
+ let reachable_ids = self.encode_reachable();
+ let reachable_bytes = self.position() - i;
+
+ // Encode and index the items.
+ i = self.position();
+ let items = self.encode_info_for_items();
+ let item_bytes = self.position() - i;
+
+ i = self.position();
+ let index = items.write_index(&mut self.opaque.cursor);
+ let index_bytes = self.position() - i;
+
+ let tcx = self.tcx;
+ let link_meta = self.link_meta;
+ let is_rustc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeRustcMacro);
+ let root = self.lazy(&CrateRoot {
+ rustc_version: RUSTC_VERSION.to_string(),
+ name: link_meta.crate_name.clone(),
+ triple: tcx.sess.opts.target_triple.clone(),
+ hash: link_meta.crate_hash,
+ disambiguator: tcx.sess.local_crate_disambiguator().to_string(),
+ panic_strategy: tcx.sess.panic_strategy(),
+ plugin_registrar_fn: tcx.sess.plugin_registrar_fn.get().map(|id| {
+ tcx.map.local_def_id(id).index
+ }),
+ macro_derive_registrar: if is_rustc_macro {
+ let id = tcx.sess.derive_registrar_fn.get().unwrap();
+ Some(tcx.map.local_def_id(id).index)
+ } else {
+ None
+ },
+
+ crate_deps: crate_deps,
+ dylib_dependency_formats: dylib_dependency_formats,
+ lang_items: lang_items,
+ lang_items_missing: lang_items_missing,
+ native_libraries: native_libraries,
+ codemap: codemap,
+ macro_defs: macro_defs,
+ impls: impls,
+ reachable_ids: reachable_ids,
+ index: index,
+ });
- // RBML compacts the encoded bytes whenever appropriate,
- // so there are some garbages left after the end of the data.
- let metalen = wr.seek(SeekFrom::Current(0)).unwrap() as usize;
- let mut v = wr.into_inner();
- v.truncate(metalen);
- assert_eq!(v.len(), metalen);
-
- // And here we run into yet another obscure archive bug: in which metadata
- // loaded from archives may have trailing garbage bytes. Awhile back one of
- // our tests was failing sporadically on the OSX 64-bit builders (both nopt
- // and opt) by having rbml generate an out-of-bounds panic when looking at
- // metadata.
- //
- // Upon investigation it turned out that the metadata file inside of an rlib
- // (and ar archive) was being corrupted. Some compilations would generate a
- // metadata file which would end in a few extra bytes, while other
- // compilations would not have these extra bytes appended to the end. These
- // extra bytes were interpreted by rbml as an extra tag, so they ended up
- // being interpreted causing the out-of-bounds.
- //
- // The root cause of why these extra bytes were appearing was never
- // discovered, and in the meantime the solution we're employing is to insert
- // the length of the metadata to the start of the metadata. Later on this
- // will allow us to slice the metadata to the precise length that we just
- // generated regardless of trailing bytes that end up in it.
- //
- // We also need to store the metadata encoding version here, because
- // rlibs don't have it. To get older versions of rustc to ignore
- // this metadata, there are 4 zero bytes at the start, which are
- // treated as a length of 0 by old compilers.
-
- let len = v.len();
- let mut result = vec![];
- result.push(0);
- result.push(0);
- result.push(0);
- result.push(0);
- result.extend(metadata_encoding_version.iter().cloned());
- result.push((len >> 24) as u8);
- result.push((len >> 16) as u8);
- result.push((len >> 8) as u8);
- result.push((len >> 0) as u8);
- result.extend(v);
- result
-}
+ let total_bytes = self.position();
-fn encode_metadata_inner(rbml_w: &mut Encoder,
- ecx: &EncodeContext,
- krate: &hir::Crate) {
- struct Stats {
- attr_bytes: u64,
- dep_bytes: u64,
- lang_item_bytes: u64,
- native_lib_bytes: u64,
- plugin_registrar_fn_bytes: u64,
- codemap_bytes: u64,
- macro_defs_bytes: u64,
- impl_bytes: u64,
- reachable_bytes: u64,
- item_bytes: u64,
- index_bytes: u64,
- xref_bytes: u64,
- zero_bytes: u64,
- total_bytes: u64,
- }
- let mut stats = Stats {
- attr_bytes: 0,
- dep_bytes: 0,
- lang_item_bytes: 0,
- native_lib_bytes: 0,
- plugin_registrar_fn_bytes: 0,
- codemap_bytes: 0,
- macro_defs_bytes: 0,
- impl_bytes: 0,
- reachable_bytes: 0,
- item_bytes: 0,
- index_bytes: 0,
- xref_bytes: 0,
- zero_bytes: 0,
- total_bytes: 0,
- };
-
- encode_rustc_version(rbml_w);
- encode_crate_name(rbml_w, &ecx.link_meta.crate_name);
- encode_crate_triple(rbml_w, &ecx.tcx.sess.opts.target_triple);
- encode_hash(rbml_w, &ecx.link_meta.crate_hash);
- encode_crate_disambiguator(rbml_w, &ecx.tcx.sess.local_crate_disambiguator());
- encode_dylib_dependency_formats(rbml_w, &ecx);
- encode_panic_strategy(rbml_w, &ecx);
-
- let mut i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_attributes(rbml_w, &krate.attrs);
- stats.attr_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_crate_deps(rbml_w, ecx.cstore);
- stats.dep_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- // Encode the language items.
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_lang_items(&ecx, rbml_w);
- stats.lang_item_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- // Encode the native libraries used
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_native_libraries(&ecx, rbml_w);
- stats.native_lib_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- // Encode the plugin registrar function
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_plugin_registrar_fn(&ecx, rbml_w);
- stats.plugin_registrar_fn_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- // Encode codemap
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_codemap(&ecx, rbml_w);
- stats.codemap_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- // Encode macro definitions
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_macro_defs(rbml_w, krate, ecx.tcx);
- stats.macro_defs_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- // Encode the def IDs of impls, for coherence checking.
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_impls(&ecx, krate, rbml_w);
- stats.impl_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- // Encode reachability info.
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_reachable(&ecx, rbml_w);
- stats.reachable_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- // Encode and index the items.
- rbml_w.start_tag(tag_items);
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- let (items, xrefs) = encode_info_for_items(&ecx, rbml_w);
- stats.item_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
- rbml_w.end_tag();
-
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_item_index(rbml_w, items);
- stats.index_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
- encode_xrefs(&ecx, rbml_w, xrefs);
- stats.xref_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i;
-
- encode_struct_field_attrs(&ecx, rbml_w, krate);
-
- stats.total_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap();
-
- if ecx.tcx.sess.meta_stats() {
- for e in rbml_w.writer.get_ref() {
- if *e == 0 {
- stats.zero_bytes += 1;
+ if self.tcx.sess.meta_stats() {
+ let mut zero_bytes = 0;
+ for e in self.opaque.cursor.get_ref() {
+ if *e == 0 {
+ zero_bytes += 1;
+ }
}
+
+ println!("metadata stats:");
+ println!(" dep bytes: {}", dep_bytes);
+ println!(" lang item bytes: {}", lang_item_bytes);
+ println!(" native bytes: {}", native_lib_bytes);
+ println!(" codemap bytes: {}", codemap_bytes);
+ println!(" macro def bytes: {}", macro_defs_bytes);
+ println!(" impl bytes: {}", impl_bytes);
+ println!(" reachable bytes: {}", reachable_bytes);
+ println!(" item bytes: {}", item_bytes);
+ println!(" index bytes: {}", index_bytes);
+ println!(" zero bytes: {}", zero_bytes);
+ println!(" total bytes: {}", total_bytes);
}
- println!("metadata stats:");
- println!(" attribute bytes: {}", stats.attr_bytes);
- println!(" dep bytes: {}", stats.dep_bytes);
- println!(" lang item bytes: {}", stats.lang_item_bytes);
- println!(" native bytes: {}", stats.native_lib_bytes);
- println!("plugin registrar bytes: {}", stats.plugin_registrar_fn_bytes);
- println!(" codemap bytes: {}", stats.codemap_bytes);
- println!(" macro def bytes: {}", stats.macro_defs_bytes);
- println!(" impl bytes: {}", stats.impl_bytes);
- println!(" reachable bytes: {}", stats.reachable_bytes);
- println!(" item bytes: {}", stats.item_bytes);
- println!(" index bytes: {}", stats.index_bytes);
- println!(" xref bytes: {}", stats.xref_bytes);
- println!(" zero bytes: {}", stats.zero_bytes);
- println!(" total bytes: {}", stats.total_bytes);
+ root
}
}
-// Get the encoded string for a type
-pub fn encoded_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- t: Ty<'tcx>,
- def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String)
- -> Vec<u8> {
- let mut wr = Cursor::new(Vec::new());
- tyencode::enc_ty(&mut wr, &tyencode::ctxt {
- diag: tcx.sess.diagnostic(),
- ds: def_id_to_string,
+// NOTE(eddyb) The following comment was preserved for posterity, even
+// though it's no longer relevant as EBML (which uses nested & tagged
+// "documents") was replaced with a scheme that can't go out of bounds.
+//
+// And here we run into yet another obscure archive bug: in which metadata
+// loaded from archives may have trailing garbage bytes. Awhile back one of
+// our tests was failing sporadically on the OSX 64-bit builders (both nopt
+// and opt) by having ebml generate an out-of-bounds panic when looking at
+// metadata.
+//
+// Upon investigation it turned out that the metadata file inside of an rlib
+// (and ar archive) was being corrupted. Some compilations would generate a
+// metadata file which would end in a few extra bytes, while other
+// compilations would not have these extra bytes appended to the end. These
+// extra bytes were interpreted by ebml as an extra tag, so they ended up
+// being interpreted causing the out-of-bounds.
+//
+// The root cause of why these extra bytes were appearing was never
+// discovered, and in the meantime the solution we're employing is to insert
+// the length of the metadata to the start of the metadata. Later on this
+// will allow us to slice the metadata to the precise length that we just
+// generated regardless of trailing bytes that end up in it.
+
+pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ cstore: &cstore::CStore,
+ reexports: &def::ExportMap,
+ link_meta: &LinkMeta,
+ reachable: &NodeSet,
+ mir_map: &MirMap<'tcx>) -> Vec<u8> {
+ let mut cursor = Cursor::new(vec![]);
+ cursor.write_all(METADATA_HEADER).unwrap();
+
+ // Will be filed with the root position after encoding everything.
+ cursor.write_all(&[0, 0, 0, 0]).unwrap();
+
+ let root = EncodeContext {
+ opaque: opaque::Encoder::new(&mut cursor),
tcx: tcx,
- abbrevs: &RefCell::new(FnvHashMap())
- }, t);
- wr.into_inner()
+ reexports: reexports,
+ link_meta: link_meta,
+ cstore: cstore,
+ reachable: reachable,
+ mir_map: mir_map,
+ lazy_state: LazyState::NoNode,
+ type_shorthands: Default::default(),
+ predicate_shorthands: Default::default()
+ }.encode_crate_root();
+ let mut result = cursor.into_inner();
+
+ // Encode the root position.
+ let header = METADATA_HEADER.len();
+ let pos = root.position;
+ result[header + 0] = (pos >> 24) as u8;
+ result[header + 1] = (pos >> 16) as u8;
+ result[header + 2] = (pos >> 8) as u8;
+ result[header + 3] = (pos >> 0) as u8;
+
+ result
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use schema::*;
+
use rustc::hir::def_id::{DefId, DefIndex};
-use rbml;
use std::io::{Cursor, Write};
use std::slice;
use std::u32;
-/// As part of the metadata, we generate an index that stores, for
-/// each DefIndex, the position of the corresponding RBML document (if
-/// any). This is just a big `[u32]` slice, where an entry of
-/// `u32::MAX` indicates that there is no RBML document. This little
-/// struct just stores the offsets within the metadata of the start
-/// and end of this slice. These are actually part of an RBML
-/// document, but for looking things up in the metadata, we just
-/// discard the RBML positioning and jump directly to the data.
-pub struct Index {
- data_start: usize,
- data_end: usize,
-}
-
-impl Index {
- /// Given the RBML doc representing the index, save the offests
- /// for later.
- pub fn from_rbml(index: rbml::Doc) -> Index {
- Index { data_start: index.start, data_end: index.end }
- }
-
- /// Given the metadata, extract out the offset of a particular
- /// DefIndex (if any).
- #[inline(never)]
- pub fn lookup_item(&self, bytes: &[u8], def_index: DefIndex) -> Option<u32> {
- let words = bytes_to_words(&bytes[self.data_start..self.data_end]);
- let index = def_index.as_usize();
-
- debug!("lookup_item: index={:?} words.len={:?}",
- index, words.len());
-
- let position = u32::from_be(words[index]);
- if position == u32::MAX {
- debug!("lookup_item: position=u32::MAX");
- None
- } else {
- debug!("lookup_item: position={:?}", position);
- Some(position)
- }
- }
-}
-
/// While we are generating the metadata, we also track the position
/// of each DefIndex. It is not required that all definitions appear
/// in the metadata, nor that they are serialized in order, and
/// `u32::MAX`. Whenever an index is visited, we fill in the
/// appropriate spot by calling `record_position`. We should never
/// visit the same index twice.
-pub struct IndexData {
+pub struct Index {
positions: Vec<u32>,
}
-impl IndexData {
- pub fn new(max_index: usize) -> IndexData {
- IndexData {
+impl Index {
+ pub fn new(max_index: usize) -> Index {
+ Index {
positions: vec![u32::MAX; max_index]
}
}
- pub fn record(&mut self, def_id: DefId, position: u64) {
+ pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry>) {
assert!(def_id.is_local());
- self.record_index(def_id.index, position);
+ self.record_index(def_id.index, entry);
}
- pub fn record_index(&mut self, item: DefIndex, position: u64) {
+ pub fn record_index(&mut self, item: DefIndex, entry: Lazy<Entry>) {
let item = item.as_usize();
- assert!(position < (u32::MAX as u64));
- let position = position as u32;
+ assert!(entry.position < (u32::MAX as usize));
+ let position = entry.position as u32;
assert!(self.positions[item] == u32::MAX,
"recorded position for item {:?} twice, first at {:?} and now at {:?}",
item, self.positions[item], position);
- self.positions[item] = position;
+ self.positions[item] = position.to_le();
}
- pub fn write_index(&self, buf: &mut Cursor<Vec<u8>>) {
- for &position in &self.positions {
- write_be_u32(buf, position);
- }
+ pub fn write_index(&self, buf: &mut Cursor<Vec<u8>>) -> LazySeq<Index> {
+ let pos = buf.position();
+ buf.write_all(words_to_bytes(&self.positions)).unwrap();
+ LazySeq::with_position_and_length(pos as usize, self.positions.len())
}
}
-/// A dense index with integer keys. Different API from IndexData (should
-/// these be merged?)
-pub struct DenseIndex {
- start: usize,
- end: usize
-}
+impl<'tcx> LazySeq<Index> {
+ /// Given the metadata, extract out the offset of a particular
+ /// DefIndex (if any).
+ #[inline(never)]
+ pub fn lookup(&self, bytes: &[u8], def_index: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
+ let words = &bytes_to_words(&bytes[self.position..])[..self.len];
+ let index = def_index.as_usize();
-impl DenseIndex {
- pub fn lookup(&self, buf: &[u8], ix: u32) -> Option<u32> {
- let data = bytes_to_words(&buf[self.start..self.end]);
- data.get(ix as usize).map(|d| u32::from_be(*d))
- }
- pub fn from_buf(buf: &[u8], start: usize, end: usize) -> Self {
- assert!((end-start)%4 == 0 && start <= end && end <= buf.len());
- DenseIndex {
- start: start,
- end: end
+ debug!("Index::lookup: index={:?} words.len={:?}",
+ index, words.len());
+
+ let position = u32::from_le(words[index]);
+ if position == u32::MAX {
+ debug!("Index::lookup: position=u32::MAX");
+ None
+ } else {
+ debug!("Index::lookup: position={:?}", position);
+ Some(Lazy::with_position(position as usize))
}
}
-}
-
-pub fn write_dense_index(entries: Vec<u32>, buf: &mut Cursor<Vec<u8>>) {
- let elen = entries.len();
- assert!(elen < u32::MAX as usize);
- for entry in entries {
- write_be_u32(buf, entry);
+ pub fn iter_enumerated<'a>(&self, bytes: &'a [u8])
+ -> impl Iterator<Item=(DefIndex, Lazy<Entry<'tcx>>)> + 'a {
+ let words = &bytes_to_words(&bytes[self.position..])[..self.len];
+ words.iter().enumerate().filter_map(|(index, &position)| {
+ if position == u32::MAX {
+ None
+ } else {
+ let position = u32::from_le(position) as usize;
+ Some((DefIndex::new(index), Lazy::with_position(position)))
+ }
+ })
}
-
- info!("write_dense_index: {} entries", elen);
}
-fn write_be_u32<W: Write>(w: &mut W, u: u32) {
- let _ = w.write_all(&[
- (u >> 24) as u8,
- (u >> 16) as u8,
- (u >> 8) as u8,
- (u >> 0) as u8,
- ]);
+fn bytes_to_words(b: &[u8]) -> &[u32] {
+ unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len() / 4) }
}
-fn bytes_to_words(b: &[u8]) -> &[u32] {
- assert!(b.len() % 4 == 0);
- unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len()/4) }
+fn words_to_bytes(w: &[u32]) -> &[u8] {
+ unsafe { slice::from_raw_parts(w.as_ptr() as *const u8, w.len() * 4) }
}
//! incremental compilation purposes.
//!
//! The `IndexBuilder` facilitates both of these. It is created
-//! with an RBML encoder isntance (`rbml_w`) along with an
-//! `EncodingContext` (`ecx`), which it encapsulates. It has one main
-//! method, `record()`. You invoke `record` like so to create a new
-//! `data_item` element in the list:
+//! with an `EncodingContext` (`ecx`), which it encapsulates.
+//! It has one main method, `record()`. You invoke `record`
+//! like so to create a new `data_item` element in the list:
//!
//! ```
//! index.record(some_def_id, callback_fn, data)
//!
//! What record will do is to (a) record the current offset, (b) emit
//! the `common::data_item` tag, and then call `callback_fn` with the
-//! given data as well as an `ItemContentBuilder`. Once `callback_fn`
+//! given data as well as the `EncodingContext`. Once `callback_fn`
//! returns, the `common::data_item` tag will be closed.
//!
-//! The `ItemContentBuilder` is another type that just offers access
-//! to the `ecx` and `rbml_w` that were given in, as well as
-//! maintaining a list of `xref` instances, which are used to extract
-//! common data so it is not re-serialized.
-//!
-//! `ItemContentBuilder` is a distinct type which does not offer the
-//! `record` method, so that we can ensure that `common::data_item` elements
-//! are never nested.
+//! `EncodingContext` does not offer the `record` method, so that we
+//! can ensure that `common::data_item` elements are never nested.
//!
//! In addition, while the `callback_fn` is executing, we will push a
//! task `MetaData(some_def_id)`, which can then observe the
//! give a callback fn, rather than taking a closure: it allows us to
//! easily control precisely what data is given to that fn.
-use common::tag_items_data_item;
use encoder::EncodeContext;
-use index::IndexData;
-use rbml::writer::Encoder;
+use index::Index;
+use schema::*;
+
use rustc::dep_graph::DepNode;
use rustc::hir;
use rustc::hir::def_id::DefId;
-use rustc::ty::{self, TyCtxt};
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc::ty::TyCtxt;
use syntax::ast;
+use std::ops::{Deref, DerefMut};
+
/// Builder that can encode new items, adding them into the index.
/// Item encoding cannot be nested.
-pub struct IndexBuilder<'a, 'tcx: 'a, 'encoder: 'a> {
- items: IndexData,
- builder: ItemContentBuilder<'a, 'tcx, 'encoder>,
+pub struct IndexBuilder<'a, 'b: 'a, 'tcx: 'b> {
+ items: Index,
+ pub ecx: &'a mut EncodeContext<'b, 'tcx>,
}
-/// Builder that can encode the content of items, but can't start a
-/// new item itself. Most code is attached to here.
-pub struct ItemContentBuilder<'a, 'tcx: 'a, 'encoder: 'a> {
- xrefs: FnvHashMap<XRef<'tcx>, u32>, // sequentially-assigned
- pub ecx: &'a EncodeContext<'a, 'tcx>,
- pub rbml_w: &'a mut Encoder<'encoder>,
+impl<'a, 'b, 'tcx> Deref for IndexBuilder<'a, 'b, 'tcx> {
+ type Target = EncodeContext<'b, 'tcx>;
+ fn deref(&self) -> &Self::Target {
+ self.ecx
+ }
}
-/// "interned" entries referenced by id
-#[derive(PartialEq, Eq, Hash)]
-pub enum XRef<'tcx> { Predicate(ty::Predicate<'tcx>) }
+impl<'a, 'b, 'tcx> DerefMut for IndexBuilder<'a, 'b, 'tcx> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.ecx
+ }
+}
-impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> {
- pub fn new(ecx: &'a EncodeContext<'a, 'tcx>,
- rbml_w: &'a mut Encoder<'encoder>)
- -> Self {
+impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
+ pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
IndexBuilder {
- items: IndexData::new(ecx.tcx.map.num_local_def_ids()),
- builder: ItemContentBuilder {
- ecx: ecx,
- xrefs: FnvHashMap(),
- rbml_w: rbml_w,
- },
+ items: Index::new(ecx.tcx.map.num_local_def_ids()),
+ ecx: ecx,
}
}
- pub fn ecx(&self) -> &'a EncodeContext<'a, 'tcx> {
- self.builder.ecx()
- }
-
/// Emit the data for a def-id to the metadata. The function to
/// emit the data is `op`, and it will be given `data` as
- /// arguments. This `record` function will start/end an RBML tag
- /// and record the current offset for use in the index, calling
- /// `op` to generate the data in the RBML tag.
+ /// arguments. This `record` function will call `op` to generate
+ /// the `Entry` (which may point to other encoded information)
+ /// and will then record the `Lazy<Entry>` for use in the index.
///
/// In addition, it will setup a dep-graph task to track what data
/// `op` accesses to generate the metadata, which is later used by
/// content system.
pub fn record<DATA>(&mut self,
id: DefId,
- op: fn(&mut ItemContentBuilder<'a, 'tcx, 'encoder>, DATA),
+ op: fn(&mut EncodeContext<'b, 'tcx>, DATA) -> Entry<'tcx>,
data: DATA)
where DATA: DepGraphRead
{
- let position = self.builder.rbml_w.mark_stable_position();
- self.items.record(id, position);
- let _task = self.ecx().tcx.dep_graph.in_task(DepNode::MetaData(id));
- self.builder.rbml_w.start_tag(tag_items_data_item).unwrap();
- data.read(self.ecx().tcx);
- op(&mut self.builder, data);
- self.builder.rbml_w.end_tag().unwrap();
- }
-
- pub fn into_fields(self) -> (IndexData, FnvHashMap<XRef<'tcx>, u32>) {
- (self.items, self.builder.xrefs)
- }
-}
-
-impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> {
- pub fn ecx(&self) -> &'a EncodeContext<'a, 'tcx> {
- self.ecx
+ let _task = self.tcx.dep_graph.in_task(DepNode::MetaData(id));
+ data.read(self.tcx);
+ let entry = op(&mut self.ecx, data);
+ self.items.record(id, self.ecx.lazy(&entry));
}
- pub fn add_xref(&mut self, xref: XRef<'tcx>) -> u32 {
- let old_len = self.xrefs.len() as u32;
- *self.xrefs.entry(xref).or_insert(old_len)
+ pub fn into_items(self) -> Index {
+ self.items
}
}
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![cfg_attr(not(stage0), deny(warnings))]
+#![feature(conservative_impl_trait)]
+#![feature(core_intrinsics)]
#![feature(box_patterns)]
#![feature(dotdot_in_tuple_patterns)]
-#![feature(enumset)]
#![feature(question_mark)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_macro_lib)]
#![feature(rustc_macro_internals)]
#![feature(rustc_private)]
+#![feature(specialization)]
#![feature(staged_api)]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
-#[macro_use] #[no_link] extern crate rustc_bitflags;
extern crate syntax_pos;
extern crate flate;
-extern crate rbml;
extern crate serialize as rustc_serialize; // used by deriving
extern crate rustc_errors as errors;
extern crate syntax_ext;
extern crate rustc_macro;
extern crate rustc_const_math;
-pub use rustc::middle;
+mod diagnostics;
-#[macro_use]
-mod macros;
-
-pub mod diagnostics;
-
-pub mod astencode;
-pub mod common;
-pub mod def_key;
-pub mod tyencode;
-pub mod tydecode;
-pub mod encoder;
+mod astencode;
mod index_builder;
-pub mod decoder;
+mod index;
+mod encoder;
+mod decoder;
+mod csearch;
+mod schema;
+
pub mod creader;
-pub mod csearch;
pub mod cstore;
-pub mod index;
pub mod loader;
pub mod macro_import;
-pub mod tls_context;
+
+__build_diagnostic_array! { librustc_metadata, DIAGNOSTICS }
//! no means all of the necessary details. Take a look at the rest of
//! metadata::loader or metadata::creader for all the juicy details!
-use cstore::{MetadataBlob, MetadataVec, MetadataArchive};
-use common::{metadata_encoding_version, rustc_version};
-use decoder;
+use cstore::MetadataBlob;
+use schema::{METADATA_HEADER, RUSTC_VERSION};
use rustc::hir::svh::Svh;
use rustc::session::Session;
}
if !self.rejected_via_version.is_empty() {
err.help(&format!("please recompile that crate using this compiler ({})",
- rustc_version()));
+ RUSTC_VERSION));
let mismatches = self.rejected_via_version.iter();
for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() {
err.note(&format!("crate `{}` path #{}: {} compiled by {:?}",
if let Some((ref p, _)) = lib.rlib {
err.note(&format!("path: {}", p.display()));
}
- let data = lib.metadata.as_slice();
- let name = decoder::get_crate_name(data);
- note_crate_name(&mut err, &name);
+ note_crate_name(&mut err, &lib.metadata.get_root().name);
}
err.emit();
None
info!("{} reading metadata from: {}", flavor, lib.display());
let (hash, metadata) = match get_metadata_section(self.target, flavor, &lib) {
Ok(blob) => {
- if let Some(h) = self.crate_matches(blob.as_slice(), &lib) {
+ if let Some(h) = self.crate_matches(&blob, &lib) {
(h, blob)
} else {
info!("metadata mismatch");
}
}
- fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> Option<Svh> {
- let crate_rustc_version = decoder::crate_rustc_version(crate_data);
- if crate_rustc_version != Some(rustc_version()) {
- let message = crate_rustc_version.unwrap_or(format!("an unknown compiler"));
- info!("Rejecting via version: expected {} got {}", rustc_version(), message);
+ fn crate_matches(&mut self, metadata: &MetadataBlob, libpath: &Path) -> Option<Svh> {
+ let root = metadata.get_root();
+ if root.rustc_version != RUSTC_VERSION {
+ info!("Rejecting via version: expected {} got {}",
+ RUSTC_VERSION, root.rustc_version);
self.rejected_via_version.push(CrateMismatch {
path: libpath.to_path_buf(),
- got: message
+ got: root.rustc_version
});
return None;
}
if self.should_match_name {
- match decoder::maybe_get_crate_name(crate_data) {
- Some(ref name) if self.crate_name == *name => {}
- _ => { info!("Rejecting via crate name"); return None }
+ if self.crate_name != root.name {
+ info!("Rejecting via crate name"); return None;
}
}
- let hash = match decoder::maybe_get_crate_hash(crate_data) {
- None => { info!("Rejecting via lack of crate hash"); return None; }
- Some(h) => h,
- };
- let triple = match decoder::get_crate_triple(crate_data) {
- None => { debug!("triple not present"); return None }
- Some(t) => t,
- };
- if triple != self.triple {
- info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
+ if root.triple != self.triple {
+ info!("Rejecting via crate triple: expected {} got {}",
+ self.triple, root.triple);
self.rejected_via_triple.push(CrateMismatch {
path: libpath.to_path_buf(),
- got: triple.to_string()
+ got: root.triple
});
return None;
}
if let Some(myhash) = self.hash {
- if *myhash != hash {
- info!("Rejecting via hash: expected {} got {}", *myhash, hash);
+ if *myhash != root.hash {
+ info!("Rejecting via hash: expected {} got {}",
+ *myhash, root.hash);
self.rejected_via_hash.push(CrateMismatch {
path: libpath.to_path_buf(),
got: myhash.to_string()
}
}
- Some(hash)
+ Some(root.hash)
}
fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path)
-> Result<(), String>
{
- let data = blob.as_slice_raw();
- if data.len() < 4+metadata_encoding_version.len() ||
- !<[u8]>::eq(&data[..4], &[0, 0, 0, 0]) ||
- &data[4..4+metadata_encoding_version.len()] != metadata_encoding_version
- {
+ if !blob.is_compatible() {
Err((format!("incompatible metadata version found: '{}'",
filename.display())))
} else {
filename.display()));
}
};
- return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) {
+ return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) {
None => Err(format!("failed to read rlib metadata: '{}'",
filename.display())),
Some(blob) => {
let cbuf = llvm::LLVMGetSectionContents(si.llsi);
let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
let cvbuf: *const u8 = cbuf as *const u8;
- let vlen = metadata_encoding_version.len();
+ let vlen = METADATA_HEADER.len();
debug!("checking {} bytes of metadata-version stamp",
vlen);
let minsz = cmp::min(vlen, csz);
let buf0 = slice::from_raw_parts(cvbuf, minsz);
- let version_ok = buf0 == metadata_encoding_version;
+ let version_ok = buf0 == METADATA_HEADER;
if !version_ok {
return Err((format!("incompatible metadata version found: '{}'",
filename.display())));
let bytes = slice::from_raw_parts(cvbuf1, csz - vlen);
match flate::inflate_bytes(bytes) {
Ok(inflated) => {
- let blob = MetadataVec(inflated);
+ let blob = MetadataBlob::Inflated(inflated);
verify_decompressed_encoding_version(&blob, filename)?;
return Ok(blob);
}
let filename = path.file_name().unwrap().to_str().unwrap();
let flavor = if filename.ends_with(".rlib") { CrateFlavor::Rlib } else { CrateFlavor::Dylib };
match get_metadata_section(target, flavor, path) {
- Ok(bytes) => decoder::list_crate_metadata(bytes.as_slice(), out),
+ Ok(metadata) => metadata.list_crate_metadata(out),
Err(msg) => {
write!(out, "{}\n", msg)
}
//! Used by `rustc` when loading a crate with exported macros.
use std::collections::HashSet;
+use std::rc::Rc;
use std::env;
use std::mem;
-use creader::{CrateReader, Macros};
-use cstore::CStore;
+use creader::{CrateLoader, Macros};
use rustc::hir::def_id::DefIndex;
-use rustc::middle;
+use rustc::middle::cstore::LoadedMacro;
use rustc::session::Session;
use rustc::util::nodemap::FnvHashMap;
use rustc_back::dynamic_lib::DynamicLibrary;
use rustc_macro::__internal::Registry;
use syntax::ast;
use syntax::attr;
-use syntax::ext::base::LoadedMacro;
use syntax::parse::token;
use syntax_ext::deriving::custom::CustomDerive;
use syntax_pos::Span;
-pub struct MacroLoader<'a> {
- sess: &'a Session,
- reader: CrateReader<'a>,
-}
-
-impl<'a> MacroLoader<'a> {
- pub fn new(sess: &'a Session,
- cstore: &'a CStore,
- crate_name: &str,
- crate_config: ast::CrateConfig)
- -> MacroLoader<'a> {
- MacroLoader {
- sess: sess,
- reader: CrateReader::new(sess, cstore, crate_name, crate_config),
- }
- }
-}
-
pub fn call_bad_macro_reexport(a: &Session, b: Span) {
span_err!(a, b, E0467, "bad macro reexport");
}
pub type MacroSelection = FnvHashMap<token::InternedString, Span>;
-impl<'a> middle::cstore::MacroLoader for MacroLoader<'a> {
+pub fn load_macros(loader: &mut CrateLoader, extern_crate: &ast::Item, allows_macros: bool)
+ -> Vec<LoadedMacro> {
+ loader.load_crate(extern_crate, allows_macros)
+}
+
+impl<'a> CrateLoader<'a> {
fn load_crate(&mut self,
extern_crate: &ast::Item,
allows_macros: bool) -> Vec<LoadedMacro> {
self.load_macros(extern_crate, allows_macros, import, reexport)
}
-}
-impl<'a> MacroLoader<'a> {
fn load_macros<'b>(&mut self,
vi: &ast::Item,
allows_macros: bool,
return Vec::new();
}
- let mut macros = self.reader.read_macros(vi);
+ let mut macros = self.creader.read_macros(vi);
let mut ret = Vec::new();
let mut seen = HashSet::new();
fn register_custom_derive(&mut self,
trait_name: &str,
expand: fn(TokenStream) -> TokenStream) {
- let derive = Box::new(CustomDerive::new(expand));
- self.0.push(LoadedMacro::CustomDerive(trait_name.to_string(),
- derive));
+ let derive = Rc::new(CustomDerive::new(expand));
+ self.0.push(LoadedMacro::CustomDerive(trait_name.to_string(), derive));
}
}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-macro_rules! enum_from_u32 {
- ($(#[$attr:meta])* pub enum $name:ident {
- $($variant:ident = $e:expr,)*
- }) => {
- $(#[$attr])*
- pub enum $name {
- $($variant = $e),*
- }
-
- impl $name {
- pub fn from_u32(u: u32) -> Option<$name> {
- $(if u == $name::$variant as u32 {
- return Some($name::$variant)
- })*
- None
- }
- }
- };
- ($(#[$attr:meta])* pub enum $name:ident {
- $($variant:ident,)*
- }) => {
- $(#[$attr])*
- pub enum $name {
- $($variant,)*
- }
-
- impl $name {
- pub fn from_u32(u: u32) -> Option<$name> {
- $(if u == $name::$variant as u32 {
- return Some($name::$variant)
- })*
- None
- }
- }
- }
-}
--- /dev/null
+// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use astencode;
+use index;
+
+use rustc::hir;
+use rustc::hir::def;
+use rustc::hir::def_id::{DefIndex, DefId};
+use rustc::middle::cstore::{LinkagePreference, NativeLibraryKind};
+use rustc::middle::lang_items;
+use rustc::mir;
+use rustc::ty::{self, Ty};
+use rustc_back::PanicStrategy;
+
+use rustc_serialize as serialize;
+use syntax::{ast, attr};
+use syntax_pos::{self, Span};
+
+use std::marker::PhantomData;
+
+#[cfg(not(test))]
+pub const RUSTC_VERSION: &'static str = concat!("rustc ", env!("CFG_VERSION"));
+
+#[cfg(test)]
+pub const RUSTC_VERSION: &'static str = "rustc 0.0.0-unit-test";
+
+/// Metadata encoding version.
+/// NB: increment this if you change the format of metadata such that
+/// the rustc version can't be found to compare with `RUSTC_VERSION`.
+pub const METADATA_VERSION: u8 = 3;
+
+/// Metadata header which includes `METADATA_VERSION`.
+/// To get older versions of rustc to ignore this metadata,
+/// there are 4 zero bytes at the start, which are treated
+/// as a length of 0 by old compilers.
+///
+/// This header is followed by the position of the `CrateRoot`.
+pub const METADATA_HEADER: &'static [u8; 12] = &[
+ 0, 0, 0, 0,
+ b'r', b'u', b's', b't',
+ 0, 0, 0, METADATA_VERSION
+];
+
+/// The shorthand encoding uses an enum's variant index `usize`
+/// and is offset by this value so it never matches a real variant.
+/// This offset is also chosen so that the first byte is never < 0x80.
+pub const SHORTHAND_OFFSET: usize = 0x80;
+
+/// A value of type T referred to by its absolute position
+/// in the metadata, and which can be decoded lazily.
+///
+/// Metadata is effectively a tree, encoded in post-order,
+/// and with the root's position written next to the header.
+/// That means every single `Lazy` points to some previous
+/// location in the metadata and is part of a larger node.
+///
+/// The first `Lazy` in a node is encoded as the backwards
+/// distance from the position where the containing node
+/// starts and where the `Lazy` points to, while the rest
+/// use the forward distance from the previous `Lazy`.
+/// Distances start at 1, as 0-byte nodes are invalid.
+/// Also invalid are nodes being referred to in a different
+/// order than they were encoded in.
+#[must_use]
+pub struct Lazy<T> {
+ pub position: usize,
+ _marker: PhantomData<T>
+}
+
+impl<T> Lazy<T> {
+ pub fn with_position(position: usize) -> Lazy<T> {
+ Lazy {
+ position: position,
+ _marker: PhantomData
+ }
+ }
+
+ /// Returns the minimum encoded size of a value of type `T`.
+ // FIXME(eddyb) Give better estimates for certain types.
+ pub fn min_size() -> usize {
+ 1
+ }
+}
+
+impl<T> Copy for Lazy<T> {}
+impl<T> Clone for Lazy<T> {
+ fn clone(&self) -> Self { *self }
+}
+
+impl<T> serialize::UseSpecializedEncodable for Lazy<T> {}
+impl<T> serialize::UseSpecializedDecodable for Lazy<T> {}
+
+/// A sequence of type T referred to by its absolute position
+/// in the metadata and length, and which can be decoded lazily.
+/// The sequence is a single node for the purposes of `Lazy`.
+///
+/// Unlike `Lazy<Vec<T>>`, the length is encoded next to the
+/// position, not at the position, which means that the length
+/// doesn't need to be known before encoding all the elements.
+///
+/// If the length is 0, no position is encoded, but otherwise,
+/// the encoding is that of `Lazy`, with the distinction that
+/// the minimal distance is the length of the sequence, i.e.
+/// it's assumed there's no 0-byte element in the sequence.
+#[must_use]
+pub struct LazySeq<T> {
+ pub len: usize,
+ pub position: usize,
+ _marker: PhantomData<T>
+}
+
+impl<T> LazySeq<T> {
+ pub fn empty() -> LazySeq<T> {
+ LazySeq::with_position_and_length(0, 0)
+ }
+
+ pub fn with_position_and_length(position: usize, len: usize) -> LazySeq<T> {
+ LazySeq {
+ len: len,
+ position: position,
+ _marker: PhantomData
+ }
+ }
+
+ /// Returns the minimum encoded size of `length` values of type `T`.
+ pub fn min_size(length: usize) -> usize {
+ length
+ }
+}
+
+impl<T> Copy for LazySeq<T> {}
+impl<T> Clone for LazySeq<T> {
+ fn clone(&self) -> Self { *self }
+}
+
+impl<T> serialize::UseSpecializedEncodable for LazySeq<T> {}
+impl<T> serialize::UseSpecializedDecodable for LazySeq<T> {}
+
+/// Encoding / decoding state for `Lazy` and `LazySeq`.
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub enum LazyState {
+ /// Outside of a metadata node.
+ NoNode,
+
+ /// Inside a metadata node, and before any `Lazy` or `LazySeq`.
+ /// The position is that of the node itself.
+ NodeStart(usize),
+
+ /// Inside a metadata node, with a previous `Lazy` or `LazySeq`.
+ /// The position is a conservative estimate of where that
+ /// previous `Lazy` / `LazySeq` would end (see their comments).
+ Previous(usize)
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct CrateRoot {
+ pub rustc_version: String,
+ pub name: String,
+ pub triple: String,
+ pub hash: hir::svh::Svh,
+ pub disambiguator: String,
+ pub panic_strategy: PanicStrategy,
+ pub plugin_registrar_fn: Option<DefIndex>,
+ pub macro_derive_registrar: Option<DefIndex>,
+
+ pub crate_deps: LazySeq<CrateDep>,
+ pub dylib_dependency_formats: LazySeq<Option<LinkagePreference>>,
+ pub lang_items: LazySeq<(DefIndex, usize)>,
+ pub lang_items_missing: LazySeq<lang_items::LangItem>,
+ pub native_libraries: LazySeq<(NativeLibraryKind, String)>,
+ pub codemap: LazySeq<syntax_pos::FileMap>,
+ pub macro_defs: LazySeq<MacroDef>,
+ pub impls: LazySeq<TraitImpls>,
+ pub reachable_ids: LazySeq<DefIndex>,
+ pub index: LazySeq<index::Index>,
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct CrateDep {
+ pub name: ast::Name,
+ pub hash: hir::svh::Svh,
+ pub explicitly_linked: bool
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct TraitImpls {
+ pub trait_id: (u32, DefIndex),
+ pub impls: LazySeq<DefIndex>
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct MacroDef {
+ pub name: ast::Name,
+ pub attrs: Vec<ast::Attribute>,
+ pub span: Span,
+ pub body: String
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct Entry<'tcx> {
+ pub kind: EntryKind<'tcx>,
+ pub visibility: ty::Visibility,
+ pub def_key: Lazy<hir::map::DefKey>,
+ pub attributes: LazySeq<ast::Attribute>,
+ pub children: LazySeq<DefIndex>,
+ pub stability: Option<Lazy<attr::Stability>>,
+ pub deprecation: Option<Lazy<attr::Deprecation>>,
+
+ pub ty: Option<Lazy<Ty<'tcx>>>,
+ pub inherent_impls: LazySeq<DefIndex>,
+ pub variances: LazySeq<ty::Variance>,
+ pub generics: Option<Lazy<ty::Generics<'tcx>>>,
+ pub predicates: Option<Lazy<ty::GenericPredicates<'tcx>>>,
+
+ pub ast: Option<Lazy<astencode::Ast<'tcx>>>,
+ pub mir: Option<Lazy<mir::repr::Mir<'tcx>>>
+}
+
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
+pub enum EntryKind<'tcx> {
+ Const,
+ ImmStatic,
+ MutStatic,
+ ForeignImmStatic,
+ ForeignMutStatic,
+ ForeignMod,
+ Type,
+ Enum,
+ Field,
+ Variant(Lazy<VariantData>),
+ Struct(Lazy<VariantData>),
+ Union(Lazy<VariantData>),
+ Fn(Lazy<FnData>),
+ ForeignFn(Lazy<FnData>),
+ Mod(Lazy<ModData>),
+ Closure(Lazy<ClosureData<'tcx>>),
+ Trait(Lazy<TraitData<'tcx>>),
+ Impl(Lazy<ImplData<'tcx>>),
+ DefaultImpl(Lazy<ImplData<'tcx>>),
+ Method(Lazy<MethodData<'tcx>>),
+ AssociatedType(AssociatedContainer),
+ AssociatedConst(AssociatedContainer)
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct ModData {
+ pub reexports: LazySeq<def::Export>
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct FnData {
+ pub constness: hir::Constness,
+ pub arg_names: LazySeq<ast::Name>
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct VariantData {
+ pub kind: ty::VariantKind,
+ pub disr: u64,
+
+ /// If this is a struct's only variant, this
+ /// is the index of the "struct ctor" item.
+ pub struct_ctor: Option<DefIndex>
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct TraitData<'tcx> {
+ pub unsafety: hir::Unsafety,
+ pub paren_sugar: bool,
+ pub has_default_impl: bool,
+ pub trait_ref: Lazy<ty::TraitRef<'tcx>>,
+ pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct ImplData<'tcx> {
+ pub polarity: hir::ImplPolarity,
+ pub parent_impl: Option<DefId>,
+ pub coerce_unsized_kind: Option<ty::adjustment::CustomCoerceUnsized>,
+ pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>
+}
+
+/// Describes whether the container of an associated item
+/// is a trait or an impl and whether, in a trait, it has
+/// a default, or in an impl, whether it's marked "default".
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
+pub enum AssociatedContainer {
+ TraitRequired,
+ TraitWithDefault,
+ ImplDefault,
+ ImplFinal
+}
+
+impl AssociatedContainer {
+ pub fn with_def_id(&self, def_id: DefId) -> ty::ImplOrTraitItemContainer {
+ match *self {
+ AssociatedContainer::TraitRequired |
+ AssociatedContainer::TraitWithDefault => {
+ ty::TraitContainer(def_id)
+ }
+
+ AssociatedContainer::ImplDefault |
+ AssociatedContainer::ImplFinal => {
+ ty::ImplContainer(def_id)
+ }
+ }
+ }
+
+ pub fn has_body(&self) -> bool {
+ match *self {
+ AssociatedContainer::TraitRequired => false,
+
+ AssociatedContainer::TraitWithDefault |
+ AssociatedContainer::ImplDefault |
+ AssociatedContainer::ImplFinal => true
+ }
+ }
+
+ pub fn defaultness(&self) -> hir::Defaultness {
+ match *self {
+ AssociatedContainer::TraitRequired |
+ AssociatedContainer::TraitWithDefault |
+ AssociatedContainer::ImplDefault => hir::Defaultness::Default,
+
+ AssociatedContainer::ImplFinal => hir::Defaultness::Final
+ }
+ }
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct MethodData<'tcx> {
+ pub fn_data: FnData,
+ pub container: AssociatedContainer,
+ pub explicit_self: Lazy<ty::ExplicitSelfCategory<'tcx>>
+}
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct ClosureData<'tcx> {
+ pub kind: ty::ClosureKind,
+ pub ty: Lazy<ty::ClosureTy<'tcx>>
+}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// This module provides implementations for the thread-local encoding and
-// decoding context traits in rustc::middle::cstore::tls.
-
-use rbml::opaque::Encoder as OpaqueEncoder;
-use rbml::opaque::Decoder as OpaqueDecoder;
-use rustc::middle::cstore::tls;
-use rustc::hir::def_id::DefId;
-use rustc::ty::subst::Substs;
-use rustc::ty::{self, TyCtxt};
-
-use decoder::{self, Cmd};
-use encoder;
-use tydecode::TyDecoder;
-use tyencode;
-
-impl<'a, 'tcx: 'a> tls::EncodingContext<'tcx> for encoder::EncodeContext<'a, 'tcx> {
-
- fn tcx<'s>(&'s self) -> TyCtxt<'s, 'tcx, 'tcx> {
- self.tcx
- }
-
- fn encode_ty(&self, encoder: &mut OpaqueEncoder, t: ty::Ty<'tcx>) {
- tyencode::enc_ty(encoder.cursor, &self.ty_str_ctxt(), t);
- }
-
- fn encode_substs(&self, encoder: &mut OpaqueEncoder, substs: &Substs<'tcx>) {
- tyencode::enc_substs(encoder.cursor, &self.ty_str_ctxt(), substs);
- }
-}
-
-pub struct DecodingContext<'a, 'tcx: 'a> {
- pub crate_metadata: Cmd<'a>,
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
-}
-
-impl<'a, 'tcx: 'a> tls::DecodingContext<'tcx> for DecodingContext<'a, 'tcx> {
-
- fn tcx<'s>(&'s self) -> TyCtxt<'s, 'tcx, 'tcx> {
- self.tcx
- }
-
- fn decode_ty(&self, decoder: &mut OpaqueDecoder) -> ty::Ty<'tcx> {
- let def_id_convert = &mut |did| {
- decoder::translate_def_id(self.crate_metadata, did)
- };
-
- let starting_position = decoder.position();
-
- let mut ty_decoder = TyDecoder::new(
- self.crate_metadata.data.as_slice(),
- self.crate_metadata.cnum,
- starting_position,
- self.tcx,
- def_id_convert);
-
- let ty = ty_decoder.parse_ty();
-
- let end_position = ty_decoder.position();
-
- // We can just reuse the tydecode implementation for parsing types, but
- // we have to make sure to leave the rbml reader at the position just
- // after the type.
- decoder.advance(end_position - starting_position);
- ty
- }
-
- fn decode_substs(&self, decoder: &mut OpaqueDecoder) -> &'tcx Substs<'tcx> {
- let def_id_convert = &mut |did| {
- decoder::translate_def_id(self.crate_metadata, did)
- };
-
- let starting_position = decoder.position();
-
- let mut ty_decoder = TyDecoder::new(
- self.crate_metadata.data.as_slice(),
- self.crate_metadata.cnum,
- starting_position,
- self.tcx,
- def_id_convert);
-
- let substs = ty_decoder.parse_substs();
-
- let end_position = ty_decoder.position();
-
- decoder.advance(end_position - starting_position);
- substs
- }
-
- fn translate_def_id(&self, def_id: DefId) -> DefId {
- decoder::translate_def_id(self.crate_metadata, def_id)
- }
-}
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-
-// Type decoding
-
-// tjc note: Would be great to have a `match check` macro equivalent
-// for some of these
-
-#![allow(non_camel_case_types)]
-
-use rustc::hir;
-
-use rustc::hir::def_id::{DefId, DefIndex};
-use middle::region;
-use rustc::ty::subst::{Kind, Substs};
-use rustc::ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable};
-
-use rbml;
-use rbml::leb128;
-use std::str;
-use syntax::abi;
-use syntax::ast;
-use syntax::parse::token;
-
-// Compact string representation for Ty values. API TyStr &
-// parse_from_str. Extra parameters are for converting to/from def_ids in the
-// data buffer. Whatever format you choose should not contain pipe characters.
-
-pub type DefIdConvert<'a> = &'a mut FnMut(DefId) -> DefId;
-
-pub struct TyDecoder<'a, 'tcx: 'a> {
- data: &'a [u8],
- krate: ast::CrateNum,
- pos: usize,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- conv_def_id: DefIdConvert<'a>,
-}
-
-impl<'a,'tcx> TyDecoder<'a,'tcx> {
- pub fn with_doc(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- crate_num: ast::CrateNum,
- doc: rbml::Doc<'a>,
- conv: DefIdConvert<'a>)
- -> TyDecoder<'a,'tcx> {
- TyDecoder::new(doc.data, crate_num, doc.start, tcx, conv)
- }
-
- pub fn new(data: &'a [u8],
- crate_num: ast::CrateNum,
- pos: usize,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- conv: DefIdConvert<'a>)
- -> TyDecoder<'a, 'tcx> {
- TyDecoder {
- data: data,
- krate: crate_num,
- pos: pos,
- tcx: tcx,
- conv_def_id: conv,
- }
- }
-
- pub fn position(&self) -> usize {
- self.pos
- }
-
- fn peek(&self) -> char {
- self.data[self.pos] as char
- }
-
- fn next(&mut self) -> char {
- let ch = self.data[self.pos] as char;
- self.pos = self.pos + 1;
- return ch;
- }
-
- fn next_byte(&mut self) -> u8 {
- let b = self.data[self.pos];
- self.pos = self.pos + 1;
- return b;
- }
-
- fn scan<F>(&mut self, mut is_last: F) -> &'a [u8]
- where F: FnMut(char) -> bool,
- {
- let start_pos = self.pos;
- debug!("scan: '{}' (start)", self.data[self.pos] as char);
- while !is_last(self.data[self.pos] as char) {
- self.pos += 1;
- debug!("scan: '{}'", self.data[self.pos] as char);
- }
- let end_pos = self.pos;
- self.pos += 1;
- return &self.data[start_pos..end_pos];
- }
-
- fn parse_vuint(&mut self) -> usize {
- let (value, bytes_read) = leb128::read_unsigned_leb128(self.data,
- self.pos);
- self.pos += bytes_read;
- value as usize
- }
-
- fn parse_name(&mut self, last: char) -> ast::Name {
- fn is_last(b: char, c: char) -> bool { return c == b; }
- let bytes = self.scan(|a| is_last(last, a));
- token::intern(str::from_utf8(bytes).unwrap())
- }
-
- fn parse_size(&mut self) -> Option<usize> {
- assert_eq!(self.next(), '/');
-
- if self.peek() == '|' {
- assert_eq!(self.next(), '|');
- None
- } else {
- let n = self.parse_uint();
- assert_eq!(self.next(), '|');
- Some(n)
- }
- }
-
- pub fn parse_substs(&mut self) -> &'tcx Substs<'tcx> {
- let mut params = vec![];
- assert_eq!(self.next(), '[');
- while self.peek() != ']' {
- let k = match self.next() {
- 'r' => Kind::from(self.parse_region()),
- 't' => Kind::from(self.parse_ty()),
- _ => bug!()
- };
- params.push(k);
- }
- assert_eq!(self.next(), ']');
-
- Substs::new(self.tcx, params)
- }
-
- pub fn parse_generics(&mut self) -> &'tcx ty::Generics<'tcx> {
- let parent = self.parse_opt(|this| this.parse_def());
- let parent_regions = self.parse_u32();
- assert_eq!(self.next(), '|');
- let parent_types = self.parse_u32();
-
- let mut regions = vec![];
- let mut types = vec![];
- assert_eq!(self.next(), '[');
- while self.peek() != '|' {
- regions.push(self.parse_region_param_def());
- }
- assert_eq!(self.next(), '|');
- while self.peek() != ']' {
- types.push(self.parse_type_param_def());
- }
- assert_eq!(self.next(), ']');
-
- self.tcx.alloc_generics(ty::Generics {
- parent: parent,
- parent_regions: parent_regions,
- parent_types: parent_types,
- regions: regions,
- types: types,
- has_self: self.next() == 'S'
- })
- }
-
- fn parse_bound_region(&mut self) -> ty::BoundRegion {
- match self.next() {
- 'a' => {
- let id = self.parse_u32();
- assert_eq!(self.next(), '|');
- ty::BrAnon(id)
- }
- '[' => {
- let def = self.parse_def();
- let name = token::intern(&self.parse_str('|'));
- let issue32330 = match self.next() {
- 'n' => {
- assert_eq!(self.next(), ']');
- ty::Issue32330::WontChange
- }
- 'y' => {
- ty::Issue32330::WillChange {
- fn_def_id: self.parse_def(),
- region_name: token::intern(&self.parse_str(']')),
- }
- }
- c => panic!("expected n or y not {}", c)
- };
- ty::BrNamed(def, name, issue32330)
- }
- 'f' => {
- let id = self.parse_u32();
- assert_eq!(self.next(), '|');
- ty::BrFresh(id)
- }
- 'e' => ty::BrEnv,
- _ => bug!("parse_bound_region: bad input")
- }
- }
-
- pub fn parse_region(&mut self) -> &'tcx ty::Region {
- self.tcx.mk_region(match self.next() {
- 'b' => {
- assert_eq!(self.next(), '[');
- let id = ty::DebruijnIndex::new(self.parse_u32());
- assert_eq!(self.next(), '|');
- let br = self.parse_bound_region();
- assert_eq!(self.next(), ']');
- ty::ReLateBound(id, br)
- }
- 'B' => {
- assert_eq!(self.next(), '[');
- let index = self.parse_u32();
- assert_eq!(self.next(), '|');
- let name = token::intern(&self.parse_str(']'));
- ty::ReEarlyBound(ty::EarlyBoundRegion {
- index: index,
- name: name
- })
- }
- 'f' => {
- assert_eq!(self.next(), '[');
- let scope = self.parse_scope();
- assert_eq!(self.next(), '|');
- let br = self.parse_bound_region();
- assert_eq!(self.next(), ']');
- ty::ReFree(ty::FreeRegion { scope: scope,
- bound_region: br})
- }
- 's' => {
- let scope = self.parse_scope();
- assert_eq!(self.next(), '|');
- ty::ReScope(scope)
- }
- 't' => ty::ReStatic,
- 'e' => ty::ReEmpty,
- 'E' => ty::ReErased,
- _ => bug!("parse_region: bad input")
- })
- }
-
- fn parse_scope(&mut self) -> region::CodeExtent {
- self.tcx.region_maps.bogus_code_extent(match self.next() {
- // This creates scopes with the wrong NodeId. This isn't
- // actually a problem because scopes only exist *within*
- // functions, and functions aren't loaded until trans which
- // doesn't care about regions.
- //
- // May still be worth fixing though.
- 'C' => {
- assert_eq!(self.next(), '[');
- let fn_id = self.parse_uint() as ast::NodeId;
- assert_eq!(self.next(), '|');
- let body_id = self.parse_uint() as ast::NodeId;
- assert_eq!(self.next(), ']');
- region::CodeExtentData::CallSiteScope {
- fn_id: fn_id, body_id: body_id
- }
- }
- // This creates scopes with the wrong NodeId. (See note above.)
- 'P' => {
- assert_eq!(self.next(), '[');
- let fn_id = self.parse_uint() as ast::NodeId;
- assert_eq!(self.next(), '|');
- let body_id = self.parse_uint() as ast::NodeId;
- assert_eq!(self.next(), ']');
- region::CodeExtentData::ParameterScope {
- fn_id: fn_id, body_id: body_id
- }
- }
- 'M' => {
- let node_id = self.parse_uint() as ast::NodeId;
- region::CodeExtentData::Misc(node_id)
- }
- 'D' => {
- let node_id = self.parse_uint() as ast::NodeId;
- region::CodeExtentData::DestructionScope(node_id)
- }
- 'B' => {
- assert_eq!(self.next(), '[');
- let node_id = self.parse_uint() as ast::NodeId;
- assert_eq!(self.next(), '|');
- let first_stmt_index = self.parse_u32();
- assert_eq!(self.next(), ']');
- let block_remainder = region::BlockRemainder {
- block: node_id, first_statement_index: first_stmt_index,
- };
- region::CodeExtentData::Remainder(block_remainder)
- }
- _ => bug!("parse_scope: bad input")
- })
- }
-
- fn parse_opt<T, F>(&mut self, f: F) -> Option<T>
- where F: FnOnce(&mut TyDecoder<'a, 'tcx>) -> T,
- {
- match self.next() {
- 'n' => None,
- 's' => Some(f(self)),
- _ => bug!("parse_opt: bad input")
- }
- }
-
- fn parse_str(&mut self, term: char) -> String {
- let mut result = String::new();
- while self.peek() != term {
- unsafe {
- result.as_mut_vec().extend_from_slice(&[self.next_byte()])
- }
- }
- self.next();
- result
- }
-
- pub fn parse_trait_ref(&mut self) -> ty::TraitRef<'tcx> {
- ty::TraitRef {
- def_id: self.parse_def(),
- substs: self.parse_substs()
- }
- }
-
- pub fn parse_existential_trait_ref(&mut self) -> ty::ExistentialTraitRef<'tcx> {
- ty::ExistentialTraitRef {
- def_id: self.parse_def(),
- substs: self.parse_substs()
- }
- }
-
- pub fn parse_ty(&mut self) -> Ty<'tcx> {
- let tcx = self.tcx;
- match self.next() {
- 'b' => return tcx.types.bool,
- '!' => return tcx.types.never,
- 'i' => { /* eat the s of is */ self.next(); return tcx.types.isize },
- 'u' => { /* eat the s of us */ self.next(); return tcx.types.usize },
- 'M' => {
- match self.next() {
- 'b' => return tcx.types.u8,
- 'w' => return tcx.types.u16,
- 'l' => return tcx.types.u32,
- 'd' => return tcx.types.u64,
- 'B' => return tcx.types.i8,
- 'W' => return tcx.types.i16,
- 'L' => return tcx.types.i32,
- 'D' => return tcx.types.i64,
- 'f' => return tcx.types.f32,
- 'F' => return tcx.types.f64,
- _ => bug!("parse_ty: bad numeric type")
- }
- }
- 'c' => return tcx.types.char,
- 'x' => {
- assert_eq!(self.next(), '[');
- let trait_ref = ty::Binder(self.parse_existential_trait_ref());
- let builtin_bounds = self.parse_builtin_bounds();
- let region_bound = self.parse_region();
- let mut projection_bounds = Vec::new();
-
- loop {
- match self.next() {
- 'P' => {
- let bound = self.parse_existential_projection();
- projection_bounds.push(ty::Binder(bound));
- }
- '.' => { break; }
- c => {
- bug!("parse_bounds: bad bounds ('{}')", c)
- }
- }
- }
- assert_eq!(self.next(), ']');
- return tcx.mk_trait(ty::TraitObject {
- principal: trait_ref,
- region_bound: region_bound,
- builtin_bounds: builtin_bounds,
- projection_bounds: projection_bounds
- });
- }
- 'p' => {
- assert_eq!(self.next(), '[');
- let index = self.parse_u32();
- assert_eq!(self.next(), '|');
- let name = token::intern(&self.parse_str(']'));
- return tcx.mk_param(index, name);
- }
- '~' => return tcx.mk_box(self.parse_ty()),
- '*' => return tcx.mk_ptr(self.parse_mt()),
- '&' => {
- return tcx.mk_ref(self.parse_region(), self.parse_mt());
- }
- 'V' => {
- let t = self.parse_ty();
- return match self.parse_size() {
- Some(n) => tcx.mk_array(t, n),
- None => tcx.mk_slice(t)
- };
- }
- 'v' => {
- return tcx.mk_str();
- }
- 'T' => {
- assert_eq!(self.next(), '[');
- let mut params = Vec::new();
- while self.peek() != ']' { params.push(self.parse_ty()); }
- self.pos = self.pos + 1;
- return tcx.mk_tup(params);
- }
- 'F' => {
- let def_id = self.parse_def();
- let substs = self.parse_substs();
- return tcx.mk_fn_def(def_id, substs, self.parse_bare_fn_ty());
- }
- 'G' => {
- return tcx.mk_fn_ptr(self.parse_bare_fn_ty());
- }
- '#' => {
- // This is a hacky little caching scheme. The idea is that if we encode
- // the same type twice, the second (and third, and fourth...) time we will
- // just write `#123`, where `123` is the offset in the metadata of the
- // first appearance. Now when we are *decoding*, if we see a `#123`, we
- // can first check a cache (`tcx.rcache`) for that offset. If we find something,
- // we return it (modulo closure types, see below). But if not, then we
- // jump to offset 123 and read the type from there.
-
- let pos = self.parse_vuint();
- let key = ty::CReaderCacheKey { cnum: self.krate, pos: pos };
- if let Some(tt) = tcx.rcache.borrow().get(&key).cloned() {
- // If there is a closure buried in the type some where, then we
- // need to re-convert any def ids (see case 'k', below). That means
- // we can't reuse the cached version.
- if !tt.has_closure_types() {
- return tt;
- }
- }
-
- let mut substate = TyDecoder::new(self.data,
- self.krate,
- pos,
- self.tcx,
- self.conv_def_id);
- let tt = substate.parse_ty();
- tcx.rcache.borrow_mut().insert(key, tt);
- return tt;
- }
- '\"' => {
- let _ = self.parse_def();
- let inner = self.parse_ty();
- inner
- }
- 'a' => {
- assert_eq!(self.next(), '[');
- let did = self.parse_def();
- let substs = self.parse_substs();
- assert_eq!(self.next(), ']');
- let def = self.tcx.lookup_adt_def(did);
- return self.tcx.mk_adt(def, substs);
- }
- 'k' => {
- assert_eq!(self.next(), '[');
- let did = self.parse_def();
- let substs = self.parse_substs();
- let mut tys = vec![];
- while self.peek() != '.' {
- tys.push(self.parse_ty());
- }
- assert_eq!(self.next(), '.');
- assert_eq!(self.next(), ']');
- return self.tcx.mk_closure(did, substs, tys);
- }
- 'P' => {
- assert_eq!(self.next(), '[');
- let trait_ref = self.parse_trait_ref();
- let name = token::intern(&self.parse_str(']'));
- return tcx.mk_projection(trait_ref, name);
- }
- 'A' => {
- assert_eq!(self.next(), '[');
- let def_id = self.parse_def();
- let substs = self.parse_substs();
- assert_eq!(self.next(), ']');
- return self.tcx.mk_anon(def_id, substs);
- }
- 'e' => {
- return tcx.types.err;
- }
- c => { bug!("unexpected char in type string: {}", c);}
- }
- }
-
- fn parse_mutability(&mut self) -> hir::Mutability {
- match self.peek() {
- 'm' => { self.next(); hir::MutMutable }
- _ => { hir::MutImmutable }
- }
- }
-
- fn parse_mt(&mut self) -> ty::TypeAndMut<'tcx> {
- let m = self.parse_mutability();
- ty::TypeAndMut { ty: self.parse_ty(), mutbl: m }
- }
-
- fn parse_def(&mut self) -> DefId {
- let def_id = parse_defid(self.scan(|c| c == '|'));
- return (self.conv_def_id)(def_id);
- }
-
- fn parse_uint(&mut self) -> usize {
- let mut n = 0;
- loop {
- let cur = self.peek();
- if cur < '0' || cur > '9' { return n; }
- self.pos = self.pos + 1;
- n *= 10;
- n += (cur as usize) - ('0' as usize);
- };
- }
-
- fn parse_u32(&mut self) -> u32 {
- let n = self.parse_uint();
- let m = n as u32;
- assert_eq!(m as usize, n);
- m
- }
-
- fn parse_abi_set(&mut self) -> abi::Abi {
- assert_eq!(self.next(), '[');
- let bytes = self.scan(|c| c == ']');
- let abi_str = str::from_utf8(bytes).unwrap();
- abi::lookup(&abi_str[..]).expect(abi_str)
- }
-
- pub fn parse_closure_ty(&mut self) -> ty::ClosureTy<'tcx> {
- let unsafety = parse_unsafety(self.next());
- let sig = self.parse_sig();
- let abi = self.parse_abi_set();
- ty::ClosureTy {
- unsafety: unsafety,
- sig: sig,
- abi: abi,
- }
- }
-
- pub fn parse_bare_fn_ty(&mut self) -> &'tcx ty::BareFnTy<'tcx> {
- let unsafety = parse_unsafety(self.next());
- let abi = self.parse_abi_set();
- let sig = self.parse_sig();
- self.tcx.mk_bare_fn(ty::BareFnTy {
- unsafety: unsafety,
- abi: abi,
- sig: sig
- })
- }
-
- fn parse_sig(&mut self) -> ty::PolyFnSig<'tcx> {
- assert_eq!(self.next(), '[');
- let mut inputs = Vec::new();
- while self.peek() != ']' {
- inputs.push(self.parse_ty());
- }
- self.pos += 1; // eat the ']'
- let variadic = match self.next() {
- 'V' => true,
- 'N' => false,
- r => bug!("bad variadic: {}", r),
- };
- let output = self.parse_ty();
- ty::Binder(ty::FnSig {inputs: inputs,
- output: output,
- variadic: variadic})
- }
-
- pub fn parse_predicate(&mut self) -> ty::Predicate<'tcx> {
- match self.next() {
- 't' => ty::Binder(self.parse_trait_ref()).to_predicate(),
- 'e' => ty::Binder(ty::EquatePredicate(self.parse_ty(),
- self.parse_ty())).to_predicate(),
- 'r' => ty::Binder(ty::OutlivesPredicate(self.parse_region(),
- self.parse_region())).to_predicate(),
- 'o' => ty::Binder(ty::OutlivesPredicate(self.parse_ty(),
- self.parse_region())).to_predicate(),
- 'p' => ty::Binder(self.parse_projection_predicate()).to_predicate(),
- 'w' => ty::Predicate::WellFormed(self.parse_ty()),
- 'O' => {
- let def_id = self.parse_def();
- assert_eq!(self.next(), '|');
- ty::Predicate::ObjectSafe(def_id)
- }
- 'c' => {
- let def_id = self.parse_def();
- assert_eq!(self.next(), '|');
- let kind = match self.next() {
- 'f' => ty::ClosureKind::Fn,
- 'm' => ty::ClosureKind::FnMut,
- 'o' => ty::ClosureKind::FnOnce,
- c => bug!("Encountered invalid character in metadata: {}", c)
- };
- assert_eq!(self.next(), '|');
- ty::Predicate::ClosureKind(def_id, kind)
- }
- c => bug!("Encountered invalid character in metadata: {}", c)
- }
- }
-
- fn parse_projection_predicate(&mut self) -> ty::ProjectionPredicate<'tcx> {
- ty::ProjectionPredicate {
- projection_ty: ty::ProjectionTy {
- trait_ref: self.parse_trait_ref(),
- item_name: token::intern(&self.parse_str('|')),
- },
- ty: self.parse_ty(),
- }
- }
-
- fn parse_existential_projection(&mut self) -> ty::ExistentialProjection<'tcx> {
- ty::ExistentialProjection {
- trait_ref: self.parse_existential_trait_ref(),
- item_name: token::intern(&self.parse_str('|')),
- ty: self.parse_ty(),
- }
- }
-
- fn parse_type_param_def(&mut self) -> ty::TypeParameterDef<'tcx> {
- let name = self.parse_name(':');
- let def_id = self.parse_def();
- let index = self.parse_u32();
- assert_eq!(self.next(), '|');
- let default_def_id = self.parse_def();
- let default = self.parse_opt(|this| this.parse_ty());
- let object_lifetime_default = self.parse_object_lifetime_default();
-
- ty::TypeParameterDef {
- name: name,
- def_id: def_id,
- index: index,
- default_def_id: default_def_id,
- default: default,
- object_lifetime_default: object_lifetime_default,
- }
- }
-
- fn parse_region_param_def(&mut self) -> ty::RegionParameterDef<'tcx> {
- let name = self.parse_name(':');
- let def_id = self.parse_def();
- let index = self.parse_u32();
- assert_eq!(self.next(), '|');
- let mut bounds = vec![];
- loop {
- match self.next() {
- 'R' => bounds.push(self.parse_region()),
- '.' => { break; }
- c => {
- bug!("parse_region_param_def: bad bounds ('{}')", c)
- }
- }
- }
- ty::RegionParameterDef {
- name: name,
- def_id: def_id,
- index: index,
- bounds: bounds,
- }
- }
-
-
- fn parse_object_lifetime_default(&mut self) -> ty::ObjectLifetimeDefault<'tcx> {
- match self.next() {
- 'a' => ty::ObjectLifetimeDefault::Ambiguous,
- 'b' => ty::ObjectLifetimeDefault::BaseDefault,
- 's' => {
- let region = self.parse_region();
- ty::ObjectLifetimeDefault::Specific(region)
- }
- _ => bug!("parse_object_lifetime_default: bad input")
- }
- }
-
- fn parse_builtin_bounds(&mut self) -> ty::BuiltinBounds {
- let mut builtin_bounds = ty::BuiltinBounds::empty();
- loop {
- match self.next() {
- 'S' => {
- builtin_bounds.insert(ty::BoundSend);
- }
- 'Z' => {
- builtin_bounds.insert(ty::BoundSized);
- }
- 'P' => {
- builtin_bounds.insert(ty::BoundCopy);
- }
- 'T' => {
- builtin_bounds.insert(ty::BoundSync);
- }
- '.' => {
- return builtin_bounds;
- }
- c => {
- bug!("parse_bounds: bad builtin bounds ('{}')", c)
- }
- }
- }
- }
-}
-
-// Rust metadata parsing
-fn parse_defid(buf: &[u8]) -> DefId {
- let mut colon_idx = 0;
- let len = buf.len();
- while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1; }
- if colon_idx == len {
- error!("didn't find ':' when parsing def id");
- bug!();
- }
-
- let crate_part = &buf[0..colon_idx];
- let def_part = &buf[colon_idx + 1..len];
-
- let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| {
- s.parse::<usize>().ok()
- }) {
- Some(cn) => cn as ast::CrateNum,
- None => bug!("internal error: parse_defid: crate number expected, found {:?}",
- crate_part)
- };
- let def_num = match str::from_utf8(def_part).ok().and_then(|s| {
- s.parse::<usize>().ok()
- }) {
- Some(dn) => dn,
- None => bug!("internal error: parse_defid: id expected, found {:?}",
- def_part)
- };
- let index = DefIndex::new(def_num);
- DefId { krate: crate_num, index: index }
-}
-
-fn parse_unsafety(c: char) -> hir::Unsafety {
- match c {
- 'u' => hir::Unsafety::Unsafe,
- 'n' => hir::Unsafety::Normal,
- _ => bug!("parse_unsafety: bad unsafety {}", c)
- }
-}
+++ /dev/null
-// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Type encoding
-
-#![allow(unused_must_use)] // as with encoding, everything is a no-fail MemWriter
-#![allow(non_camel_case_types)]
-
-use std::cell::RefCell;
-use std::io::Cursor;
-use std::io::prelude::*;
-
-use rustc::hir::def_id::DefId;
-use middle::region;
-use rustc::ty::subst::Substs;
-use rustc::ty::{self, Ty, TyCtxt};
-use rustc::util::nodemap::FnvHashMap;
-
-use rustc::hir;
-
-use syntax::abi::Abi;
-use syntax::ast;
-use errors::Handler;
-
-use rbml::leb128;
-use encoder;
-
-pub struct ctxt<'a, 'tcx: 'a> {
- pub diag: &'a Handler,
- // Def -> str Callback:
- pub ds: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String,
- // The type context.
- pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pub abbrevs: &'a abbrev_map<'tcx>
-}
-
-impl<'a, 'tcx> encoder::EncodeContext<'a, 'tcx> {
- pub fn ty_str_ctxt<'b>(&'b self) -> ctxt<'b, 'tcx> {
- ctxt {
- diag: self.tcx.sess.diagnostic(),
- ds: encoder::def_to_string,
- tcx: self.tcx,
- abbrevs: &self.type_abbrevs
- }
- }
-}
-
-// Compact string representation for Ty values. API TyStr & parse_from_str.
-// Extra parameters are for converting to/from def_ids in the string rep.
-// Whatever format you choose should not contain pipe characters.
-pub struct ty_abbrev {
- s: Vec<u8>
-}
-
-pub type abbrev_map<'tcx> = RefCell<FnvHashMap<Ty<'tcx>, ty_abbrev>>;
-
-pub fn enc_ty<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>, t: Ty<'tcx>) {
- if let Some(a) = cx.abbrevs.borrow_mut().get(&t) {
- w.write_all(&a.s);
- return;
- }
-
- let pos = w.position();
-
- match t.sty {
- ty::TyBool => { write!(w, "b"); }
- ty::TyChar => { write!(w, "c"); }
- ty::TyNever => { write!(w, "!"); }
- ty::TyInt(t) => {
- match t {
- ast::IntTy::Is => write!(w, "is"),
- ast::IntTy::I8 => write!(w, "MB"),
- ast::IntTy::I16 => write!(w, "MW"),
- ast::IntTy::I32 => write!(w, "ML"),
- ast::IntTy::I64 => write!(w, "MD")
- };
- }
- ty::TyUint(t) => {
- match t {
- ast::UintTy::Us => write!(w, "us"),
- ast::UintTy::U8 => write!(w, "Mb"),
- ast::UintTy::U16 => write!(w, "Mw"),
- ast::UintTy::U32 => write!(w, "Ml"),
- ast::UintTy::U64 => write!(w, "Md")
- };
- }
- ty::TyFloat(t) => {
- match t {
- ast::FloatTy::F32 => write!(w, "Mf"),
- ast::FloatTy::F64 => write!(w, "MF"),
- };
- }
- ty::TyTrait(ref obj) => {
- write!(w, "x[");
- enc_existential_trait_ref(w, cx, obj.principal.0);
- enc_builtin_bounds(w, cx, &obj.builtin_bounds);
-
- enc_region(w, cx, obj.region_bound);
-
- for tp in &obj.projection_bounds {
- write!(w, "P");
- enc_existential_projection(w, cx, &tp.0);
- }
-
- write!(w, ".");
- write!(w, "]");
- }
- ty::TyTuple(ts) => {
- write!(w, "T[");
- for t in ts { enc_ty(w, cx, *t); }
- write!(w, "]");
- }
- ty::TyBox(typ) => { write!(w, "~"); enc_ty(w, cx, typ); }
- ty::TyRawPtr(mt) => { write!(w, "*"); enc_mt(w, cx, mt); }
- ty::TyRef(r, mt) => {
- write!(w, "&");
- enc_region(w, cx, r);
- enc_mt(w, cx, mt);
- }
- ty::TyArray(t, sz) => {
- write!(w, "V");
- enc_ty(w, cx, t);
- write!(w, "/{}|", sz);
- }
- ty::TySlice(t) => {
- write!(w, "V");
- enc_ty(w, cx, t);
- write!(w, "/|");
- }
- ty::TyStr => {
- write!(w, "v");
- }
- ty::TyFnDef(def_id, substs, f) => {
- write!(w, "F");
- write!(w, "{}|", (cx.ds)(cx.tcx, def_id));
- enc_substs(w, cx, substs);
- enc_bare_fn_ty(w, cx, f);
- }
- ty::TyFnPtr(f) => {
- write!(w, "G");
- enc_bare_fn_ty(w, cx, f);
- }
- ty::TyInfer(_) => {
- bug!("cannot encode inference variable types");
- }
- ty::TyParam(p) => {
- write!(w, "p[{}|{}]", p.idx, p.name);
- }
- ty::TyAdt(def, substs) => {
- write!(w, "a[{}|", (cx.ds)(cx.tcx, def.did));
- enc_substs(w, cx, substs);
- write!(w, "]");
- }
- ty::TyClosure(def, substs) => {
- write!(w, "k[{}|", (cx.ds)(cx.tcx, def));
- enc_substs(w, cx, substs.func_substs);
- for ty in substs.upvar_tys {
- enc_ty(w, cx, ty);
- }
- write!(w, ".");
- write!(w, "]");
- }
- ty::TyProjection(ref data) => {
- write!(w, "P[");
- enc_trait_ref(w, cx, data.trait_ref);
- write!(w, "{}]", data.item_name);
- }
- ty::TyAnon(def_id, substs) => {
- write!(w, "A[{}|", (cx.ds)(cx.tcx, def_id));
- enc_substs(w, cx, substs);
- write!(w, "]");
- }
- ty::TyError => {
- write!(w, "e");
- }
- }
-
- let end = w.position();
- let len = end - pos;
-
- let mut abbrev = Cursor::new(Vec::with_capacity(16));
- abbrev.write_all(b"#");
- {
- let start_position = abbrev.position() as usize;
- let bytes_written = leb128::write_unsigned_leb128(abbrev.get_mut(),
- start_position,
- pos);
- abbrev.set_position((start_position + bytes_written) as u64);
- }
-
- cx.abbrevs.borrow_mut().insert(t, ty_abbrev {
- s: if abbrev.position() < len {
- abbrev.get_ref()[..abbrev.position() as usize].to_owned()
- } else {
- // if the abbreviation is longer than the real type,
- // don't use #-notation. However, insert it here so
- // other won't have to `mark_stable_position`
- w.get_ref()[pos as usize .. end as usize].to_owned()
- }
- });
-}
-
-fn enc_mutability(w: &mut Cursor<Vec<u8>>, mt: hir::Mutability) {
- match mt {
- hir::MutImmutable => (),
- hir::MutMutable => {
- write!(w, "m");
- }
- };
-}
-
-fn enc_mt<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- mt: ty::TypeAndMut<'tcx>) {
- enc_mutability(w, mt.mutbl);
- enc_ty(w, cx, mt.ty);
-}
-
-fn enc_opt<T, F>(w: &mut Cursor<Vec<u8>>, t: Option<T>, enc_f: F) where
- F: FnOnce(&mut Cursor<Vec<u8>>, T),
-{
- match t {
- None => {
- write!(w, "n");
- }
- Some(v) => {
- write!(w, "s");
- enc_f(w, v);
- }
- }
-}
-
-pub fn enc_substs<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- substs: &Substs<'tcx>) {
- write!(w, "[");
- for &k in substs.params() {
- if let Some(ty) = k.as_type() {
- write!(w, "t");
- enc_ty(w, cx, ty);
- } else if let Some(r) = k.as_region() {
- write!(w, "r");
- enc_region(w, cx, r);
- } else {
- bug!()
- }
- }
- write!(w, "]");
-}
-
-pub fn enc_generics<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- generics: &ty::Generics<'tcx>) {
- enc_opt(w, generics.parent, |w, def_id| {
- write!(w, "{}|", (cx.ds)(cx.tcx, def_id));
- });
- write!(w, "{}|{}[",
- generics.parent_regions,
- generics.parent_types);
-
- for r in &generics.regions {
- enc_region_param_def(w, cx, r)
- }
- write!(w, "|");
- for t in &generics.types {
- enc_type_param_def(w, cx, t);
- }
- write!(w, "]");
-
- if generics.has_self {
- write!(w, "S");
- } else {
- write!(w, "N");
- }
-}
-
-pub fn enc_region(w: &mut Cursor<Vec<u8>>, cx: &ctxt, r: &ty::Region) {
- match *r {
- ty::ReLateBound(id, br) => {
- write!(w, "b[{}|", id.depth);
- enc_bound_region(w, cx, br);
- write!(w, "]");
- }
- ty::ReEarlyBound(ref data) => {
- write!(w, "B[{}|{}]",
- data.index,
- data.name);
- }
- ty::ReFree(ref fr) => {
- write!(w, "f[");
- enc_scope(w, cx, fr.scope);
- write!(w, "|");
- enc_bound_region(w, cx, fr.bound_region);
- write!(w, "]");
- }
- ty::ReScope(scope) => {
- write!(w, "s");
- enc_scope(w, cx, scope);
- write!(w, "|");
- }
- ty::ReStatic => {
- write!(w, "t");
- }
- ty::ReEmpty => {
- write!(w, "e");
- }
- ty::ReErased => {
- write!(w, "E");
- }
- ty::ReVar(_) | ty::ReSkolemized(..) => {
- // these should not crop up after typeck
- bug!("cannot encode region variables");
- }
- }
-}
-
-fn enc_scope(w: &mut Cursor<Vec<u8>>, cx: &ctxt, scope: region::CodeExtent) {
- match cx.tcx.region_maps.code_extent_data(scope) {
- region::CodeExtentData::CallSiteScope {
- fn_id, body_id } => write!(w, "C[{}|{}]", fn_id, body_id),
- region::CodeExtentData::ParameterScope {
- fn_id, body_id } => write!(w, "P[{}|{}]", fn_id, body_id),
- region::CodeExtentData::Misc(node_id) => write!(w, "M{}", node_id),
- region::CodeExtentData::Remainder(region::BlockRemainder {
- block: b, first_statement_index: i }) => write!(w, "B[{}|{}]", b, i),
- region::CodeExtentData::DestructionScope(node_id) => write!(w, "D{}", node_id),
- };
-}
-
-fn enc_bound_region(w: &mut Cursor<Vec<u8>>, cx: &ctxt, br: ty::BoundRegion) {
- match br {
- ty::BrAnon(idx) => {
- write!(w, "a{}|", idx);
- }
- ty::BrNamed(d, name, issue32330) => {
- write!(w, "[{}|{}|",
- (cx.ds)(cx.tcx, d),
- name);
-
- match issue32330 {
- ty::Issue32330::WontChange =>
- write!(w, "n]"),
- ty::Issue32330::WillChange { fn_def_id, region_name } =>
- write!(w, "y{}|{}]", (cx.ds)(cx.tcx, fn_def_id), region_name),
- };
- }
- ty::BrFresh(id) => {
- write!(w, "f{}|", id);
- }
- ty::BrEnv => {
- write!(w, "e|");
- }
- }
-}
-
-pub fn enc_trait_ref<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- s: ty::TraitRef<'tcx>) {
- write!(w, "{}|", (cx.ds)(cx.tcx, s.def_id));
- enc_substs(w, cx, s.substs);
-}
-
-fn enc_existential_trait_ref<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- s: ty::ExistentialTraitRef<'tcx>) {
- write!(w, "{}|", (cx.ds)(cx.tcx, s.def_id));
- enc_substs(w, cx, s.substs);
-}
-
-fn enc_unsafety(w: &mut Cursor<Vec<u8>>, p: hir::Unsafety) {
- match p {
- hir::Unsafety::Normal => write!(w, "n"),
- hir::Unsafety::Unsafe => write!(w, "u"),
- };
-}
-
-fn enc_abi(w: &mut Cursor<Vec<u8>>, abi: Abi) {
- write!(w, "[");
- write!(w, "{}", abi.name());
- write!(w, "]");
-}
-
-pub fn enc_bare_fn_ty<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- ft: &ty::BareFnTy<'tcx>) {
- enc_unsafety(w, ft.unsafety);
- enc_abi(w, ft.abi);
- enc_fn_sig(w, cx, &ft.sig);
-}
-
-pub fn enc_closure_ty<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- ft: &ty::ClosureTy<'tcx>) {
- enc_unsafety(w, ft.unsafety);
- enc_fn_sig(w, cx, &ft.sig);
- enc_abi(w, ft.abi);
-}
-
-fn enc_fn_sig<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- fsig: &ty::PolyFnSig<'tcx>) {
- write!(w, "[");
- for ty in &fsig.0.inputs {
- enc_ty(w, cx, *ty);
- }
- write!(w, "]");
- if fsig.0.variadic {
- write!(w, "V");
- } else {
- write!(w, "N");
- }
- enc_ty(w, cx, fsig.0.output);
-}
-
-fn enc_builtin_bounds(w: &mut Cursor<Vec<u8>>, _cx: &ctxt, bs: &ty::BuiltinBounds) {
- for bound in bs {
- match bound {
- ty::BoundSend => write!(w, "S"),
- ty::BoundSized => write!(w, "Z"),
- ty::BoundCopy => write!(w, "P"),
- ty::BoundSync => write!(w, "T"),
- };
- }
-
- write!(w, ".");
-}
-
-fn enc_type_param_def<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
- v: &ty::TypeParameterDef<'tcx>) {
- write!(w, "{}:{}|{}|{}|",
- v.name, (cx.ds)(cx.tcx, v.def_id),
- v.index, (cx.ds)(cx.tcx, v.default_def_id));
- enc_opt(w, v.default, |w, t| enc_ty(w, cx, t));
- enc_object_lifetime_default(w, cx, v.object_lifetime_default);
-}
-
-fn enc_region_param_def(w: &mut Cursor<Vec<u8>>, cx: &ctxt,
- v: &ty::RegionParameterDef) {
- write!(w, "{}:{}|{}|",
- v.name, (cx.ds)(cx.tcx, v.def_id), v.index);
- for &r in &v.bounds {
- write!(w, "R");
- enc_region(w, cx, r);
- }
- write!(w, ".");
-}
-
-fn enc_object_lifetime_default<'a, 'tcx>(w: &mut Cursor<Vec<u8>>,
- cx: &ctxt<'a, 'tcx>,
- default: ty::ObjectLifetimeDefault)
-{
- match default {
- ty::ObjectLifetimeDefault::Ambiguous => {
- write!(w, "a");
- }
- ty::ObjectLifetimeDefault::BaseDefault => {
- write!(w, "b");
- }
- ty::ObjectLifetimeDefault::Specific(r) => {
- write!(w, "s");
- enc_region(w, cx, r);
- }
- }
-}
-
-pub fn enc_predicate<'a, 'tcx>(w: &mut Cursor<Vec<u8>>,
- cx: &ctxt<'a, 'tcx>,
- p: &ty::Predicate<'tcx>)
-{
- match *p {
- ty::Predicate::Trait(ref trait_ref) => {
- write!(w, "t");
- enc_trait_ref(w, cx, trait_ref.0.trait_ref);
- }
- ty::Predicate::Equate(ty::Binder(ty::EquatePredicate(a, b))) => {
- write!(w, "e");
- enc_ty(w, cx, a);
- enc_ty(w, cx, b);
- }
- ty::Predicate::RegionOutlives(ty::Binder(ty::OutlivesPredicate(a, b))) => {
- write!(w, "r");
- enc_region(w, cx, a);
- enc_region(w, cx, b);
- }
- ty::Predicate::TypeOutlives(ty::Binder(ty::OutlivesPredicate(a, b))) => {
- write!(w, "o");
- enc_ty(w, cx, a);
- enc_region(w, cx, b);
- }
- ty::Predicate::Projection(ty::Binder(ref data)) => {
- write!(w, "p");
- enc_trait_ref(w, cx, data.projection_ty.trait_ref);
- write!(w, "{}|", data.projection_ty.item_name);
- enc_ty(w, cx, data.ty);
- }
- ty::Predicate::WellFormed(data) => {
- write!(w, "w");
- enc_ty(w, cx, data);
- }
- ty::Predicate::ObjectSafe(trait_def_id) => {
- write!(w, "O{}|", (cx.ds)(cx.tcx, trait_def_id));
- }
- ty::Predicate::ClosureKind(closure_def_id, kind) => {
- let kind_char = match kind {
- ty::ClosureKind::Fn => 'f',
- ty::ClosureKind::FnMut => 'm',
- ty::ClosureKind::FnOnce => 'o',
- };
- write!(w, "c{}|{}|", (cx.ds)(cx.tcx, closure_def_id), kind_char);
- }
- }
-}
-
-fn enc_existential_projection<'a, 'tcx>(w: &mut Cursor<Vec<u8>>,
- cx: &ctxt<'a, 'tcx>,
- data: &ty::ExistentialProjection<'tcx>) {
- enc_existential_trait_ref(w, cx, data.trait_ref);
- write!(w, "{}|", data.item_name);
- enc_ty(w, cx, data.ty);
-}
success.and(slice.index(idx))
}
ExprKind::SelfRef => {
- block.and(Lvalue::Arg(Arg::new(0)))
+ block.and(Lvalue::Local(Local::new(1)))
}
ExprKind::VarRef { id } => {
let index = this.var_indices[&id];
- block.and(Lvalue::Var(index))
+ block.and(Lvalue::Local(index))
}
ExprKind::StaticRef { id } => {
block.and(Lvalue::Static(id))
}
ExprKind::Return { value } => {
block = match value {
- Some(value) => unpack!(this.into(&Lvalue::ReturnPointer, block, value)),
+ Some(value) => {
+ unpack!(this.into(&Lvalue::Local(RETURN_POINTER), block, value))
+ }
None => {
- this.cfg.push_assign_unit(block, source_info, &Lvalue::ReturnPointer);
+ this.cfg.push_assign_unit(block,
+ source_info,
+ &Lvalue::Local(RETURN_POINTER));
block
}
};
var,
subpattern: None, .. } => {
self.storage_live_for_bindings(block, &irrefutable_pat);
- let lvalue = Lvalue::Var(self.var_indices[&var]);
+ let lvalue = Lvalue::Local(self.var_indices[&var]);
return self.into(&lvalue, block, initializer);
}
_ => {}
pattern: &Pattern<'tcx>) {
match *pattern.kind {
PatternKind::Binding { var, ref subpattern, .. } => {
- let lvalue = Lvalue::Var(self.var_indices[&var]);
+ let lvalue = Lvalue::Local(self.var_indices[&var]);
let source_info = self.source_info(pattern.span);
self.cfg.push(block, Statement {
source_info: source_info,
let source_info = self.source_info(binding.span);
self.cfg.push(block, Statement {
source_info: source_info,
- kind: StatementKind::StorageLive(Lvalue::Var(var_index))
+ kind: StatementKind::StorageLive(Lvalue::Local(var_index))
});
self.cfg.push_assign(block, source_info,
- &Lvalue::Var(var_index), rvalue);
+ &Lvalue::Local(var_index), rvalue);
}
}
name: Name,
var_id: NodeId,
var_ty: Ty<'tcx>)
- -> Var
+ -> Local
{
debug!("declare_binding(var_id={:?}, name={:?}, var_ty={:?}, source_info={:?})",
var_id, name, var_ty, source_info);
- let var = self.var_decls.push(VarDecl::<'tcx> {
- source_info: source_info,
+ let var = self.local_decls.push(LocalDecl::<'tcx> {
mutability: mutability,
- name: name,
ty: var_ty.clone(),
+ name: Some(name),
+ source_info: Some(source_info),
});
let extent = self.extent_of_innermost_scope();
- self.schedule_drop(source_info.span, extent, &Lvalue::Var(var), var_ty);
+ self.schedule_drop(source_info.span, extent, &Lvalue::Local(var), var_ty);
self.var_indices.insert(var_id, var);
debug!("declare_binding: var={:?}", var);
/// NB: **No cleanup is scheduled for this temporary.** You should
/// call `schedule_drop` once the temporary is initialized.
pub fn temp(&mut self, ty: Ty<'tcx>) -> Lvalue<'tcx> {
- let temp = self.temp_decls.push(TempDecl { ty: ty });
- let lvalue = Lvalue::Temp(temp);
+ let temp = self.local_decls.push(LocalDecl::new_temp(ty));
+ let lvalue = Lvalue::Local(temp);
debug!("temp: created temp {:?} with type {:?}",
- lvalue, self.temp_decls[temp].ty);
+ lvalue, self.local_decls[temp].ty);
lvalue
}
cfg: CFG<'tcx>,
fn_span: Span,
+ arg_count: usize,
/// the current set of scopes, updated as we traverse;
/// see the `scope` module for more details
visibility_scopes: IndexVec<VisibilityScope, VisibilityScopeData>,
visibility_scope: VisibilityScope,
- var_decls: IndexVec<Var, VarDecl<'tcx>>,
- var_indices: NodeMap<Var>,
- temp_decls: IndexVec<Temp, TempDecl<'tcx>>,
+ /// Maps node ids of variable bindings to the `Local`s created for them.
+ var_indices: NodeMap<Local>,
+ local_decls: IndexVec<Local, LocalDecl<'tcx>>,
unit_temp: Option<Lvalue<'tcx>>,
/// cached block with the RESUME terminator; this is created
-> (Mir<'tcx>, ScopeAuxiliaryVec)
where A: Iterator<Item=(Ty<'gcx>, Option<&'gcx hir::Pat>)>
{
+ let arguments: Vec<_> = arguments.collect();
+
let tcx = hir.tcx();
let span = tcx.map.span(fn_id);
- let mut builder = Builder::new(hir, span);
+ let mut builder = Builder::new(hir, span, arguments.len(), return_ty);
let body_id = ast_block.id;
let call_site_extent =
tcx.region_maps.lookup_code_extent(
CodeExtentData::ParameterScope { fn_id: fn_id, body_id: body_id });
let mut block = START_BLOCK;
- let mut arg_decls = unpack!(block = builder.in_scope(call_site_extent, block, |builder| {
- let arg_decls = unpack!(block = builder.in_scope(arg_extent, block, |builder| {
- builder.args_and_body(block, return_ty, arguments, arg_extent, ast_block)
+ unpack!(block = builder.in_scope(call_site_extent, block, |builder| {
+ unpack!(block = builder.in_scope(arg_extent, block, |builder| {
+ builder.args_and_body(block, return_ty, &arguments, arg_extent, ast_block)
}));
let source_info = builder.source_info(span);
TerminatorKind::Goto { target: return_block });
builder.cfg.terminate(return_block, source_info,
TerminatorKind::Return);
- return_block.and(arg_decls)
+ return_block.unit()
}));
assert_eq!(block, builder.return_block());
+ let mut spread_arg = None;
match tcx.node_id_to_type(fn_id).sty {
ty::TyFnDef(_, _, f) if f.abi == Abi::RustCall => {
// RustCall pseudo-ABI untuples the last argument.
- if let Some(last_arg) = arg_decls.last() {
- arg_decls[last_arg].spread = true;
- }
+ spread_arg = Some(Local::new(arguments.len()));
}
_ => {}
}
// Gather the upvars of a closure, if any.
let upvar_decls: Vec<_> = tcx.with_freevars(fn_id, |freevars| {
freevars.iter().map(|fv| {
+ let var_id = tcx.map.as_local_node_id(fv.def.def_id()).unwrap();
let by_ref = tcx.upvar_capture(ty::UpvarId {
- var_id: fv.def.var_id(),
+ var_id: var_id,
closure_expr_id: fn_id
}).map_or(false, |capture| match capture {
ty::UpvarCapture::ByValue => false,
debug_name: keywords::Invalid.name(),
by_ref: by_ref
};
- if let Some(hir::map::NodeLocal(pat)) = tcx.map.find(fv.def.var_id()) {
+ if let Some(hir::map::NodeLocal(pat)) = tcx.map.find(var_id) {
if let hir::PatKind::Binding(_, ref ident, _) = pat.node {
decl.debug_name = ident.node;
}
}).collect()
});
- builder.finish(upvar_decls, arg_decls, return_ty)
+ let (mut mir, aux) = builder.finish(upvar_decls, return_ty);
+ mir.spread_arg = spread_arg;
+ (mir, aux)
}
pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
ast_expr: &'tcx hir::Expr)
-> (Mir<'tcx>, ScopeAuxiliaryVec) {
let tcx = hir.tcx();
+ let ty = tcx.expr_ty_adjusted(ast_expr);
let span = tcx.map.span(item_id);
- let mut builder = Builder::new(hir, span);
+ let mut builder = Builder::new(hir, span, 0, ty);
let extent = tcx.region_maps.temporary_scope(ast_expr.id)
.unwrap_or(ROOT_CODE_EXTENT);
let mut block = START_BLOCK;
let _ = builder.in_scope(extent, block, |builder| {
let expr = builder.hir.mirror(ast_expr);
- unpack!(block = builder.into(&Lvalue::ReturnPointer, block, expr));
+ unpack!(block = builder.into(&Lvalue::Local(RETURN_POINTER), block, expr));
let source_info = builder.source_info(span);
let return_block = builder.return_block();
return_block.unit()
});
- let ty = tcx.expr_ty_adjusted(ast_expr);
- builder.finish(vec![], IndexVec::new(), ty)
+ builder.finish(vec![], ty)
}
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
- fn new(hir: Cx<'a, 'gcx, 'tcx>, span: Span) -> Builder<'a, 'gcx, 'tcx> {
+ fn new(hir: Cx<'a, 'gcx, 'tcx>,
+ span: Span,
+ arg_count: usize,
+ return_ty: Ty<'tcx>)
+ -> Builder<'a, 'gcx, 'tcx> {
let mut builder = Builder {
hir: hir,
cfg: CFG { basic_blocks: IndexVec::new() },
fn_span: span,
+ arg_count: arg_count,
scopes: vec![],
visibility_scopes: IndexVec::new(),
visibility_scope: ARGUMENT_VISIBILITY_SCOPE,
scope_auxiliary: IndexVec::new(),
loop_scopes: vec![],
- temp_decls: IndexVec::new(),
- var_decls: IndexVec::new(),
+ local_decls: IndexVec::from_elem_n(LocalDecl::new_return_pointer(return_ty), 1),
var_indices: NodeMap(),
unit_temp: None,
cached_resume_block: None,
fn finish(self,
upvar_decls: Vec<UpvarDecl>,
- arg_decls: IndexVec<Arg, ArgDecl<'tcx>>,
return_ty: Ty<'tcx>)
-> (Mir<'tcx>, ScopeAuxiliaryVec) {
for (index, block) in self.cfg.basic_blocks.iter().enumerate() {
self.visibility_scopes,
IndexVec::new(),
return_ty,
- self.var_decls,
- arg_decls,
- self.temp_decls,
+ self.local_decls,
+ self.arg_count,
upvar_decls,
self.fn_span
), self.scope_auxiliary)
}
- fn args_and_body<A>(&mut self,
- mut block: BasicBlock,
- return_ty: Ty<'tcx>,
- arguments: A,
- argument_extent: CodeExtent,
- ast_block: &'gcx hir::Block)
- -> BlockAnd<IndexVec<Arg, ArgDecl<'tcx>>>
- where A: Iterator<Item=(Ty<'gcx>, Option<&'gcx hir::Pat>)>
+ fn args_and_body(&mut self,
+ mut block: BasicBlock,
+ return_ty: Ty<'tcx>,
+ arguments: &[(Ty<'gcx>, Option<&'gcx hir::Pat>)],
+ argument_extent: CodeExtent,
+ ast_block: &'gcx hir::Block)
+ -> BlockAnd<()>
{
- // to start, translate the argument patterns and collect the argument types.
+ // Allocate locals for the function arguments
+ for &(ty, pattern) in arguments.iter() {
+ // If this is a simple binding pattern, give the local a nice name for debuginfo.
+ let mut name = None;
+ if let Some(pat) = pattern {
+ if let hir::PatKind::Binding(_, ref ident, _) = pat.node {
+ name = Some(ident.node);
+ }
+ }
+
+ self.local_decls.push(LocalDecl {
+ mutability: Mutability::Not,
+ ty: ty,
+ source_info: None,
+ name: name,
+ });
+ }
+
let mut scope = None;
- let arg_decls = arguments.enumerate().map(|(index, (ty, pattern))| {
- let lvalue = Lvalue::Arg(Arg::new(index));
+ // Bind the argument patterns
+ for (index, &(ty, pattern)) in arguments.iter().enumerate() {
+ // Function arguments always get the first Local indices after the return pointer
+ let lvalue = Lvalue::Local(Local::new(index + 1));
+
if let Some(pattern) = pattern {
let pattern = self.hir.irrefutable_pat(pattern);
scope = self.declare_bindings(scope, ast_block.span, &pattern);
self.schedule_drop(pattern.as_ref().map_or(ast_block.span, |pat| pat.span),
argument_extent, &lvalue, ty);
- let mut name = keywords::Invalid.name();
- if let Some(pat) = pattern {
- if let hir::PatKind::Binding(_, ref ident, _) = pat.node {
- name = ident.node;
- }
- }
-
- ArgDecl {
- ty: ty,
- spread: false,
- debug_name: name
- }
- }).collect();
+ }
// Enter the argument pattern bindings visibility scope, if it exists.
if let Some(visibility_scope) = scope {
// FIXME(#32959): temporary hack for the issue at hand
let return_is_unit = return_ty.is_nil();
// start the first basic block and translate the body
- unpack!(block = self.ast_block(&Lvalue::ReturnPointer, return_is_unit, block, ast_block));
+ unpack!(block = self.ast_block(&Lvalue::Local(RETURN_POINTER),
+ return_is_unit, block, ast_block));
- block.and(arg_decls)
+ block.unit()
}
fn get_unit_temp(&mut self) -> Lvalue<'tcx> {
For now, we keep a mapping from each `CodeExtent` to its
corresponding SEME region for later reference (see caveat in next
paragraph). This is because region scopes are tied to
-them. Eventually, when we shift to non-lexical lifetimes, three should
+them. Eventually, when we shift to non-lexical lifetimes, there should
be no need to remember this mapping.
There is one additional wrinkle, actually, that I wanted to hide from
early exit occurs, the method `exit_scope` is called. It is given the
current point in execution where the early exit occurs, as well as the
scope you want to branch to (note that all early exits from to some
-other enclosing scope). `exit_scope` will record thid exit point and
+other enclosing scope). `exit_scope` will record this exit point and
also add all drops.
Panics are handled in a similar fashion, except that a panic always
self.diverge_cleanup();
let scope = self.scopes.pop().unwrap();
assert_eq!(scope.extent, extent);
- unpack!(block = build_scope_drops(&mut self.cfg, &scope, &self.scopes, block));
+ unpack!(block = build_scope_drops(&mut self.cfg,
+ &scope,
+ &self.scopes,
+ block,
+ self.arg_count));
self.scope_auxiliary[scope.id]
.postdoms
.push(self.cfg.current_location(block));
scope.cached_exits.insert((target, extent), b);
b
};
- unpack!(block = build_scope_drops(&mut self.cfg, scope, rest, block));
+ unpack!(block = build_scope_drops(&mut self.cfg,
+ scope,
+ rest,
+ block,
+ self.arg_count));
if let Some(ref free_data) = scope.free {
let next = self.cfg.start_new_block();
let free = build_free(self.hir.tcx(), &tmp, free_data, next);
} else {
// Only temps and vars need their storage dead.
match *lvalue {
- Lvalue::Temp(_) | Lvalue::Var(_) => DropKind::Storage,
+ Lvalue::Local(index) if index.index() > self.arg_count => DropKind::Storage,
_ => return
}
};
fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>,
scope: &Scope<'tcx>,
earlier_scopes: &[Scope<'tcx>],
- mut block: BasicBlock)
+ mut block: BasicBlock,
+ arg_count: usize)
-> BlockAnd<()> {
let mut iter = scope.drops.iter().rev().peekable();
while let Some(drop_data) = iter.next() {
DropKind::Storage => {
// Only temps and vars need their storage dead.
match drop_data.location {
- Lvalue::Temp(_) | Lvalue::Var(_) => {}
+ Lvalue::Local(index) if index.index() > arg_count => {}
_ => continue
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Def-use analysis.
+
+use rustc::mir::repr::{Local, Location, Lvalue, Mir};
+use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor};
+use rustc_data_structures::indexed_vec::IndexVec;
+use std::marker::PhantomData;
+use std::mem;
+
+pub struct DefUseAnalysis<'tcx> {
+ info: IndexVec<Local, Info<'tcx>>,
+}
+
+#[derive(Clone)]
+pub struct Info<'tcx> {
+ pub defs_and_uses: Vec<Use<'tcx>>,
+}
+
+#[derive(Clone)]
+pub struct Use<'tcx> {
+ pub context: LvalueContext<'tcx>,
+ pub location: Location,
+}
+
+impl<'tcx> DefUseAnalysis<'tcx> {
+ pub fn new(mir: &Mir<'tcx>) -> DefUseAnalysis<'tcx> {
+ DefUseAnalysis {
+ info: IndexVec::from_elem_n(Info::new(), mir.local_decls.len()),
+ }
+ }
+
+ pub fn analyze(&mut self, mir: &Mir<'tcx>) {
+ let mut finder = DefUseFinder {
+ info: mem::replace(&mut self.info, IndexVec::new()),
+ };
+ finder.visit_mir(mir);
+ self.info = finder.info
+ }
+
+ pub fn local_info(&self, local: Local) -> &Info<'tcx> {
+ &self.info[local]
+ }
+
+ pub fn local_info_mut(&mut self, local: Local) -> &mut Info<'tcx> {
+ &mut self.info[local]
+ }
+
+ fn mutate_defs_and_uses<F>(&self, local: Local, mir: &mut Mir<'tcx>, mut callback: F)
+ where F: for<'a> FnMut(&'a mut Lvalue<'tcx>,
+ LvalueContext<'tcx>,
+ Location) {
+ for lvalue_use in &self.info[local].defs_and_uses {
+ MutateUseVisitor::new(local,
+ &mut callback,
+ mir).visit_location(mir, lvalue_use.location)
+ }
+ }
+
+ /// FIXME(pcwalton): This should update the def-use chains.
+ pub fn replace_all_defs_and_uses_with(&self,
+ local: Local,
+ mir: &mut Mir<'tcx>,
+ new_lvalue: Lvalue<'tcx>) {
+ self.mutate_defs_and_uses(local, mir, |lvalue, _, _| *lvalue = new_lvalue.clone())
+ }
+}
+
+struct DefUseFinder<'tcx> {
+ info: IndexVec<Local, Info<'tcx>>,
+}
+
+impl<'tcx> DefUseFinder<'tcx> {
+ fn lvalue_mut_info(&mut self, lvalue: &Lvalue<'tcx>) -> Option<&mut Info<'tcx>> {
+ let info = &mut self.info;
+
+ if let Lvalue::Local(local) = *lvalue {
+ Some(&mut info[local])
+ } else {
+ None
+ }
+ }
+}
+
+impl<'tcx> Visitor<'tcx> for DefUseFinder<'tcx> {
+ fn visit_lvalue(&mut self,
+ lvalue: &Lvalue<'tcx>,
+ context: LvalueContext<'tcx>,
+ location: Location) {
+ if let Some(ref mut info) = self.lvalue_mut_info(lvalue) {
+ info.defs_and_uses.push(Use {
+ context: context,
+ location: location,
+ })
+ }
+ self.super_lvalue(lvalue, context, location)
+ }
+}
+
+impl<'tcx> Info<'tcx> {
+ fn new() -> Info<'tcx> {
+ Info {
+ defs_and_uses: vec![],
+ }
+ }
+
+ pub fn def_count(&self) -> usize {
+ self.defs_and_uses.iter().filter(|lvalue_use| lvalue_use.context.is_mutating_use()).count()
+ }
+
+ pub fn def_count_not_including_drop(&self) -> usize {
+ self.defs_and_uses.iter().filter(|lvalue_use| {
+ lvalue_use.context.is_mutating_use() && !lvalue_use.context.is_drop()
+ }).count()
+ }
+
+ pub fn use_count(&self) -> usize {
+ self.defs_and_uses.iter().filter(|lvalue_use| {
+ lvalue_use.context.is_nonmutating_use()
+ }).count()
+ }
+}
+
+struct MutateUseVisitor<'tcx, F> {
+ query: Local,
+ callback: F,
+ phantom: PhantomData<&'tcx ()>,
+}
+
+impl<'tcx, F> MutateUseVisitor<'tcx, F> {
+ fn new(query: Local, callback: F, _: &Mir<'tcx>)
+ -> MutateUseVisitor<'tcx, F>
+ where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) {
+ MutateUseVisitor {
+ query: query,
+ callback: callback,
+ phantom: PhantomData,
+ }
+ }
+}
+
+impl<'tcx, F> MutVisitor<'tcx> for MutateUseVisitor<'tcx, F>
+ where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) {
+ fn visit_lvalue(&mut self,
+ lvalue: &mut Lvalue<'tcx>,
+ context: LvalueContext<'tcx>,
+ location: Location) {
+ if let Lvalue::Local(local) = *lvalue {
+ if local == self.query {
+ (self.callback)(lvalue, context, location)
+ }
+ }
+ self.super_lvalue(lvalue, context, location)
+ }
+}
write!(w, " label=<fn {}(", dot::escape_html(&tcx.node_path_str(nid)))?;
// fn argument types.
- for (i, arg) in mir.arg_decls.iter().enumerate() {
+ for (i, arg) in mir.args_iter().enumerate() {
if i > 0 {
write!(w, ", ")?;
}
- write!(w, "{:?}: {}", Lvalue::Arg(Arg::new(i)), escape(&arg.ty))?;
+ write!(w, "{:?}: {}", Lvalue::Local(arg), escape(&mir.local_decls[arg].ty))?;
}
write!(w, ") -> {}", escape(mir.return_ty))?;
write!(w, r#"<br align="left"/>"#)?;
- // User variable types (including the user's name in a comment).
- for (i, var) in mir.var_decls.iter().enumerate() {
+ for local in mir.vars_and_temps_iter() {
+ let decl = &mir.local_decls[local];
+
write!(w, "let ")?;
- if var.mutability == Mutability::Mut {
+ if decl.mutability == Mutability::Mut {
write!(w, "mut ")?;
}
- write!(w, r#"{:?}: {}; // {}<br align="left"/>"#,
- Lvalue::Var(Var::new(i)), escape(&var.ty), var.name)?;
- }
- // Compiler-introduced temporary types.
- for (i, temp) in mir.temp_decls.iter().enumerate() {
- write!(w, r#"let mut {:?}: {};<br align="left"/>"#,
- Lvalue::Temp(Temp::new(i)), escape(&temp.ty))?;
+ if let Some(name) = decl.name {
+ write!(w, r#"{:?}: {}; // {}<br align="left"/>"#,
+ Lvalue::Local(local), escape(&decl.ty), name)?;
+ } else {
+ write!(w, r#"let mut {:?}: {};<br align="left"/>"#,
+ Lvalue::Local(local), escape(&decl.ty))?;
+ }
}
writeln!(w, ">;")
// Tuple-like ADTs are represented as ExprCall. We convert them here.
expr_ty.ty_adt_def().and_then(|adt_def|{
match cx.tcx.expect_def(fun.id) {
- Def::Variant(_, variant_id) => {
+ Def::Variant(variant_id) => {
Some((adt_def, adt_def.variant_index_with_id(variant_id)))
},
Def::Struct(..) => {
}
AdtKind::Enum => {
match cx.tcx.expect_def(expr.id) {
- Def::Variant(enum_id, variant_id) => {
- debug_assert!(adt.did == enum_id);
+ Def::Variant(variant_id) => {
assert!(base.is_none());
let index = adt.variant_index_with_id(variant_id);
},
ref sty => bug!("unexpected sty: {:?}", sty)
},
- Def::Variant(enum_id, variant_id) => match cx.tcx.node_id_to_type(expr.id).sty {
+ Def::Variant(variant_id) => match cx.tcx.node_id_to_type(expr.id).sty {
// A variant constructor. Should only be reached if not called in the same
// expression.
ty::TyFnDef(..) => variant_id,
// A unit variant, similar special case to the struct case above.
ty::TyAdt(adt_def, substs) => {
- debug_assert!(adt_def.did == enum_id);
let index = adt_def.variant_index_with_id(variant_id);
return ExprKind::Adt {
adt_def: adt_def,
let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id);
match def {
- Def::Local(_, node_id) => {
+ Def::Local(def_id) => {
+ let node_id = cx.tcx.map.as_local_node_id(def_id).unwrap();
ExprKind::VarRef {
id: node_id,
}
}
- Def::Upvar(_, id_var, index, closure_expr_id) => {
+ Def::Upvar(def_id, index, closure_expr_id) => {
+ let id_var = cx.tcx.map.as_local_node_id(def_id).unwrap();
debug!("convert_var(upvar({:?}, {:?}, {:?}))", id_var, index, closure_expr_id);
let var_ty = cx.tcx.node_id_to_type(id_var);
freevar: &hir::Freevar,
freevar_ty: Ty<'tcx>)
-> ExprRef<'tcx> {
- let id_var = freevar.def.var_id();
+ let id_var = cx.tcx.map.as_local_node_id(freevar.def.def_id()).unwrap();
let upvar_id = ty::UpvarId {
var_id: id_var,
closure_expr_id: closure_expr.id,
}
PatKind::Binding(bm, ref ident, ref sub) => {
- let id = self.cx.tcx.expect_def(pat.id).var_id();
+ let def_id = self.cx.tcx.expect_def(pat.id).def_id();
+ let id = self.cx.tcx.map.as_local_node_id(def_id).unwrap();
let var_ty = self.cx.tcx.node_id_to_type(pat.id);
let region = match var_ty.sty {
ty::TyRef(r, _) => Some(r),
subpatterns: Vec<FieldPattern<'tcx>>)
-> PatternKind<'tcx> {
match self.cx.tcx.expect_def(pat.id) {
- Def::Variant(enum_id, variant_id) => {
+ Def::Variant(variant_id) => {
+ let enum_id = self.cx.tcx.parent_def_id(variant_id).unwrap();
let adt_def = self.cx.tcx.lookup_adt_def(enum_id);
if adt_def.variants.len() > 1 {
PatternKind::Variant {
pub mod diagnostics;
pub mod build;
+pub mod def_use;
pub mod graphviz;
mod hair;
pub mod mir_map;
pub mod pretty;
pub mod transform;
+
format!("scope {} at {}", scope.index(), tcx.sess.codemap().span_to_string(span))
}
+/// Prints user-defined variables in a scope tree.
+///
+/// Returns the total number of variables printed.
fn write_scope_tree(tcx: TyCtxt,
mir: &Mir,
scope_tree: &FnvHashMap<VisibilityScope, Vec<VisibilityScope>>,
writeln!(w, "{0:1$}scope {2} {{", "", indent, child.index())?;
// User variable types (including the user's name in a comment).
- for (id, var) in mir.var_decls.iter_enumerated() {
- // Skip if not declared in this scope.
- if var.source_info.scope != child {
+ for local in mir.vars_iter() {
+ let var = &mir.local_decls[local];
+ let (name, source_info) = if var.source_info.unwrap().scope == child {
+ (var.name.unwrap(), var.source_info.unwrap())
+ } else {
+ // Not a variable or not declared in this scope.
continue;
- }
+ };
let mut_str = if var.mutability == Mutability::Mut {
"mut "
INDENT,
indent,
mut_str,
- id,
+ local,
var.ty);
writeln!(w, "{0:1$} // \"{2}\" in {3}",
indented_var,
ALIGN,
- var.name,
- comment(tcx, var.source_info))?;
+ name,
+ comment(tcx, source_info))?;
}
write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?;
}
}
+ // Print return pointer
+ let indented_retptr = format!("{}let mut {:?}: {};",
+ INDENT,
+ RETURN_POINTER,
+ mir.return_ty);
+ writeln!(w, "{0:1$} // return pointer",
+ indented_retptr,
+ ALIGN)?;
+
write_scope_tree(tcx, mir, &scope_tree, w, ARGUMENT_VISIBILITY_SCOPE, 1)?;
- write_mir_decls(mir, w)
+ write_temp_decls(mir, w)?;
+
+ // Add an empty line before the first block is printed.
+ writeln!(w, "")?;
+
+ Ok(())
}
fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)
write!(w, "(")?;
// fn argument types.
- for (i, arg) in mir.arg_decls.iter_enumerated() {
- if i.index() != 0 {
+ for (i, arg) in mir.args_iter().enumerate() {
+ if i != 0 {
write!(w, ", ")?;
}
- write!(w, "{:?}: {}", Lvalue::Arg(i), arg.ty)?;
+ write!(w, "{:?}: {}", Lvalue::Local(arg), mir.local_decls[arg].ty)?;
}
write!(w, ") -> {}", mir.return_ty)
} else {
- assert!(mir.arg_decls.is_empty());
+ assert_eq!(mir.arg_count, 0);
write!(w, ": {} =", mir.return_ty)
}
}
-fn write_mir_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {
+fn write_temp_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {
// Compiler-introduced temporary types.
- for (id, temp) in mir.temp_decls.iter_enumerated() {
- writeln!(w, "{}let mut {:?}: {};", INDENT, id, temp.ty)?;
- }
-
- // Wrote any declaration? Add an empty line before the first block is printed.
- if !mir.var_decls.is_empty() || !mir.temp_decls.is_empty() {
- writeln!(w, "")?;
+ for temp in mir.temps_iter() {
+ writeln!(w, "{}let mut {:?}: {};", INDENT, temp, mir.local_decls[temp].ty)?;
}
Ok(())
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Trivial copy propagation pass.
+//!
+//! This uses def-use analysis to remove values that have exactly one def and one use, which must
+//! be an assignment.
+//!
+//! To give an example, we look for patterns that look like:
+//!
+//! DEST = SRC
+//! ...
+//! USE(DEST)
+//!
+//! where `DEST` and `SRC` are both locals of some form. We replace that with:
+//!
+//! NOP
+//! ...
+//! USE(SRC)
+//!
+//! The assignment `DEST = SRC` must be (a) the only mutation of `DEST` and (b) the only
+//! (non-mutating) use of `SRC`. These restrictions are conservative and may be relaxed in the
+//! future.
+
+use def_use::DefUseAnalysis;
+use rustc::mir::repr::{Constant, Local, Location, Lvalue, Mir, Operand, Rvalue, StatementKind};
+use rustc::mir::transform::{MirPass, MirSource, Pass};
+use rustc::mir::visit::MutVisitor;
+use rustc::ty::TyCtxt;
+use transform::qualify_consts;
+
+pub struct CopyPropagation;
+
+impl Pass for CopyPropagation {}
+
+impl<'tcx> MirPass<'tcx> for CopyPropagation {
+ fn run_pass<'a>(&mut self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ source: MirSource,
+ mir: &mut Mir<'tcx>) {
+ match source {
+ MirSource::Const(_) => {
+ // Don't run on constants, because constant qualification might reject the
+ // optimized IR.
+ return
+ }
+ MirSource::Static(..) | MirSource::Promoted(..) => {
+ // Don't run on statics and promoted statics, because trans might not be able to
+ // evaluate the optimized IR.
+ return
+ }
+ MirSource::Fn(function_node_id) => {
+ if qualify_consts::is_const_fn(tcx, tcx.map.local_def_id(function_node_id)) {
+ // Don't run on const functions, as, again, trans might not be able to evaluate
+ // the optimized IR.
+ return
+ }
+ }
+ }
+
+ // We only run when the MIR optimization level is at least 1. This avoids messing up debug
+ // info.
+ match tcx.sess.opts.debugging_opts.mir_opt_level {
+ Some(0) | None => return,
+ _ => {}
+ }
+
+ loop {
+ let mut def_use_analysis = DefUseAnalysis::new(mir);
+ def_use_analysis.analyze(mir);
+
+ let mut changed = false;
+ for dest_local in mir.local_decls.indices() {
+ debug!("Considering destination local: {:?}", dest_local);
+
+ let action;
+ let location;
+ {
+ // The destination must have exactly one def.
+ let dest_use_info = def_use_analysis.local_info(dest_local);
+ let dest_def_count = dest_use_info.def_count_not_including_drop();
+ if dest_def_count == 0 {
+ debug!(" Can't copy-propagate local: dest {:?} undefined",
+ dest_local);
+ continue
+ }
+ if dest_def_count > 1 {
+ debug!(" Can't copy-propagate local: dest {:?} defined {} times",
+ dest_local,
+ dest_use_info.def_count());
+ continue
+ }
+ if dest_use_info.use_count() == 0 {
+ debug!(" Can't copy-propagate local: dest {:?} unused",
+ dest_local);
+ continue
+ }
+ let dest_lvalue_def = dest_use_info.defs_and_uses.iter().filter(|lvalue_def| {
+ lvalue_def.context.is_mutating_use() && !lvalue_def.context.is_drop()
+ }).next().unwrap();
+ location = dest_lvalue_def.location;
+
+ let basic_block = &mir[location.block];
+ let statement_index = location.statement_index;
+ let statement = match basic_block.statements.get(statement_index) {
+ Some(statement) => statement,
+ None => {
+ debug!(" Can't copy-propagate local: used in terminator");
+ continue
+ }
+ };
+
+ // That use of the source must be an assignment.
+ match statement.kind {
+ StatementKind::Assign(Lvalue::Local(local), Rvalue::Use(ref operand)) if
+ local == dest_local => {
+ let maybe_action = match *operand {
+ Operand::Consume(ref src_lvalue) => {
+ Action::local_copy(&def_use_analysis, src_lvalue)
+ }
+ Operand::Constant(ref src_constant) => {
+ Action::constant(src_constant)
+ }
+ };
+ match maybe_action {
+ Some(this_action) => action = this_action,
+ None => continue,
+ }
+ }
+ _ => {
+ debug!(" Can't copy-propagate local: source use is not an \
+ assignment");
+ continue
+ }
+ }
+ }
+
+ changed = action.perform(mir, &def_use_analysis, dest_local, location) || changed;
+ // FIXME(pcwalton): Update the use-def chains to delete the instructions instead of
+ // regenerating the chains.
+ break
+ }
+ if !changed {
+ break
+ }
+ }
+ }
+}
+
+enum Action<'tcx> {
+ PropagateLocalCopy(Local),
+ PropagateConstant(Constant<'tcx>),
+}
+
+impl<'tcx> Action<'tcx> {
+ fn local_copy(def_use_analysis: &DefUseAnalysis, src_lvalue: &Lvalue<'tcx>)
+ -> Option<Action<'tcx>> {
+ // The source must be a local.
+ let src_local = if let Lvalue::Local(local) = *src_lvalue {
+ local
+ } else {
+ debug!(" Can't copy-propagate local: source is not a local");
+ return None;
+ };
+
+ // We're trying to copy propagate a local.
+ // There must be exactly one use of the source used in a statement (not in a terminator).
+ let src_use_info = def_use_analysis.local_info(src_local);
+ let src_use_count = src_use_info.use_count();
+ if src_use_count == 0 {
+ debug!(" Can't copy-propagate local: no uses");
+ return None
+ }
+ if src_use_count != 1 {
+ debug!(" Can't copy-propagate local: {} uses", src_use_info.use_count());
+ return None
+ }
+
+ // Verify that the source doesn't change in between. This is done conservatively for now,
+ // by ensuring that the source has exactly one mutation. The goal is to prevent things
+ // like:
+ //
+ // DEST = SRC;
+ // SRC = X;
+ // USE(DEST);
+ //
+ // From being misoptimized into:
+ //
+ // SRC = X;
+ // USE(SRC);
+ let src_def_count = src_use_info.def_count_not_including_drop();
+ if src_def_count != 1 {
+ debug!(" Can't copy-propagate local: {} defs of src",
+ src_use_info.def_count_not_including_drop());
+ return None
+ }
+
+ Some(Action::PropagateLocalCopy(src_local))
+ }
+
+ fn constant(src_constant: &Constant<'tcx>) -> Option<Action<'tcx>> {
+ Some(Action::PropagateConstant((*src_constant).clone()))
+ }
+
+ fn perform(self,
+ mir: &mut Mir<'tcx>,
+ def_use_analysis: &DefUseAnalysis<'tcx>,
+ dest_local: Local,
+ location: Location)
+ -> bool {
+ match self {
+ Action::PropagateLocalCopy(src_local) => {
+ // Eliminate the destination and the assignment.
+ //
+ // First, remove all markers.
+ //
+ // FIXME(pcwalton): Don't do this. Merge live ranges instead.
+ debug!(" Replacing all uses of {:?} with {:?} (local)",
+ dest_local,
+ src_local);
+ for lvalue_use in &def_use_analysis.local_info(dest_local).defs_and_uses {
+ if lvalue_use.context.is_storage_marker() {
+ mir.make_statement_nop(lvalue_use.location)
+ }
+ }
+ for lvalue_use in &def_use_analysis.local_info(src_local).defs_and_uses {
+ if lvalue_use.context.is_storage_marker() {
+ mir.make_statement_nop(lvalue_use.location)
+ }
+ }
+
+ // Replace all uses of the destination local with the source local.
+ let src_lvalue = Lvalue::Local(src_local);
+ def_use_analysis.replace_all_defs_and_uses_with(dest_local, mir, src_lvalue);
+
+ // Finally, zap the now-useless assignment instruction.
+ debug!(" Deleting assignment");
+ mir.make_statement_nop(location);
+
+ true
+ }
+ Action::PropagateConstant(src_constant) => {
+ // First, remove all markers.
+ //
+ // FIXME(pcwalton): Don't do this. Merge live ranges instead.
+ debug!(" Replacing all uses of {:?} with {:?} (constant)",
+ dest_local,
+ src_constant);
+ let dest_local_info = def_use_analysis.local_info(dest_local);
+ for lvalue_use in &dest_local_info.defs_and_uses {
+ if lvalue_use.context.is_storage_marker() {
+ mir.make_statement_nop(lvalue_use.location)
+ }
+ }
+
+ // Replace all uses of the destination local with the constant.
+ let mut visitor = ConstantPropagationVisitor::new(dest_local,
+ src_constant);
+ for dest_lvalue_use in &dest_local_info.defs_and_uses {
+ visitor.visit_location(mir, dest_lvalue_use.location)
+ }
+
+ // Zap the assignment instruction if we eliminated all the uses. We won't have been
+ // able to do that if the destination was used in a projection, because projections
+ // must have lvalues on their LHS.
+ let use_count = dest_local_info.use_count();
+ if visitor.uses_replaced == use_count {
+ debug!(" {} of {} use(s) replaced; deleting assignment",
+ visitor.uses_replaced,
+ use_count);
+ mir.make_statement_nop(location);
+ true
+ } else if visitor.uses_replaced == 0 {
+ debug!(" No uses replaced; not deleting assignment");
+ false
+ } else {
+ debug!(" {} of {} use(s) replaced; not deleting assignment",
+ visitor.uses_replaced,
+ use_count);
+ true
+ }
+ }
+ }
+ }
+}
+
+struct ConstantPropagationVisitor<'tcx> {
+ dest_local: Local, // the local whose uses are being replaced
+ constant: Constant<'tcx>, // the constant substituted for `dest_local`
+ uses_replaced: usize, // number of substitutions actually performed
+}
+
+impl<'tcx> ConstantPropagationVisitor<'tcx> {
+ fn new(dest_local: Local, constant: Constant<'tcx>)
+ -> ConstantPropagationVisitor<'tcx> {
+ ConstantPropagationVisitor {
+ dest_local: dest_local,
+ constant: constant,
+ uses_replaced: 0,
+ }
+ }
+}
+
+impl<'tcx> MutVisitor<'tcx> for ConstantPropagationVisitor<'tcx> {
+ fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
+ self.super_operand(operand, location);
+
+ match *operand {
+ Operand::Consume(Lvalue::Local(local)) if local == self.dest_local => {}
+ _ => return, // not a direct, unprojected use of the target local
+ }
+
+ *operand = Operand::Constant(self.constant.clone()); // substitute the constant in place
+ self.uses_replaced += 1
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Performs various peephole optimizations.
+
+use rustc::mir::repr::{Location, Lvalue, Mir, Operand, ProjectionElem, Rvalue, Local};
+use rustc::mir::transform::{MirPass, MirSource, Pass};
+use rustc::mir::visit::{MutVisitor, Visitor};
+use rustc::ty::TyCtxt;
+use rustc::util::nodemap::FnvHashSet;
+use rustc_data_structures::indexed_vec::Idx;
+use std::mem;
+
+pub struct InstCombine {
+ optimizations: OptimizationList, // filled by the read-only pre-pass, consumed by the rewrite pass
+}
+
+impl InstCombine {
+ pub fn new() -> InstCombine {
+ InstCombine {
+ optimizations: OptimizationList::default(),
+ }
+ }
+}
+
+impl Pass for InstCombine {}
+
+impl<'tcx> MirPass<'tcx> for InstCombine {
+ fn run_pass<'a>(&mut self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ _: MirSource,
+ mir: &mut Mir<'tcx>) {
+ // Skip this pass only when MIR optimization is explicitly disabled (-Z mir-opt-level=0).
+ if tcx.sess.opts.debugging_opts.mir_opt_level == Some(0) {
+ return
+ }
+
+ // First, find optimization opportunities. This is done in a pre-pass to keep the MIR
+ // read-only so that we can do global analyses on the MIR in the process (e.g.
+ // `Lvalue::ty()`).
+ {
+ let mut optimization_finder = OptimizationFinder::new(mir, tcx);
+ optimization_finder.visit_mir(mir);
+ self.optimizations = optimization_finder.optimizations
+ }
+
+ // Then carry out those optimizations.
+ MutVisitor::visit_mir(&mut *self, mir);
+ }
+}
+
+impl<'tcx> MutVisitor<'tcx> for InstCombine {
+ fn visit_rvalue(&mut self, rvalue: &mut Rvalue<'tcx>, location: Location) {
+ if self.optimizations.and_stars.remove(&location) {
+ debug!("Replacing `&*`: {:?}", rvalue);
+ let new_lvalue = match *rvalue {
+ Rvalue::Ref(_, _, Lvalue::Projection(ref mut projection)) => {
+ // Take the base out of the `*` projection, leaving a dummy local behind.
+ mem::replace(&mut projection.base, Lvalue::Local(Local::new(0)))
+ }
+ _ => bug!("Detected `&*` but didn't find `&*`!"),
+ };
+ *rvalue = Rvalue::Use(Operand::Consume(new_lvalue))
+ }
+
+ self.super_rvalue(rvalue, location)
+ }
+}
+
+/// Finds optimization opportunities on the MIR.
+struct OptimizationFinder<'b, 'a, 'tcx:'a+'b> {
+ mir: &'b Mir<'tcx>, // read-only MIR, needed for `Lvalue::ty()` queries
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ optimizations: OptimizationList, // opportunities accumulated during the visit
+}
+
+impl<'b, 'a, 'tcx:'b> OptimizationFinder<'b, 'a, 'tcx> {
+ fn new(mir: &'b Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> OptimizationFinder<'b, 'a, 'tcx> {
+ OptimizationFinder {
+ mir: mir,
+ tcx: tcx,
+ optimizations: OptimizationList::default(),
+ }
+ }
+}
+
+impl<'b, 'a, 'tcx> Visitor<'tcx> for OptimizationFinder<'b, 'a, 'tcx> {
+ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
+ if let Rvalue::Ref(_, _, Lvalue::Projection(ref projection)) = *rvalue { // a `&<projection>` rvalue...
+ if let ProjectionElem::Deref = projection.elem { // ...whose projection is a deref: `&*base`...
+ if projection.base.ty(self.mir, self.tcx).to_ty(self.tcx).is_region_ptr() { // ...and `base` is already a reference
+ self.optimizations.and_stars.insert(location); // record for removal by the mutating pass
+ }
+ }
+ }
+
+ self.super_rvalue(rvalue, location)
+ }
+}
+
+#[derive(Default)]
+struct OptimizationList {
+ and_stars: FnvHashSet<Location>, // locations of `&*` rvalues eligible for simplification
+}
pub mod qualify_consts;
pub mod dump_mir;
pub mod deaggregator;
+pub mod instcombine;
+pub mod copy_prop;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
+use std::iter;
use std::mem;
use std::usize;
ShuffleIndices(BasicBlock)
}
-struct TempCollector {
- temps: IndexVec<Temp, TempState>,
- span: Span
+struct TempCollector<'tcx> {
+ temps: IndexVec<Local, TempState>,
+ span: Span,
+ mir: &'tcx Mir<'tcx>,
}
-impl<'tcx> Visitor<'tcx> for TempCollector {
- fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext, location: Location) {
+impl<'tcx> Visitor<'tcx> for TempCollector<'tcx> {
+ fn visit_lvalue(&mut self,
+ lvalue: &Lvalue<'tcx>,
+ context: LvalueContext<'tcx>,
+ location: Location) {
self.super_lvalue(lvalue, context, location);
- if let Lvalue::Temp(index) = *lvalue {
+ if let Lvalue::Local(index) = *lvalue {
+ // We're only interested in temporaries
+ if self.mir.local_kind(index) != LocalKind::Temp {
+ return;
+ }
+
// Ignore drops, if the temp gets promoted,
// then it's constant and thus drop is noop.
// Storage live ranges are also irrelevant.
}
}
-pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> IndexVec<Temp, TempState> {
+pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> IndexVec<Local, TempState> {
let mut collector = TempCollector {
- temps: IndexVec::from_elem(TempState::Undefined, &mir.temp_decls),
- span: mir.span
+ temps: IndexVec::from_elem(TempState::Undefined, &mir.local_decls),
+ span: mir.span,
+ mir: mir,
};
for (bb, data) in rpo {
collector.visit_basic_block_data(bb, data);
struct Promoter<'a, 'tcx: 'a> {
source: &'a mut Mir<'tcx>,
promoted: Mir<'tcx>,
- temps: &'a mut IndexVec<Temp, TempState>,
+ temps: &'a mut IndexVec<Local, TempState>,
/// If true, all nested temps are also kept in the
/// source MIR, not moved to the promoted MIR.
})
}
- fn assign(&mut self, dest: Lvalue<'tcx>, rvalue: Rvalue<'tcx>, span: Span) {
+ fn assign(&mut self, dest: Local, rvalue: Rvalue<'tcx>, span: Span) {
let last = self.promoted.basic_blocks().last().unwrap();
let data = &mut self.promoted[last];
data.statements.push(Statement {
span: span,
scope: ARGUMENT_VISIBILITY_SCOPE
},
- kind: StatementKind::Assign(dest, rvalue)
+ kind: StatementKind::Assign(Lvalue::Local(dest), rvalue)
});
}
/// Copy the initialization of this temp to the
/// promoted MIR, recursing through temps.
- fn promote_temp(&mut self, temp: Temp) -> Temp {
+ fn promote_temp(&mut self, temp: Local) -> Local {
let old_keep_original = self.keep_original;
let (bb, stmt_idx) = match self.temps[temp] {
TempState::Defined {
});
}
- let new_temp = self.promoted.temp_decls.push(TempDecl {
- ty: self.source.temp_decls[temp].ty
- });
+ let new_temp = self.promoted.local_decls.push(
+ LocalDecl::new_temp(self.source.local_decls[temp].ty));
// Inject the Rvalue or Call into the promoted MIR.
if stmt_idx < no_stmts {
- self.assign(Lvalue::Temp(new_temp), rvalue.unwrap(), source_info.span);
+ self.assign(new_temp, rvalue.unwrap(), source_info.span);
} else {
let last = self.promoted.basic_blocks().last().unwrap();
let new_target = self.new_block();
let mut call = call.unwrap();
match call {
TerminatorKind::Call { ref mut destination, ..} => {
- *destination = Some((Lvalue::Temp(new_temp), new_target));
+ *destination = Some((Lvalue::Local(new_temp), new_target));
}
_ => bug!()
}
}
}
};
- self.visit_rvalue(&mut rvalue, Location{
+ self.visit_rvalue(&mut rvalue, Location {
block: BasicBlock::new(0),
statement_index: usize::MAX
});
- self.assign(Lvalue::ReturnPointer, rvalue, span);
+
+ self.assign(RETURN_POINTER, rvalue, span);
self.source.promoted.push(self.promoted);
}
}
impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> {
fn visit_lvalue(&mut self,
lvalue: &mut Lvalue<'tcx>,
- context: LvalueContext,
+ context: LvalueContext<'tcx>,
location: Location) {
- if let Lvalue::Temp(ref mut temp) = *lvalue {
- *temp = self.promote_temp(*temp);
+ if let Lvalue::Local(ref mut temp) = *lvalue {
+ if self.source.local_kind(*temp) == LocalKind::Temp {
+ *temp = self.promote_temp(*temp);
+ }
}
self.super_lvalue(lvalue, context, location);
}
pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- mut temps: IndexVec<Temp, TempState>,
+ mut temps: IndexVec<Local, TempState>,
candidates: Vec<Candidate>) {
// Visit candidates in reverse, in case they're nested.
for candidate in candidates.into_iter().rev() {
"expected assignment to promote");
}
};
- if let Lvalue::Temp(index) = *dest {
+ if let Lvalue::Local(index) = *dest {
if temps[index] == TempState::PromotedOut {
// Already promoted.
continue;
}
};
+ // Declare return pointer local
+ let initial_locals = iter::once(LocalDecl::new_return_pointer(ty)).collect();
+
let mut promoter = Promoter {
- source: mir,
promoted: Mir::new(
IndexVec::new(),
Some(VisibilityScopeData {
}).into_iter().collect(),
IndexVec::new(),
ty,
- IndexVec::new(),
- IndexVec::new(),
- IndexVec::new(),
+ initial_locals,
+ 0,
vec![],
span
),
+ source: mir,
temps: &mut temps,
keep_original: false
};
}
// Eliminate assignments to, and drops of promoted temps.
- let promoted = |index: Temp| temps[index] == TempState::PromotedOut;
+ let promoted = |index: Local| temps[index] == TempState::PromotedOut;
for block in mir.basic_blocks_mut() {
block.statements.retain(|statement| {
match statement.kind {
- StatementKind::Assign(Lvalue::Temp(index), _) |
- StatementKind::StorageLive(Lvalue::Temp(index)) |
- StatementKind::StorageDead(Lvalue::Temp(index)) => {
+ StatementKind::Assign(Lvalue::Local(index), _) |
+ StatementKind::StorageLive(Lvalue::Local(index)) |
+ StatementKind::StorageDead(Lvalue::Local(index)) => {
!promoted(index)
}
_ => true
});
let terminator = block.terminator_mut();
match terminator.kind {
- TerminatorKind::Drop { location: Lvalue::Temp(index), target, .. } => {
+ TerminatorKind::Drop { location: Lvalue::Local(index), target, .. } => {
if promoted(index) {
terminator.kind = TerminatorKind::Goto {
target: target
}
}
-fn is_const_fn(tcx: TyCtxt, def_id: DefId) -> bool {
+pub fn is_const_fn(tcx: TyCtxt, def_id: DefId) -> bool {
if let Some(node_id) = tcx.map.as_local_node_id(def_id) {
let fn_like = FnLikeNode::from_node(tcx.map.get(node_id));
match fn_like.map(|f| f.kind()) {
param_env: ty::ParameterEnvironment<'tcx>,
qualif_map: &'a mut DefIdMap<Qualif>,
mir_map: Option<&'a MirMap<'tcx>>,
- temp_qualif: IndexVec<Temp, Option<Qualif>>,
+ temp_qualif: IndexVec<Local, Option<Qualif>>,
return_qualif: Option<Qualif>,
qualif: Qualif,
const_fn_arg_vars: BitVector,
- temp_promotion_state: IndexVec<Temp, TempState>,
+ temp_promotion_state: IndexVec<Local, TempState>,
promotion_candidates: Vec<Candidate>
}
param_env: param_env,
qualif_map: qualif_map,
mir_map: mir_map,
- temp_qualif: IndexVec::from_elem(None, &mir.temp_decls),
+ temp_qualif: IndexVec::from_elem(None, &mir.local_decls),
return_qualif: None,
qualif: Qualif::empty(),
- const_fn_arg_vars: BitVector::new(mir.var_decls.len()),
+ const_fn_arg_vars: BitVector::new(mir.local_decls.len()),
temp_promotion_state: temps,
promotion_candidates: vec![]
}
// Only handle promotable temps in non-const functions.
if self.mode == Mode::Fn {
- if let Lvalue::Temp(index) = *dest {
- if self.temp_promotion_state[index].is_promotable() {
+ if let Lvalue::Local(index) = *dest {
+ if self.mir.local_kind(index) == LocalKind::Temp
+ && self.temp_promotion_state[index].is_promotable() {
+ debug!("store to promotable temp {:?}", index);
store(&mut self.temp_qualif[index]);
}
}
}
match *dest {
- Lvalue::Temp(index) => store(&mut self.temp_qualif[index]),
- Lvalue::ReturnPointer => store(&mut self.return_qualif),
+ Lvalue::Local(index) if self.mir.local_kind(index) == LocalKind::Temp => {
+ debug!("store to temp {:?}", index);
+ store(&mut self.temp_qualif[index])
+ }
+ Lvalue::Local(index) if self.mir.local_kind(index) == LocalKind::ReturnPointer => {
+ debug!("store to return pointer {:?}", index);
+ store(&mut self.return_qualif)
+ }
Lvalue::Projection(box Projection {
- base: Lvalue::Temp(index),
+ base: Lvalue::Local(index),
elem: ProjectionElem::Deref
- }) if self.mir.temp_decls[index].ty.is_unique()
+ }) if self.mir.local_kind(index) == LocalKind::Temp
+ && self.mir.local_decls[index].ty.is_unique()
&& self.temp_qualif[index].map_or(false, |qualif| {
qualif.intersects(Qualif::NOT_CONST)
}) => {
/// Qualify a whole const, static initializer or const fn.
fn qualify_const(&mut self) -> Qualif {
+ debug!("qualifying {} {}", self.mode, self.tcx.item_path_str(self.def_id));
+
let mir = self.mir;
let mut seen_blocks = BitVector::new(mir.basic_blocks().len());
TerminatorKind::Return => {
// Check for unused values. This usually means
// there are extra statements in the AST.
- for temp in mir.temp_decls.indices() {
+ for temp in mir.temps_iter() {
if self.temp_qualif[temp].is_none() {
continue;
}
// Make sure there are no extra unassigned variables.
self.qualif = Qualif::NOT_CONST;
- for index in 0..mir.var_decls.len() {
- if !self.const_fn_arg_vars.contains(index) {
- self.assign(&Lvalue::Var(Var::new(index)), Location {
+ for index in mir.vars_iter() {
+ if !self.const_fn_arg_vars.contains(index.index()) {
+ debug!("unassigned variable {:?}", index);
+ self.assign(&Lvalue::Local(index), Location {
block: bb,
statement_index: usize::MAX,
});
/// For functions (constant or not), it also records
/// candidates for promotion in promotion_candidates.
impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
- fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext, location: Location) {
+ fn visit_lvalue(&mut self,
+ lvalue: &Lvalue<'tcx>,
+ context: LvalueContext<'tcx>,
+ location: Location) {
match *lvalue {
- Lvalue::Arg(_) => {
- self.add(Qualif::FN_ARGUMENT);
- }
- Lvalue::Var(_) => {
- self.add(Qualif::NOT_CONST);
- }
- Lvalue::Temp(index) => {
- if !self.temp_promotion_state[index].is_promotable() {
- self.add(Qualif::NOT_PROMOTABLE);
+ Lvalue::Local(local) => match self.mir.local_kind(local) {
+ LocalKind::ReturnPointer => {
+ self.not_const();
+ }
+ LocalKind::Arg => {
+ self.add(Qualif::FN_ARGUMENT);
}
+ LocalKind::Var => {
+ self.add(Qualif::NOT_CONST);
+ }
+ LocalKind::Temp => {
+ if !self.temp_promotion_state[local].is_promotable() {
+ self.add(Qualif::NOT_PROMOTABLE);
+ }
- if let Some(qualif) = self.temp_qualif[index] {
- self.add(qualif);
- } else {
- self.not_const();
+ if let Some(qualif) = self.temp_qualif[local] {
+ self.add(qualif);
+ } else {
+ self.not_const();
+ }
}
- }
+ },
Lvalue::Static(_) => {
self.add(Qualif::STATIC);
if self.mode == Mode::Const || self.mode == Mode::ConstFn {
a constant instead", self.mode);
}
}
- Lvalue::ReturnPointer => {
- self.not_const();
- }
Lvalue::Projection(ref proj) => {
self.nest(|this| {
this.super_lvalue(lvalue, context, location);
if self.mode == Mode::Fn || self.mode == Mode::ConstFn {
if !self.qualif.intersects(Qualif::NEVER_PROMOTE) {
// We can only promote direct borrows of temps.
- if let Lvalue::Temp(_) = *lvalue {
- self.promotion_candidates.push(candidate);
+ if let Lvalue::Local(local) = *lvalue {
+ if self.mir.local_kind(local) == LocalKind::Temp {
+ self.promotion_candidates.push(candidate);
+ }
}
}
}
self.visit_rvalue(rvalue, location);
// Check the allowed const fn argument forms.
- if let (Mode::ConstFn, &Lvalue::Var(index)) = (self.mode, dest) {
- if self.const_fn_arg_vars.insert(index.index()) {
+ if let (Mode::ConstFn, &Lvalue::Local(index)) = (self.mode, dest) {
+ if self.mir.local_kind(index) == LocalKind::Var &&
+ self.const_fn_arg_vars.insert(index.index()) {
+
// Direct use of an argument is permitted.
- if let Rvalue::Use(Operand::Consume(Lvalue::Arg(_))) = *rvalue {
- return;
+ if let Rvalue::Use(Operand::Consume(Lvalue::Local(local))) = *rvalue {
+ if self.mir.local_kind(local) == LocalKind::Arg {
+ return;
+ }
}
// Avoid a generic error for other uses of arguments.
if self.qualif.intersects(Qualif::FN_ARGUMENT) {
- let decl = &self.mir.var_decls[index];
- span_err!(self.tcx.sess, decl.source_info.span, E0022,
+ let decl = &self.mir.local_decls[index];
+ span_err!(self.tcx.sess, decl.source_info.unwrap().span, E0022,
"arguments of constant functions can only \
be immutable by-value bindings");
return;
}
StatementKind::SetDiscriminant { .. } |
StatementKind::StorageLive(_) |
- StatementKind::StorageDead(_) => {}
+ StatementKind::StorageDead(_) |
+ StatementKind::Nop => {}
}
});
}
use rustc::mir::transform::{MirPass, MirSource, Pass};
use rustc::mir::visit::{self, Visitor};
use std::fmt;
+use syntax::ast;
use syntax_pos::{Span, DUMMY_SP};
use rustc_data_structures::indexed_vec::Idx;
fn visit_mir(&mut self, mir: &Mir<'tcx>) {
self.sanitize_type(&"return type", mir.return_ty);
- for var_decl in &mir.var_decls {
- self.sanitize_type(var_decl, var_decl.ty);
- }
- for (n, arg_decl) in mir.arg_decls.iter().enumerate() {
- self.sanitize_type(&(n, arg_decl), arg_decl.ty);
- }
- for (n, tmp_decl) in mir.temp_decls.iter().enumerate() {
- self.sanitize_type(&(n, tmp_decl), tmp_decl.ty);
+ for local_decl in &mir.local_decls {
+ self.sanitize_type(local_decl, local_decl.ty);
}
if self.errors_reported {
return;
fn sanitize_lvalue(&mut self, lvalue: &Lvalue<'tcx>, location: Location) -> LvalueTy<'tcx> {
debug!("sanitize_lvalue: {:?}", lvalue);
match *lvalue {
- Lvalue::Var(index) => LvalueTy::Ty { ty: self.mir.var_decls[index].ty },
- Lvalue::Temp(index) => LvalueTy::Ty { ty: self.mir.temp_decls[index].ty },
- Lvalue::Arg(index) => LvalueTy::Ty { ty: self.mir.arg_decls[index].ty },
+ Lvalue::Local(index) => LvalueTy::Ty { ty: self.mir.local_decls[index].ty },
Lvalue::Static(def_id) =>
LvalueTy::Ty { ty: self.tcx().lookup_item_type(def_id).ty },
- Lvalue::ReturnPointer => {
- LvalueTy::Ty { ty: self.mir.return_ty }
- }
Lvalue::Projection(ref proj) => {
let base_ty = self.sanitize_lvalue(&proj.base, location);
if let LvalueTy::Ty { ty } = base_ty {
StatementKind::StorageLive(ref lv) |
StatementKind::StorageDead(ref lv) => {
match *lv {
- Lvalue::Temp(_) | Lvalue::Var(_) => {}
+ Lvalue::Local(_) => {}
_ => {
- span_mirbug!(self, stmt, "bad lvalue: expected temp or var");
+ span_mirbug!(self, stmt, "bad lvalue: expected local");
}
}
}
+ StatementKind::Nop => {}
}
}
where T: fmt::Debug + TypeFoldable<'tcx>
{
let mut selcx = traits::SelectionContext::new(self.infcx);
- let cause = traits::ObligationCause::misc(self.last_span, 0);
+ let cause = traits::ObligationCause::misc(self.last_span, ast::CRATE_NODE_ID);
let traits::Normalized { value, obligations } =
traits::normalize(&mut selcx, cause, value);
impl<'tcx> MirPass<'tcx> for TypeckMir {
fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
src: MirSource, mir: &mut Mir<'tcx>) {
+ debug!("run_pass: {}", tcx.node_path_str(src.item_id()));
+
if tcx.sess.err_count() > 0 {
// compiling a broken program can obviously result in a
// broken MIR, so try not to report duplicate errors.
span,
E0449,
"unnecessary visibility qualifier");
+ if vis == &Visibility::Public {
+ err.span_label(span, &format!("`pub` not needed here"));
+ }
if let Some(note) = note {
- err.span_note(span, note);
+ err.note(note);
}
err.emit();
}
});
if any_static {
if !self.sess.features.borrow().static_recursion {
- emit_feature_err(&self.sess.parse_sess.span_diagnostic,
+ emit_feature_err(&self.sess.parse_sess,
"static_recursion",
*self.root_span,
GateIssue::Language,
// affect the specific variant used, but we need to check
// the whole enum definition to see what expression that
// might be (if any).
- Some(Def::Variant(enum_id, variant_id)) => {
- if let Some(enum_node_id) = self.ast_map.as_local_node_id(enum_id) {
- if let hir::ItemEnum(ref enum_def, ref generics) = self.ast_map
- .expect_item(enum_node_id)
- .node {
+ Some(Def::Variant(variant_id)) => {
+ if let Some(variant_id) = self.ast_map.as_local_node_id(variant_id) {
+ let variant = self.ast_map.expect_variant(variant_id);
+ let enum_id = self.ast_map.get_parent(variant_id);
+ let enum_item = self.ast_map.expect_item(enum_id);
+ if let hir::ItemEnum(ref enum_def, ref generics) = enum_item.node {
self.populate_enum_discriminants(enum_def);
- let enum_id = self.ast_map.as_local_node_id(enum_id).unwrap();
- let variant_id = self.ast_map.as_local_node_id(variant_id).unwrap();
- let variant = self.ast_map.expect_variant(variant_id);
self.visit_variant(variant, generics, enum_id);
} else {
span_bug!(e.span,
use rustc::mir::transform::MirMapPass;
-use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT};
-use syntax::ext::base::{IdentTT, MultiModifier, MultiDecorator};
+use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
use syntax::ext::base::MacroExpanderFn;
use syntax::parse::token;
use syntax::ast;
///
/// This is the most general hook into `libsyntax`'s expansion behavior.
pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxExtension) {
+ if name.as_str() == "macro_rules" {
+ panic!("user-defined macros may not be named `macro_rules`");
+ }
self.syntax_exts.push((name, match extension {
NormalTT(ext, _, allow_internal_unstable) => {
NormalTT(ext, Some(self.krate_span), allow_internal_unstable)
IdentTT(ext, _, allow_internal_unstable) => {
IdentTT(ext, Some(self.krate_span), allow_internal_unstable)
}
- MultiDecorator(ext) => MultiDecorator(ext),
- MultiModifier(ext) => MultiModifier(ext),
+ _ => extension,
}));
}
let def = self.ev.tcx.expect_def(ty.id);
match def {
Def::Struct(def_id) | Def::Union(def_id) | Def::Enum(def_id) |
- Def::TyAlias(def_id) | Def::Trait(def_id) | Def::AssociatedTy(def_id, _) => {
- if let Some(node_id) = self.ev.tcx.map.as_local_node_id(def_id) {
+ Def::TyAlias(def_id) | Def::Trait(def_id) | Def::AssociatedTy(def_id) => {
+ if let Some(mut node_id) = self.ev.tcx.map.as_local_node_id(def_id) {
+ // Check the trait for associated types.
+ if let hir::map::NodeTraitItem(_) = self.ev.tcx.map.get(node_id) {
+ node_id = self.ev.tcx.map.get_parent(node_id);
+ }
+
let item = self.ev.tcx.map.expect_item(node_id);
if let Def::TyAlias(..) = def {
// Type aliases are substituted. Associated type aliases are not
return
}
Def::Struct(def_id) | Def::Union(def_id) | Def::Enum(def_id) |
- Def::TyAlias(def_id) | Def::Trait(def_id) | Def::AssociatedTy(def_id, _) => {
+ Def::TyAlias(def_id) | Def::Trait(def_id) | Def::AssociatedTy(def_id) => {
// Non-local means public (private items can't leave their crate, modulo bugs)
- if let Some(node_id) = self.tcx.map.as_local_node_id(def_id) {
+ if let Some(mut node_id) = self.tcx.map.as_local_node_id(def_id) {
+ // Check the trait for associated types.
+ if let hir::map::NodeTraitItem(_) = self.tcx.map.get(node_id) {
+ node_id = self.tcx.map.get_parent(node_id);
+ }
+
let item = self.tcx.map.expect_item(node_id);
let vis = match self.substituted_alias_visibility(item, path) {
Some(vis) => vis,
if !vis.is_at_least(self.required_visibility, &self.tcx.map) {
if self.tcx.sess.features.borrow().pub_restricted ||
self.old_error_set.contains(&ty.id) {
- span_err!(self.tcx.sess, ty.span, E0446,
+ let mut err = struct_span_err!(self.tcx.sess, ty.span, E0446,
"private type in public interface");
+ err.span_label(ty.span, &format!("can't leak private type"));
+ err.emit();
} else {
self.tcx.sess.add_lint(lint::builtin::PRIVATE_IN_PUBLIC,
node_id,
//! any imports resolved.
use resolve_imports::ImportDirectiveSubclass::{self, GlobImport};
-use Module;
+use {Module, ModuleS, ModuleKind};
use Namespace::{self, TypeNS, ValueNS};
use {NameBinding, NameBindingKind, ToNameBinding};
-use ParentLink::{ModuleParentLink, BlockParentLink};
use Resolver;
use {resolve_error, resolve_struct_error, ResolutionError};
-use rustc::middle::cstore::{ChildItem, DlDef};
+use rustc::middle::cstore::LoadedMacro;
use rustc::hir::def::*;
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
-use rustc::ty::{self, VariantKind};
+use rustc::hir::map::DefPathData;
+use rustc::ty;
use std::cell::Cell;
+use std::rc::Rc;
use syntax::ast::Name;
use syntax::attr;
use syntax::parse::token;
-use syntax::ast::{Block, Crate};
-use syntax::ast::{ForeignItem, ForeignItemKind, Item, ItemKind};
-use syntax::ast::{Mutability, StmtKind, TraitItemKind};
+use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind};
+use syntax::ast::{Mutability, StmtKind, TraitItem, TraitItemKind};
use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple};
+use syntax::ext::base::{MultiItemModifier, Resolver as SyntaxResolver};
+use syntax::ext::hygiene::Mark;
+use syntax::feature_gate::{self, emit_feature_err};
use syntax::parse::token::keywords;
use syntax::visit::{self, Visitor};
}
impl<'b> Resolver<'b> {
- /// Constructs the reduced graph for the entire crate.
- pub fn build_reduced_graph(&mut self, krate: &Crate) {
- let no_implicit_prelude = attr::contains_name(&krate.attrs, "no_implicit_prelude");
- self.graph_root.no_implicit_prelude.set(no_implicit_prelude);
- visit::walk_crate(&mut BuildReducedGraphVisitor { resolver: self }, krate);
- }
-
/// Defines `name` in namespace `ns` of module `parent` to be `def` if it is not yet defined;
/// otherwise, reports an error.
fn define<T>(&mut self, parent: Module<'b>, name: Name, ns: Namespace, def: T)
fn block_needs_anonymous_module(&mut self, block: &Block) -> bool {
// If any statements are items, we need to create an anonymous module
block.stmts.iter().any(|statement| match statement.node {
- StmtKind::Item(_) => true,
+ StmtKind::Item(_) | StmtKind::Mac(_) => true,
_ => false,
})
}
}
ItemKind::ExternCrate(_) => {
- // n.b. we don't need to look at the path option here, because cstore already
- // did
+ // We need to error on `#[macro_use] extern crate` when it isn't at the
+ // crate root, because `$crate` won't work properly.
+ let is_crate_root = self.current_module.parent.is_none();
+ for def in self.crate_loader.load_macros(item, is_crate_root) {
+ match def {
+ LoadedMacro::Def(def) => self.add_macro(Mark::root(), def),
+ LoadedMacro::CustomDerive(name, ext) => {
+ self.insert_custom_derive(&name, ext, item.span);
+ }
+ }
+ }
+ self.crate_loader.process_item(item, &self.definitions);
+
+ // n.b. we don't need to look at the path option here, because cstore already did
if let Some(crate_id) = self.session.cstore.extern_mod_stmt_cnum(item.id) {
let def_id = DefId {
krate: crate_id,
index: CRATE_DEF_INDEX,
};
- let parent_link = ModuleParentLink(parent, name);
- let def = Def::Mod(def_id);
- let module = self.new_extern_crate_module(parent_link, def, item.id);
+ let module = self.arenas.alloc_module(ModuleS {
+ extern_crate_id: Some(item.id),
+ populated: Cell::new(false),
+ ..ModuleS::new(Some(parent), ModuleKind::Def(Def::Mod(def_id), name))
+ });
self.define(parent, name, TypeNS, (module, sp, vis));
- self.build_reduced_graph_for_external_crate(module);
+ self.populate_module_if_necessary(module);
}
}
+ ItemKind::Mod(..) if item.ident == keywords::Invalid.ident() => {} // Crate root
+
ItemKind::Mod(..) => {
- let parent_link = ModuleParentLink(parent, name);
let def = Def::Mod(self.definitions.local_def_id(item.id));
- let module = self.new_module(parent_link, Some(def), Some(item.id));
- module.no_implicit_prelude.set({
- parent.no_implicit_prelude.get() ||
+ let module = self.arenas.alloc_module(ModuleS {
+ no_implicit_prelude: parent.no_implicit_prelude || {
attr::contains_name(&item.attrs, "no_implicit_prelude")
+ },
+ normal_ancestor_id: Some(item.id),
+ macros_escape: self.contains_macro_use(&item.attrs),
+ ..ModuleS::new(Some(parent), ModuleKind::Def(def, name))
});
self.define(parent, name, TypeNS, (module, sp, vis));
self.module_map.insert(item.id, module);
self.current_module = module;
}
- ItemKind::ForeignMod(..) => {}
+ ItemKind::ForeignMod(..) => self.crate_loader.process_item(item, &self.definitions),
// These items live in the value namespace.
ItemKind::Static(_, m, _) => {
}
ItemKind::Enum(ref enum_definition, _) => {
- let parent_link = ModuleParentLink(parent, name);
let def = Def::Enum(self.definitions.local_def_id(item.id));
- let module = self.new_module(parent_link, Some(def), parent.normal_ancestor_id);
+ let module = self.new_module(parent, ModuleKind::Def(def, name), true);
self.define(parent, name, TypeNS, (module, sp, vis));
for variant in &(*enum_definition).variants {
- let item_def_id = self.definitions.local_def_id(item.id);
- self.build_reduced_graph_for_variant(variant, item_def_id, module, vis);
+ self.build_reduced_graph_for_variant(variant, module, vis);
}
}
ItemKind::DefaultImpl(..) | ItemKind::Impl(..) => {}
- ItemKind::Trait(.., ref items) => {
+ ItemKind::Trait(..) => {
let def_id = self.definitions.local_def_id(item.id);
// Add all the items within to a new module.
- let parent_link = ModuleParentLink(parent, name);
- let def = Def::Trait(def_id);
- let module_parent =
- self.new_module(parent_link, Some(def), parent.normal_ancestor_id);
- self.define(parent, name, TypeNS, (module_parent, sp, vis));
-
- // Add the names of all the items to the trait info.
- for item in items {
- let item_def_id = self.definitions.local_def_id(item.id);
- let mut is_static_method = false;
- let (def, ns) = match item.node {
- TraitItemKind::Const(..) => (Def::AssociatedConst(item_def_id), ValueNS),
- TraitItemKind::Method(ref sig, _) => {
- is_static_method = !sig.decl.has_self();
- (Def::Method(item_def_id), ValueNS)
- }
- TraitItemKind::Type(..) => (Def::AssociatedTy(def_id, item_def_id), TypeNS),
- TraitItemKind::Macro(_) => panic!("unexpanded macro in resolve!"),
- };
-
- self.define(module_parent, item.ident.name, ns, (def, item.span, vis));
-
- self.trait_item_map.insert((item.ident.name, def_id), is_static_method);
- }
+ let module =
+ self.new_module(parent, ModuleKind::Def(Def::Trait(def_id), name), true);
+ self.define(parent, name, TypeNS, (module, sp, vis));
+ self.current_module = module;
}
ItemKind::Mac(_) => panic!("unexpanded macro in resolve!"),
}
-
- visit::walk_item(&mut BuildReducedGraphVisitor { resolver: self }, item);
- self.current_module = parent;
}
// Constructs the reduced graph for one variant. Variants exist in the
// type and value namespaces.
fn build_reduced_graph_for_variant(&mut self,
variant: &Variant,
- item_id: DefId,
parent: Module<'b>,
vis: ty::Visibility) {
let name = variant.node.name.name;
// Variants are always treated as importable to allow them to be glob used.
// All variants are defined in both type and value namespaces as future-proofing.
- let def = Def::Variant(item_id, self.definitions.local_def_id(variant.node.data.id()));
+ let def = Def::Variant(self.definitions.local_def_id(variant.node.data.id()));
self.define(parent, name, ValueNS, (def, variant.span, vis));
self.define(parent, name, TypeNS, (def, variant.span, vis));
}
{}",
block_id);
- let parent_link = BlockParentLink(parent, block_id);
- let new_module = self.new_module(parent_link, None, parent.normal_ancestor_id);
+ let new_module = self.new_module(parent, ModuleKind::Block(block_id), true);
self.module_map.insert(block_id, new_module);
self.current_module = new_module; // Descend into the block.
}
-
- visit::walk_block(&mut BuildReducedGraphVisitor { resolver: self }, block);
- self.current_module = parent;
}
/// Builds the reduced graph for a single item in an external crate.
- fn build_reduced_graph_for_external_crate_def(&mut self, parent: Module<'b>, xcdef: ChildItem) {
- let def = match xcdef.def {
- DlDef(def) => def,
- _ => return,
- };
-
- if let Def::ForeignMod(def_id) = def {
- // Foreign modules have no names. Recur and populate eagerly.
- for child in self.session.cstore.item_children(def_id) {
- self.build_reduced_graph_for_external_crate_def(parent, child);
- }
+ fn build_reduced_graph_for_external_crate_def(&mut self, parent: Module<'b>,
+ child: Export) {
+ let def_id = child.def_id;
+ let name = child.name;
+
+ let def = if let Some(def) = self.session.cstore.describe_def(def_id) {
+ def
+ } else {
return;
- }
+ };
- let name = xcdef.name;
- let vis = if parent.is_trait() { ty::Visibility::Public } else { xcdef.vis };
+ let vis = if parent.is_trait() {
+ ty::Visibility::Public
+ } else {
+ self.session.cstore.visibility(def_id)
+ };
match def {
- Def::Mod(_) | Def::ForeignMod(_) | Def::Enum(..) => {
+ Def::Mod(_) | Def::Enum(..) => {
debug!("(building reduced graph for external crate) building module {} {:?}",
name, vis);
- let parent_link = ModuleParentLink(parent, name);
- let module = self.new_module(parent_link, Some(def), None);
+ let module = self.new_module(parent, ModuleKind::Def(def, name), false);
let _ = self.try_define(parent, name, TypeNS, (module, DUMMY_SP, vis));
}
- Def::Variant(_, variant_id) => {
+ Def::Variant(variant_id) => {
debug!("(building reduced graph for external crate) building variant {}", name);
// Variants are always treated as importable to allow them to be glob used.
// All variants are defined in both type and value namespaces as future-proofing.
let _ = self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis));
let _ = self.try_define(parent, name, ValueNS, (def, DUMMY_SP, vis));
- if self.session.cstore.variant_kind(variant_id) == Some(VariantKind::Struct) {
+ if self.session.cstore.variant_kind(variant_id) == Some(ty::VariantKind::Struct) {
// Not adding fields for variants as they are not accessed with a self receiver
self.structs.insert(variant_id, Vec::new());
}
name);
let _ = self.try_define(parent, name, ValueNS, (def, DUMMY_SP, vis));
}
- Def::Trait(def_id) => {
+ Def::Trait(_) => {
debug!("(building reduced graph for external crate) building type {}", name);
// If this is a trait, add all the trait item names to the trait
// info.
- let trait_item_def_ids = self.session.cstore.trait_item_def_ids(def_id);
- for trait_item_def in &trait_item_def_ids {
+ let trait_item_def_ids = self.session.cstore.impl_or_trait_items(def_id);
+ for &trait_item_def in &trait_item_def_ids {
let trait_item_name =
- self.session.cstore.item_name(trait_item_def.def_id());
+ self.session.cstore.def_key(trait_item_def)
+ .disambiguated_data.data.get_opt_name()
+ .expect("opt_item_name returned None for trait");
debug!("(building reduced graph for external crate) ... adding trait item \
'{}'",
self.trait_item_map.insert((trait_item_name, def_id), false);
}
- let parent_link = ModuleParentLink(parent, name);
- let module = self.new_module(parent_link, Some(def), None);
+ let module = self.new_module(parent, ModuleKind::Def(def, name), false);
let _ = self.try_define(parent, name, TypeNS, (module, DUMMY_SP, vis));
}
Def::TyAlias(..) | Def::AssociatedTy(..) => {
debug!("(building reduced graph for external crate) building type {}", name);
let _ = self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis));
}
- Def::Struct(def_id)
- if self.session.cstore.tuple_struct_definition_if_ctor(def_id).is_none() => {
+ Def::Struct(_)
+ if self.session.cstore.def_key(def_id).disambiguated_data.data !=
+ DefPathData::StructCtor
+ => {
debug!("(building reduced graph for external crate) building type and value for {}",
name);
let _ = self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis));
let fields = self.session.cstore.struct_field_names(def_id);
self.structs.insert(def_id, fields);
}
- Def::Union(def_id) => {
+ Def::Union(_) => {
let _ = self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis));
// Record the def ID and fields of this union.
}
}
- /// Builds the reduced graph rooted at the 'use' directive for an external
- /// crate.
- fn build_reduced_graph_for_external_crate(&mut self, root: Module<'b>) {
- let root_cnum = root.def_id().unwrap().krate;
- for child in self.session.cstore.crate_top_level_items(root_cnum) {
- self.build_reduced_graph_for_external_crate_def(root, child);
- }
- }
-
/// Ensures that the reduced graph rooted at the given external module
/// is built, building it if it is not.
pub fn populate_module_if_necessary(&mut self, module: Module<'b>) {
}
module.populated.set(true)
}
+
+ // does this attribute list contain "macro_use"?
+ fn contains_macro_use(&mut self, attrs: &[ast::Attribute]) -> bool {
+ for attr in attrs {
+ if attr.check_name("macro_escape") {
+ let msg = "macro_escape is a deprecated synonym for macro_use";
+ let mut err = self.session.struct_span_warn(attr.span, msg);
+ if let ast::AttrStyle::Inner = attr.node.style {
+ err.help("consider an outer attribute, #[macro_use] mod ...").emit();
+ } else {
+ err.emit();
+ }
+ } else if !attr.check_name("macro_use") {
+ continue;
+ }
+
+ if !attr.is_word() {
+ self.session.span_err(attr.span, "arguments to macro_use are not allowed here");
+ }
+ return true;
+ }
+
+ false
+ }
+
+ fn insert_custom_derive(&mut self, name: &str, ext: Rc<MultiItemModifier>, sp: Span) {
+ if !self.session.features.borrow().rustc_macro {
+ let sess = &self.session.parse_sess;
+ let msg = "loading custom derive macro crates is experimentally supported";
+ emit_feature_err(sess, "rustc_macro", sp, feature_gate::GateIssue::Language, msg);
+ }
+ if self.derive_modes.insert(token::intern(name), ext).is_some() {
+ self.session.span_err(sp, &format!("cannot shadow existing derive mode `{}`", name));
+ }
+ }
+}
+
+pub struct BuildReducedGraphVisitor<'a, 'b: 'a> {
+ pub resolver: &'a mut Resolver<'b>,
+}
+
+impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
+ fn visit_invoc(&mut self, id: ast::NodeId) {
+ self.resolver.expansion_data.get_mut(&id.as_u32()).unwrap().module =
+ self.resolver.current_module;
+ }
}
-struct BuildReducedGraphVisitor<'a, 'b: 'a> {
- resolver: &'a mut Resolver<'b>,
+macro_rules! method {
+ ($visit:ident: $ty:ty, $invoc:path, $walk:ident) => {
+ fn $visit(&mut self, node: &$ty) {
+ match node.node {
+ $invoc(..) => self.visit_invoc(node.id),
+ _ => visit::$walk(self, node),
+ }
+ }
+ }
}
impl<'a, 'b> Visitor for BuildReducedGraphVisitor<'a, 'b> {
+ method!(visit_impl_item: ast::ImplItem, ast::ImplItemKind::Macro, walk_impl_item);
+ method!(visit_stmt: ast::Stmt, ast::StmtKind::Mac, walk_stmt);
+ method!(visit_expr: ast::Expr, ast::ExprKind::Mac, walk_expr);
+ method!(visit_pat: ast::Pat, ast::PatKind::Mac, walk_pat);
+ method!(visit_ty: ast::Ty, ast::TyKind::Mac, walk_ty);
+
fn visit_item(&mut self, item: &Item) {
+ match item.node {
+ ItemKind::Mac(..) if item.id == ast::DUMMY_NODE_ID => return, // Scope placeholder
+ ItemKind::Mac(..) => return self.visit_invoc(item.id),
+ _ => {}
+ }
+
+ let parent = self.resolver.current_module;
self.resolver.build_reduced_graph_for_item(item);
+ visit::walk_item(self, item);
+ self.resolver.current_module = parent;
}
fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
self.resolver.build_reduced_graph_for_foreign_item(foreign_item);
+ visit::walk_foreign_item(self, foreign_item);
}
fn visit_block(&mut self, block: &Block) {
+ let parent = self.resolver.current_module;
self.resolver.build_reduced_graph_for_block(block);
+ visit::walk_block(self, block);
+ self.resolver.current_module = parent;
+ }
+
+ fn visit_trait_item(&mut self, item: &TraitItem) {
+ let parent = self.resolver.current_module;
+ let def_id = parent.def_id().unwrap();
+
+ // Add the item to the trait info.
+ let item_def_id = self.resolver.definitions.local_def_id(item.id);
+ let mut is_static_method = false;
+ let (def, ns) = match item.node {
+ TraitItemKind::Const(..) => (Def::AssociatedConst(item_def_id), ValueNS),
+ TraitItemKind::Method(ref sig, _) => {
+ is_static_method = !sig.decl.has_self();
+ (Def::Method(item_def_id), ValueNS)
+ }
+ TraitItemKind::Type(..) => (Def::AssociatedTy(item_def_id), TypeNS),
+ TraitItemKind::Macro(_) => return self.visit_invoc(item.id),
+ };
+
+ self.resolver.trait_item_map.insert((item.ident.name, def_id), is_static_method);
+
+ let vis = ty::Visibility::Public;
+ self.resolver.define(parent, item.ident.name, ns, (def, item.span, vis));
+
+ self.resolver.current_module = parent.parent.unwrap(); // nearest normal ancestor
+ visit::walk_trait_item(self, item);
+ self.resolver.current_module = parent;
}
}
use self::RibKind::*;
use self::UseLexicalScopeFlag::*;
use self::ModulePrefixResult::*;
-use self::ParentLink::*;
-use rustc::hir::map::Definitions;
+use rustc::hir::map::{Definitions, DefCollector};
use rustc::hir::{self, PrimTy, TyBool, TyChar, TyFloat, TyInt, TyUint, TyStr};
-use rustc::middle::cstore::MacroLoader;
+use rustc::middle::cstore::CrateLoader;
use rustc::session::Session;
use rustc::lint;
use rustc::hir::def::*;
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId};
use rustc::ty;
use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet};
+use syntax::ext::base::MultiItemModifier;
use syntax::ext::hygiene::Mark;
use syntax::ast::{self, FloatTy};
-use syntax::ast::{CRATE_NODE_ID, Name, NodeId, CrateNum, IntTy, UintTy};
+use syntax::ast::{CRATE_NODE_ID, Name, NodeId, IntTy, UintTy};
+use syntax::ext::base::SyntaxExtension;
use syntax::parse::token::{self, keywords};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::visit::{self, FnKind, Visitor};
+use syntax::attr;
use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind};
use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, Generics};
use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind};
use std::cell::{Cell, RefCell};
use std::fmt;
use std::mem::replace;
+use std::rc::Rc;
use resolve_imports::{ImportDirective, NameResolution};
let mut err = struct_span_err!(resolver.session,
span,
E0425,
- "unresolved name `{}`{}",
- path,
- msg);
+ "unresolved name `{}`",
+ path);
+ if msg != "" {
+ err.span_label(span, &msg);
+ } else {
+ err.span_label(span, &format!("unresolved name"));
+ }
+
match context {
UnresolvedNameContext::Other => {
if msg.is_empty() && is_static_method && is_field {
}
}
-/// The link from a module up to its nearest parent node.
-#[derive(Clone,Debug)]
-enum ParentLink<'a> {
- NoParentLink,
- ModuleParentLink(Module<'a>, Name),
- BlockParentLink(Module<'a>, NodeId),
+enum ModuleKind {
+ Block(NodeId),
+ Def(Def, Name),
}
/// One node in the tree of modules.
pub struct ModuleS<'a> {
- parent_link: ParentLink<'a>,
- def: Option<Def>,
+ parent: Option<Module<'a>>,
+ kind: ModuleKind,
// The node id of the closest normal module (`mod`) ancestor (including this module).
normal_ancestor_id: Option<NodeId>,
resolutions: RefCell<FnvHashMap<(Name, Namespace), &'a RefCell<NameResolution<'a>>>>,
- no_implicit_prelude: Cell<bool>,
+ no_implicit_prelude: bool,
glob_importers: RefCell<Vec<&'a ImportDirective<'a>>>,
globs: RefCell<Vec<&'a ImportDirective<'a>>>,
// access the children must be preceded with a
// `populate_module_if_necessary` call.
populated: Cell<bool>,
+
+ macros: RefCell<FnvHashMap<Name, Rc<SyntaxExtension>>>,
+ macros_escape: bool,
}
pub type Module<'a> = &'a ModuleS<'a>;
impl<'a> ModuleS<'a> {
- fn new(parent_link: ParentLink<'a>, def: Option<Def>, normal_ancestor_id: Option<NodeId>)
- -> Self {
+ fn new(parent: Option<Module<'a>>, kind: ModuleKind) -> Self {
ModuleS {
- parent_link: parent_link,
- def: def,
- normal_ancestor_id: normal_ancestor_id,
+ parent: parent,
+ kind: kind,
+ normal_ancestor_id: None,
extern_crate_id: None,
resolutions: RefCell::new(FnvHashMap()),
- no_implicit_prelude: Cell::new(false),
+ no_implicit_prelude: false,
glob_importers: RefCell::new(Vec::new()),
globs: RefCell::new((Vec::new())),
traits: RefCell::new(None),
- populated: Cell::new(normal_ancestor_id.is_some()),
+ populated: Cell::new(true),
+ macros: RefCell::new(FnvHashMap()),
+ macros_escape: false,
}
}
}
}
+ fn def(&self) -> Option<Def> {
+ match self.kind {
+ ModuleKind::Def(def, _) => Some(def),
+ _ => None,
+ }
+ }
+
fn def_id(&self) -> Option<DefId> {
- self.def.as_ref().map(Def::def_id)
+ self.def().as_ref().map(Def::def_id)
}
// `self` resolves to the first module ancestor that `is_normal`.
fn is_normal(&self) -> bool {
- match self.def {
- Some(Def::Mod(_)) => true,
+ match self.kind {
+ ModuleKind::Def(Def::Mod(_), _) => true,
_ => false,
}
}
fn is_trait(&self) -> bool {
- match self.def {
- Some(Def::Trait(_)) => true,
+ match self.kind {
+ ModuleKind::Def(Def::Trait(_), _) => true,
_ => false,
}
}
-
- fn parent(&self) -> Option<&'a Self> {
- match self.parent_link {
- ModuleParentLink(parent, _) | BlockParentLink(parent, _) => Some(parent),
- NoParentLink => None,
- }
- }
}
impl<'a> fmt::Debug for ModuleS<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "{:?}", self.def)
+ write!(f, "{:?}", self.def())
}
}
fn def(&self) -> Def {
match self.kind {
NameBindingKind::Def(def) => def,
- NameBindingKind::Module(module) => module.def.unwrap(),
+ NameBindingKind::Module(module) => module.def().unwrap(),
NameBindingKind::Import { binding, .. } => binding.def(),
NameBindingKind::Ambiguity { .. } => Def::Err,
}
dummy_binding: &'a NameBinding<'a>,
new_import_semantics: bool, // true if `#![feature(item_like_imports)]`
- macro_loader: &'a mut MacroLoader,
+ pub exported_macros: Vec<ast::MacroDef>,
+ pub derive_modes: FnvHashMap<Name, Rc<MultiItemModifier>>,
+ crate_loader: &'a mut CrateLoader,
macro_names: FnvHashSet<Name>,
// Maps the `Mark` of an expansion to its containing module or block.
- expansion_data: Vec<macros::ExpansionData>,
+ expansion_data: FnvHashMap<u32, macros::ExpansionData<'a>>,
}
pub struct ResolverArenas<'a> {
impl<'a> ty::NodeIdTree for Resolver<'a> {
fn is_descendant_of(&self, mut node: NodeId, ancestor: NodeId) -> bool {
while node != ancestor {
- node = match self.module_map[&node].parent() {
+ node = match self.module_map[&node].parent {
Some(parent) => parent.normal_ancestor_id.unwrap(),
None => return false,
}
self.def_map.insert(id, PathResolution::new(def));
}
- fn definitions(&mut self) -> Option<&mut Definitions> {
- Some(&mut self.definitions)
+ fn definitions(&mut self) -> &mut Definitions {
+ &mut self.definitions
}
}
impl<'a> Resolver<'a> {
pub fn new(session: &'a Session,
+ krate: &Crate,
make_glob_map: MakeGlobMap,
- macro_loader: &'a mut MacroLoader,
+ crate_loader: &'a mut CrateLoader,
arenas: &'a ResolverArenas<'a>)
-> Resolver<'a> {
- let root_def_id = DefId::local(CRATE_DEF_INDEX);
- let graph_root =
- ModuleS::new(NoParentLink, Some(Def::Mod(root_def_id)), Some(CRATE_NODE_ID));
- let graph_root = arenas.alloc_module(graph_root);
+ let root_def = Def::Mod(DefId::local(CRATE_DEF_INDEX));
+ let graph_root = arenas.alloc_module(ModuleS {
+ normal_ancestor_id: Some(CRATE_NODE_ID),
+ no_implicit_prelude: attr::contains_name(&krate.attrs, "no_implicit_prelude"),
+ ..ModuleS::new(None, ModuleKind::Def(root_def, keywords::Invalid.name()))
+ });
let mut module_map = NodeMap();
module_map.insert(CRATE_NODE_ID, graph_root);
+ let mut definitions = Definitions::new();
+ DefCollector::new(&mut definitions).collect_root();
+
+ let mut expansion_data = FnvHashMap();
+ expansion_data.insert(0, macros::ExpansionData::root(graph_root)); // Crate root expansion
+
Resolver {
session: session,
- definitions: Definitions::new(),
+ definitions: definitions,
macros_at_scope: FnvHashMap(),
// The outermost module has def ID 0; this is not reflected in the
}),
new_import_semantics: session.features.borrow().item_like_imports,
- macro_loader: macro_loader,
+ exported_macros: Vec::new(),
+ derive_modes: FnvHashMap(),
+ crate_loader: crate_loader,
macro_names: FnvHashSet(),
- expansion_data: vec![macros::ExpansionData::default()],
+ expansion_data: expansion_data,
}
}
/// Entry point to crate resolution.
pub fn resolve_crate(&mut self, krate: &Crate) {
+ // Collect `DefId`s for exported macro defs.
+ for def in &krate.exported_macros {
+ DefCollector::new(&mut self.definitions).with_parent(CRATE_DEF_INDEX, |collector| {
+ collector.visit_macro_def(def)
+ })
+ }
+
self.current_module = self.graph_root;
visit::walk_crate(self, krate);
check_unused::check_crate(self, krate);
self.report_errors();
+ self.crate_loader.postprocess(krate);
}
- fn new_module(&self,
- parent_link: ParentLink<'a>,
- def: Option<Def>,
- normal_ancestor_id: Option<NodeId>)
- -> Module<'a> {
- self.arenas.alloc_module(ModuleS::new(parent_link, def, normal_ancestor_id))
- }
-
- fn new_extern_crate_module(&self, parent_link: ParentLink<'a>, def: Def, local_node_id: NodeId)
- -> Module<'a> {
- let mut module = ModuleS::new(parent_link, Some(def), Some(local_node_id));
- module.extern_crate_id = Some(local_node_id);
- self.arenas.modules.alloc(module)
+ fn new_module(&self, parent: Module<'a>, kind: ModuleKind, local: bool) -> Module<'a> {
+ self.arenas.alloc_module(ModuleS {
+ normal_ancestor_id: if local { self.current_module.normal_ancestor_id } else { None },
+ populated: Cell::new(local),
+ ..ModuleS::new(Some(parent), kind)
+ })
}
fn get_ribs<'b>(&'b mut self, ns: Namespace) -> &'b mut Vec<Rib<'a>> {
-> Option<Module<'a>> {
match this.resolve_name_in_module(module, needle, TypeNS, false, None) {
Success(binding) if binding.is_extern_crate() => Some(module),
- _ => match module.parent_link {
- ModuleParentLink(ref parent, _) => {
- search_parent_externals(this, needle, parent)
- }
- _ => None,
+ _ => if let (&ModuleKind::Def(..), Some(parent)) = (&module.kind, module.parent) {
+ search_parent_externals(this, needle, parent)
+ } else {
+ None
},
}
}
return Some(LexicalScopeBinding::Item(binding));
}
- // We can only see through anonymous modules
- if module.def.is_some() {
- return match self.prelude {
- Some(prelude) if !module.no_implicit_prelude.get() => {
- self.resolve_name_in_module(prelude, name, ns, false, None).success()
- .map(LexicalScopeBinding::Item)
- }
- _ => None,
- };
+ if let ModuleKind::Block(..) = module.kind { // We can see through blocks
+ } else if !module.no_implicit_prelude {
+ return self.prelude.and_then(|prelude| {
+ self.resolve_name_in_module(prelude, name, ns, false, None).success()
+ }).map(LexicalScopeBinding::Item)
+ } else {
+ return None;
}
}
while i < module_path.len() && "super" == module_path[i].as_str() {
debug!("(resolving module prefix) resolving `super` at {}",
module_to_string(&containing_module));
- if let Some(parent) = containing_module.parent() {
+ if let Some(parent) = containing_module.parent {
containing_module = self.module_map[&parent.normal_ancestor_id.unwrap()];
i += 1;
} else {
// Resolve the self type.
this.visit_ty(self_type);
- this.with_self_rib(Def::SelfTy(trait_id, Some(item_id)), |this| {
+ let item_def_id = this.definitions.local_def_id(item_id);
+ this.with_self_rib(Def::SelfTy(trait_id, Some(item_def_id)), |this| {
this.with_current_self_type(self_type, |this| {
for impl_item in impl_items {
this.resolve_visibility(&impl_item.vis);
// must not add it if it's in the bindings map
// because that breaks the assumptions later
// passes make about or-patterns.)
- let mut def = Def::Local(self.definitions.local_def_id(pat_id), pat_id);
+ let mut def = Def::Local(self.definitions.local_def_id(pat_id));
match bindings.get(&ident.node).cloned() {
Some(id) if id == outer_pat_id => {
// `Variant(a, a)`, error
Def::Upvar(..) => {
span_bug!(span, "unexpected {:?} in bindings", def)
}
- Def::Local(_, node_id) => {
+ Def::Local(def_id) => {
for rib in ribs {
match rib.kind {
NormalRibKind | ModuleRibKind(..) | MacroDefinition(..) => {
}
ClosureRibKind(function_id) => {
let prev_def = def;
- let node_def_id = self.definitions.local_def_id(node_id);
+ let node_id = self.definitions.as_local_node_id(def_id).unwrap();
let seen = self.freevars_seen
.entry(function_id)
.or_insert_with(|| NodeMap());
if let Some(&index) = seen.get(&node_id) {
- def = Def::Upvar(node_def_id, node_id, index, function_id);
+ def = Def::Upvar(def_id, index, function_id);
continue;
}
let vec = self.freevars
span: span,
});
- def = Def::Upvar(node_def_id, node_id, depth, function_id);
+ def = Def::Upvar(def_id, depth, function_id);
seen.insert(node_id, depth);
}
ItemRibKind | MethodRibKind(_) => {
if let Some(resolution) = self.def_map.get(&node_id) {
match resolution.base_def {
Def::Enum(did) | Def::TyAlias(did) | Def::Union(did) |
- Def::Struct(did) | Def::Variant(_, did) if resolution.depth == 0 => {
+ Def::Struct(did) | Def::Variant(did) if resolution.depth == 0 => {
if let Some(fields) = self.structs.get(&did) {
if fields.iter().any(|&field_name| name == field_name) {
return Field;
if let Some(path_res) = self.resolve_possibly_assoc_item(expr.id,
maybe_qself.as_ref(), path, ValueNS) {
// Check if struct variant
- let is_struct_variant = if let Def::Variant(_, variant_id) = path_res.base_def {
+ let is_struct_variant = if let Def::Variant(variant_id) = path_res.base_def {
self.structs.contains_key(&variant_id)
} else {
false
let mut context = UnresolvedNameContext::Other;
let mut def = Def::Err;
if !msg.is_empty() {
- msg = format!(". Did you mean {}?", msg);
+ msg = format!("did you mean {}?", msg);
} else {
// we display a help message if this is a module
let name_path = path.segments.iter()
UseLexicalScope,
Some(expr.span)) {
Success(e) => {
- if let Some(def_type) = e.def {
+ if let Some(def_type) = e.def() {
def = def_type;
}
context = UnresolvedNameContext::PathIsMod(parent);
};
search_in_module(self, search_module);
- match search_module.parent_link {
- NoParentLink | ModuleParentLink(..) => {
- if !search_module.no_implicit_prelude.get() {
- self.prelude.map(|prelude| search_in_module(self, prelude));
- }
- break;
- }
- BlockParentLink(parent_module, _) => {
- search_module = parent_module;
+ if let ModuleKind::Block(..) = search_module.kind {
+ search_module = search_module.parent.unwrap();
+ } else {
+ if !search_module.no_implicit_prelude {
+ self.prelude.map(|prelude| search_in_module(self, prelude));
}
+ break;
}
}
// collect submodules to explore
if let Ok(module) = name_binding.module() {
// form the path
- let path_segments = match module.parent_link {
- NoParentLink => path_segments.clone(),
- ModuleParentLink(_, name) => {
+ let path_segments = match module.kind {
+ _ if module.parent.is_none() => path_segments.clone(),
+ ModuleKind::Def(_, name) => {
let mut paths = path_segments.clone();
let ident = ast::Ident::with_empty_ctxt(name);
let params = PathParameters::none();
if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
// add the module to the lookup
let is_extern = in_module_is_extern || name_binding.is_extern_crate();
- if !worklist.iter().any(|&(m, ..)| m.def == module.def) {
+ if !worklist.iter().any(|&(m, ..)| m.def() == module.def()) {
worklist.push((module, path_segments, is_extern));
}
}
let mut path_resolution = err_path_resolution();
let vis = match self.resolve_module_path(&segments, DontUseLexicalScope, Some(path.span)) {
Success(module) => {
- path_resolution = PathResolution::new(module.def.unwrap());
+ path_resolution = PathResolution::new(module.def().unwrap());
ty::Visibility::Restricted(module.normal_ancestor_id.unwrap())
}
Indeterminate => unreachable!(),
return self.report_conflict(parent, name, ns, old_binding, binding);
}
- let container = match parent.def {
- Some(Def::Mod(_)) => "module",
- Some(Def::Trait(_)) => "trait",
- None => "block",
+ let container = match parent.kind {
+ ModuleKind::Def(Def::Mod(_), _) => "module",
+ ModuleKind::Def(Def::Trait(_), _) => "trait",
+ ModuleKind::Block(..) => "block",
_ => "enum",
};
let mut names = Vec::new();
fn collect_mod(names: &mut Vec<ast::Name>, module: Module) {
- match module.parent_link {
- NoParentLink => {}
- ModuleParentLink(ref module, name) => {
+ if let ModuleKind::Def(_, name) = module.kind {
+ if let Some(parent) = module.parent {
names.push(name);
- collect_mod(names, module);
- }
- BlockParentLink(ref module, _) => {
- // danger, shouldn't be ident?
- names.push(token::intern("<opaque>"));
- collect_mod(names, module);
+ collect_mod(names, parent);
}
+ } else {
+ // danger, shouldn't be ident?
+ names.push(token::intern("<opaque>"));
+ collect_mod(names, module);
}
}
collect_mod(&mut names, module);
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use Resolver;
-use rustc::util::nodemap::FnvHashMap;
-use std::cell::RefCell;
-use std::mem;
+use {Module, Resolver};
+use build_reduced_graph::BuildReducedGraphVisitor;
+use rustc::hir::def_id::{CRATE_DEF_INDEX, DefIndex};
+use rustc::hir::map::{self, DefCollector};
use std::rc::Rc;
-use syntax::ast::{self, Name};
+use syntax::ast;
use syntax::errors::DiagnosticBuilder;
-use syntax::ext::base::{self, LoadedMacro, MultiModifier, MultiDecorator};
+use syntax::ext::base::{self, MultiModifier, MultiDecorator, MultiItemModifier};
use syntax::ext::base::{NormalTT, SyntaxExtension};
use syntax::ext::expand::{Expansion, Invocation, InvocationKind};
use syntax::ext::hygiene::Mark;
+use syntax::ext::tt::macro_rules;
use syntax::parse::token::intern;
use syntax::util::lev_distance::find_best_match_for_name;
-use syntax::visit::{self, Visitor};
-#[derive(Clone, Default)]
-pub struct ExpansionData {
- module: Rc<ModuleData>,
+#[derive(Clone)]
+pub struct ExpansionData<'a> {
+ pub module: Module<'a>,
+ def_index: DefIndex,
+ // True if this expansion is in a `const_integer` position, for example `[u32; m!()]`.
+ // c.f. `DefCollector::visit_ast_const_integer`.
+ const_integer: bool,
}
-// FIXME(jseyfried): merge with `::ModuleS`.
-#[derive(Default)]
-struct ModuleData {
- parent: Option<Rc<ModuleData>>,
- macros: RefCell<FnvHashMap<Name, Rc<SyntaxExtension>>>,
- macros_escape: bool,
+impl<'a> ExpansionData<'a> {
+ pub fn root(graph_root: Module<'a>) -> Self {
+ ExpansionData {
+ module: graph_root,
+ def_index: CRATE_DEF_INDEX,
+ const_integer: false,
+ }
+ }
}
impl<'a> base::Resolver for Resolver<'a> {
- fn load_crate(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec<LoadedMacro> {
- self.macro_loader.load_crate(extern_crate, allows_macros)
- }
-
fn next_node_id(&mut self) -> ast::NodeId {
self.session.next_node_id()
}
- fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion) {
- expansion.visit_with(&mut ExpansionVisitor {
- current_module: self.expansion_data[mark.as_u32() as usize].module.clone(),
- resolver: self,
+ fn get_module_scope(&mut self, id: ast::NodeId) -> Mark {
+ let mark = Mark::fresh();
+ let module = self.module_map[&id];
+ self.expansion_data.insert(mark.as_u32(), ExpansionData {
+ module: module,
+ def_index: module.def_id().unwrap().index,
+ const_integer: false,
});
+ mark
+ }
+
+ fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion) {
+ self.collect_def_ids(mark, expansion);
+ self.current_module = self.expansion_data[&mark.as_u32()].module;
+ expansion.visit_with(&mut BuildReducedGraphVisitor { resolver: self });
+ }
+
+ fn add_macro(&mut self, scope: Mark, mut def: ast::MacroDef) {
+ if &def.ident.name.as_str() == "macro_rules" {
+ self.session.span_err(def.span, "user-defined macros may not be named `macro_rules`");
+ }
+ if def.use_locally {
+ let ext = macro_rules::compile(&self.session.parse_sess, &def);
+ self.add_ext(scope, def.ident, Rc::new(ext));
+ }
+ if def.export {
+ def.id = self.next_node_id();
+ self.exported_macros.push(def);
+ }
}
- fn add_macro(&mut self, scope: Mark, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
+ fn add_ext(&mut self, scope: Mark, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
if let NormalTT(..) = *ext {
self.macro_names.insert(ident.name);
}
- let mut module = self.expansion_data[scope.as_u32() as usize].module.clone();
+ let mut module = self.expansion_data[&scope.as_u32()].module;
while module.macros_escape {
- module = module.parent.clone().unwrap();
+ module = module.parent.unwrap();
}
module.macros.borrow_mut().insert(ident.name, ext);
}
fn find_attr_invoc(&mut self, attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
for i in 0..attrs.len() {
let name = intern(&attrs[i].name());
- match self.expansion_data[0].module.macros.borrow().get(&name) {
+ match self.expansion_data[&0].module.macros.borrow().get(&name) {
Some(ext) => match **ext {
- MultiModifier(..) | MultiDecorator(..) => return Some(attrs.remove(i)),
+ MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => {
+ return Some(attrs.remove(i))
+ }
_ => {}
},
None => {}
None
}
- fn resolve_invoc(&mut self, invoc: &Invocation) -> Option<Rc<SyntaxExtension>> {
+ fn resolve_invoc(&mut self, scope: Mark, invoc: &Invocation) -> Option<Rc<SyntaxExtension>> {
let (name, span) = match invoc.kind {
InvocationKind::Bang { ref mac, .. } => {
let path = &mac.node.path;
InvocationKind::Attr { ref attr, .. } => (intern(&*attr.name()), attr.span),
};
- let mut module = self.expansion_data[invoc.mark().as_u32() as usize].module.clone();
+ let mut module = self.expansion_data[&scope.as_u32()].module;
loop {
if let Some(ext) = module.macros.borrow().get(&name) {
return Some(ext.clone());
}
- match module.parent.clone() {
+ match module.parent {
Some(parent) => module = parent,
None => break,
}
err.emit();
None
}
+
+ fn resolve_derive_mode(&mut self, ident: ast::Ident) -> Option<Rc<MultiItemModifier>> {
+ self.derive_modes.get(&ident.name).cloned()
+ }
}
impl<'a> Resolver<'a> {
}
}
}
-}
-struct ExpansionVisitor<'b, 'a: 'b> {
- resolver: &'b mut Resolver<'a>,
- current_module: Rc<ModuleData>,
-}
+ fn collect_def_ids(&mut self, mark: Mark, expansion: &Expansion) {
+ let expansion_data = &mut self.expansion_data;
+ let ExpansionData { def_index, const_integer, module } = expansion_data[&mark.as_u32()];
+ let visit_macro_invoc = &mut |invoc: map::MacroInvocationData| {
+ expansion_data.entry(invoc.id.as_u32()).or_insert(ExpansionData {
+ def_index: invoc.def_index,
+ const_integer: invoc.const_integer,
+ module: module,
+ });
+ };
-impl<'a, 'b> ExpansionVisitor<'a, 'b> {
- fn visit_invoc(&mut self, id: ast::NodeId) {
- assert_eq!(id, self.resolver.expansion_data.len() as u32);
- self.resolver.expansion_data.push(ExpansionData {
- module: self.current_module.clone(),
+ let mut def_collector = DefCollector::new(&mut self.definitions);
+ def_collector.visit_macro_invoc = Some(visit_macro_invoc);
+ def_collector.with_parent(def_index, |def_collector| if !const_integer {
+ expansion.visit_with(def_collector)
+ } else if let Expansion::Expr(ref expr) = *expansion {
+ def_collector.visit_ast_const_integer(expr);
});
}
-
- // does this attribute list contain "macro_use"?
- fn contains_macro_use(&mut self, attrs: &[ast::Attribute]) -> bool {
- for attr in attrs {
- if attr.check_name("macro_escape") {
- let msg = "macro_escape is a deprecated synonym for macro_use";
- let mut err = self.resolver.session.struct_span_warn(attr.span, msg);
- if let ast::AttrStyle::Inner = attr.node.style {
- err.help("consider an outer attribute, #[macro_use] mod ...").emit();
- } else {
- err.emit();
- }
- } else if !attr.check_name("macro_use") {
- continue;
- }
-
- if !attr.is_word() {
- self.resolver.session.span_err(attr.span,
- "arguments to macro_use are not allowed here");
- }
- return true;
- }
-
- false
- }
-}
-
-macro_rules! method {
- ($visit:ident: $ty:ty, $invoc:path, $walk:ident) => {
- fn $visit(&mut self, node: &$ty) {
- match node.node {
- $invoc(..) => self.visit_invoc(node.id),
- _ => visit::$walk(self, node),
- }
- }
- }
-}
-
-impl<'a, 'b> Visitor for ExpansionVisitor<'a, 'b> {
- method!(visit_trait_item: ast::TraitItem, ast::TraitItemKind::Macro, walk_trait_item);
- method!(visit_impl_item: ast::ImplItem, ast::ImplItemKind::Macro, walk_impl_item);
- method!(visit_stmt: ast::Stmt, ast::StmtKind::Mac, walk_stmt);
- method!(visit_expr: ast::Expr, ast::ExprKind::Mac, walk_expr);
- method!(visit_pat: ast::Pat, ast::PatKind::Mac, walk_pat);
- method!(visit_ty: ast::Ty, ast::TyKind::Mac, walk_ty);
-
- fn visit_item(&mut self, item: &ast::Item) {
- match item.node {
- ast::ItemKind::Mac(..) if item.id == ast::DUMMY_NODE_ID => {} // Scope placeholder
- ast::ItemKind::Mac(..) => self.visit_invoc(item.id),
- ast::ItemKind::Mod(..) => {
- let module_data = ModuleData {
- parent: Some(self.current_module.clone()),
- macros: RefCell::new(FnvHashMap()),
- macros_escape: self.contains_macro_use(&item.attrs),
- };
- let orig_module = mem::replace(&mut self.current_module, Rc::new(module_data));
- visit::walk_item(self, item);
- self.current_module = orig_module;
- }
- _ => visit::walk_item(self, item),
- }
- }
-
- fn visit_block(&mut self, block: &ast::Block) {
- let module_data = ModuleData {
- parent: Some(self.current_module.clone()),
- macros: RefCell::new(FnvHashMap()),
- macros_escape: false,
- };
- let orig_module = mem::replace(&mut self.current_module, Rc::new(module_data));
- visit::walk_block(self, block);
- self.current_module = orig_module;
- }
}
};
match (value_result, type_result) {
- // With `#![feature(item_like_imports)]`, all namespaces
- // must be re-exported with extra visibility for an error to occur.
- (Ok(value_binding), Ok(type_binding)) if self.new_import_semantics => {
+ // All namespaces must be re-exported with extra visibility for an error to occur.
+ (Ok(value_binding), Ok(type_binding)) => {
let vis = directive.vis.get();
if !value_binding.pseudo_vis().is_at_least(vis, self) &&
!type_binding.pseudo_vis().is_at_least(vis, self) {
let module = directive.imported_module.get().unwrap();
self.populate_module_if_necessary(module);
- if let Some(Def::Trait(_)) = module.def {
+ if let Some(Def::Trait(_)) = module.def() {
self.session.span_err(directive.span, "items in traits are not importable.");
return;
} else if module.def_id() == directive.parent.def_id() {
//! retrieve the data from a crate.
use rustc::hir;
-use rustc::hir::def_id::DefId;
-use syntax::ast::{self, CrateNum, NodeId};
+use rustc::hir::def_id::{CrateNum, DefId};
+use syntax::ast::{self, NodeId};
use syntax_pos::Span;
pub struct CrateData {
pub scope: NodeId,
pub value: String,
pub visibility: Visibility,
- pub parent: Option<NodeId>,
+ pub parent: Option<DefId>,
pub docs: String,
}
pub scope: NodeId,
pub value: String,
pub decl_id: Option<DefId>,
+ pub parent: Option<DefId>,
pub visibility: Visibility,
pub docs: String,
}
pub type_value: String,
pub value: String,
pub scope: NodeId,
- pub parent: Option<NodeId>,
+ pub parent: Option<DefId>,
pub docs: String,
}
pub type_value: String,
pub value: String,
pub scope: NodeId,
- pub parent: Option<NodeId>,
+ pub parent: Option<DefId>,
pub docs: String,
}
pub qualname: String,
pub value: String,
pub visibility: Visibility,
- pub parent: Option<NodeId>,
+ pub parent: Option<DefId>,
pub docs: String,
}
pub qualname: String,
pub span: Span,
pub scope: NodeId,
- pub parent: Option<NodeId>,
+ pub parent: Option<DefId>,
pub value: String,
pub type_value: String,
pub visibility: Visibility,
//! is used for recording the output in a format-agnostic way (see CsvDumper
//! for an example).
+use rustc::hir;
use rustc::hir::def::Def;
-use rustc::hir::def_id::DefId;
-use rustc::hir::map::Node;
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use rustc::hir::map::{Node, NodeItem};
use rustc::session::Session;
use rustc::ty::{self, TyCtxt, ImplOrTraitItem, ImplOrTraitItemContainer};
use std::collections::HashSet;
use std::hash::*;
-use syntax::ast::{self, NodeId, PatKind, Attribute};
+use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::parse::token::{self, keywords};
use syntax::visit::{self, Visitor};
use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string};
use super::{escape, generated_code, SaveContext, PathCollector, docs_for_attrs};
use super::data::*;
use super::dump::Dump;
-use super::external_data::Lower;
+use super::external_data::{Lower, make_def_id};
use super::span_utils::SpanUtils;
use super::recorder;
analysis: analysis,
dumper: dumper,
span: span_utils.clone(),
- cur_scope: 0,
+ cur_scope: CRATE_NODE_ID,
mac_defs: HashSet::new(),
mac_uses: HashSet::new(),
}
let lo_loc = self.span.sess.codemap().lookup_char_pos(c.span.lo);
ExternalCrateData {
name: c.name,
- num: c.number,
+ num: CrateNum::from_u32(c.number),
file_name: SpanUtils::make_path_string(&lo_loc.file.name),
}
}).collect();
ref_id: None,
span: *span,
qualname: qualname.to_owned(),
- scope: 0
+ scope: CRATE_NODE_ID
}.lower(self.tcx));
// write the other sub-paths
// looks up anything, not just a type
fn lookup_type_ref(&self, ref_id: NodeId) -> Option<DefId> {
- match self.tcx.expect_def(ref_id) {
- Def::PrimTy(..) => None,
- Def::SelfTy(..) => None,
- def => Some(def.def_id()),
- }
+ self.tcx.expect_def_or_none(ref_id).and_then(|def| {
+ match def {
+ Def::PrimTy(..) => None,
+ Def::SelfTy(..) => None,
+ def => Some(def.def_id()),
+ }
+ })
}
fn process_def_kind(&mut self,
let def = self.tcx.expect_def(ref_id);
match def {
- Def::Mod(_) |
- Def::ForeignMod(_) => {
+ Def::Mod(_) => {
self.dumper.mod_ref(ModRefData {
span: sub_span.expect("No span found for mod ref"),
ref_id: Some(def_id),
qualname: format!("{}::{}", qualname, path_to_string(p)),
type_value: typ,
value: String::new(),
- scope: 0,
+ scope: CRATE_NODE_ID,
parent: None,
visibility: Visibility::Inherited,
docs: String::new(),
if !self.span.filter_generated(Some(method_data.span), span) {
let container =
self.tcx.impl_or_trait_item(self.tcx.map.local_def_id(id)).container();
- let decl_id = if let ImplOrTraitItemContainer::ImplContainer(id) = container {
- self.tcx.trait_id_of_impl(id).and_then(|id| {
- for item in &**self.tcx.trait_items(id) {
- if let &ImplOrTraitItem::MethodTraitItem(ref m) = item {
- if m.name == name {
- return Some(m.def_id);
+ let mut trait_id;
+ let mut decl_id = None;
+ match container {
+ ImplOrTraitItemContainer::ImplContainer(id) => {
+ trait_id = self.tcx.trait_id_of_impl(id);
+
+ match trait_id {
+ Some(id) => {
+ for item in &**self.tcx.trait_items(id) {
+ if let &ImplOrTraitItem::MethodTraitItem(ref m) = item {
+ if m.name == name {
+ decl_id = Some(m.def_id);
+ break;
+ }
+ }
+ }
+ }
+ None => {
+ if let Some(NodeItem(item)) = self.tcx.map.get_if_local(id) {
+ if let hir::ItemImpl(_, _, _, _, ref ty, _) = item.node {
+ trait_id = self.lookup_type_ref(ty.id);
+ }
}
}
}
- None
- })
- } else {
- None
- };
+ }
+ ImplOrTraitItemContainer::TraitContainer(id) => {
+ trait_id = Some(id);
+ }
+ }
self.dumper.method(MethodData {
id: method_data.id,
qualname: method_data.qualname.clone(),
value: sig_str,
decl_id: decl_id,
+ parent: trait_id,
visibility: vis,
docs: docs_for_attrs(attrs),
}.lower(self.tcx));
span: Span,
typ: &ast::Ty,
expr: &ast::Expr,
- parent_id: NodeId,
+ parent_id: DefId,
vis: Visibility,
attrs: &[Attribute]) {
let qualname = format!("::{}", self.tcx.node_path_str(id));
type_value: enum_data.qualname.clone(),
value: val,
scope: enum_data.scope,
- parent: Some(item.id),
+ parent: Some(make_def_id(item.id, &self.tcx.map)),
docs: docs_for_attrs(&variant.node.attrs),
}.lower(self.tcx));
}
type_value: enum_data.qualname.clone(),
value: val,
scope: enum_data.scope,
- parent: Some(item.id),
+ parent: Some(make_def_id(item.id, &self.tcx.map)),
docs: docs_for_attrs(&variant.node.attrs),
}.lower(self.tcx));
}
}
self.process_generic_params(type_parameters, item.span, "", item.id);
for impl_item in impl_items {
- self.process_impl_item(impl_item, item.id);
+ let map = &self.tcx.map;
+ self.process_impl_item(impl_item, make_def_id(item.id, map));
}
}
// walk generics and methods
self.process_generic_params(generics, item.span, &qualname, item.id);
for method in methods {
- self.process_trait_item(method, item.id)
+ let map = &self.tcx.map;
+ self.process_trait_item(method, make_def_id(item.id, map))
}
}
qualname: format!("{}${}", path_to_string(p), id),
value: value,
type_value: typ,
- scope: 0,
+ scope: CRATE_NODE_ID,
parent: None,
visibility: Visibility::Inherited,
docs: String::new(),
}
}
- fn process_trait_item(&mut self, trait_item: &ast::TraitItem, trait_id: NodeId) {
+ fn process_trait_item(&mut self, trait_item: &ast::TraitItem, trait_id: DefId) {
self.process_macro_use(trait_item.span, trait_item.id);
match trait_item.node {
ast::TraitItemKind::Const(ref ty, Some(ref expr)) => {
}
}
- fn process_impl_item(&mut self, impl_item: &ast::ImplItem, impl_id: NodeId) {
+ fn process_impl_item(&mut self, impl_item: &ast::ImplItem, impl_id: DefId) {
self.process_macro_use(impl_item.span, impl_item.id);
match impl_item.node {
ast::ImplItemKind::Const(ref ty, ref expr) => {
let alias_span = self.span.span_for_last_ident(item.span);
let cnum = match self.sess.cstore.extern_mod_stmt_cnum(item.id) {
Some(cnum) => cnum,
- None => 0,
+ None => LOCAL_CRATE,
};
if !self.span.filter_generated(alias_span, item.span) {
// process collected paths
for &(id, ref p, immut, ref_kind) in &collector.collected_paths {
match self.tcx.expect_def(id) {
- Def::Local(_, id) => {
+ Def::Local(def_id) => {
+ let id = self.tcx.map.as_local_node_id(def_id).unwrap();
let mut value = if immut == ast::Mutability::Immutable {
self.span.snippet(p.span).to_string()
} else {
qualname: format!("{}${}", path_to_string(p), id),
value: value,
type_value: typ,
- scope: 0,
+ scope: CRATE_NODE_ID,
parent: None,
visibility: Visibility::Inherited,
docs: String::new(),
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::hir::def_id::{DefId, DefIndex};
+use rustc::hir::def_id::{CrateNum, DefId, DefIndex};
use rustc::hir::map::Map;
use rustc::ty::TyCtxt;
-use syntax::ast::{CrateNum, NodeId};
+use syntax::ast::NodeId;
use syntax::codemap::CodeMap;
use syntax_pos::Span;
fn lower(self, tcx: TyCtxt) -> Self::Target;
}
-fn make_def_id(id: NodeId, map: &Map) -> DefId {
+pub fn make_def_id(id: NodeId, map: &Map) -> DefId {
map.opt_local_def_id(id).unwrap_or(null_def_id())
}
pub fn null_def_id() -> DefId {
- DefId { krate: u32::max_value(), index: DefIndex::from_u32(u32::max_value()) }
+ DefId {
+ krate: CrateNum::from_u32(u32::max_value()),
+ index: DefIndex::from_u32(u32::max_value())
+ }
}
#[derive(Clone, Debug, RustcEncodable)]
scope: make_def_id(self.scope, &tcx.map),
value: self.value,
visibility: self.visibility,
- parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+ parent: self.parent,
docs: self.docs,
}
}
value: self.value,
decl_id: self.decl_id,
visibility: self.visibility,
- parent: Some(make_def_id(self.scope, &tcx.map)),
+ parent: self.parent,
docs: self.docs,
}
}
type_value: self.type_value,
value: self.value,
scope: make_def_id(self.scope, &tcx.map),
- parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+ parent: self.parent,
docs: self.docs,
}
}
type_value: self.type_value,
value: self.value,
scope: make_def_id(self.scope, &tcx.map),
- parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+ parent: self.parent,
docs: self.docs,
}
}
qualname: self.qualname,
value: self.value,
visibility: self.visibility,
- parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+ parent: self.parent,
docs: self.docs,
}
}
scope: make_def_id(self.scope, &tcx.map),
value: self.value,
type_value: self.type_value,
- parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+ parent: self.parent,
visibility: self.visibility,
docs: self.docs,
}
impl From<DefId> for Id {
fn from(id: DefId) -> Id {
Id {
- krate: id.krate,
+ krate: id.krate.as_u32(),
index: id.index.as_u32(),
}
}
impl From<DefId> for Id {
fn from(id: DefId) -> Id {
Id {
- krate: id.krate,
+ krate: id.krate.as_u32(),
index: id.index.as_u32(),
}
}
use std::fs::{self, File};
use std::path::{Path, PathBuf};
-use syntax::ast::{self, NodeId, PatKind, Attribute};
+use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::parse::lexer::comments::strip_doc_comment_decoration;
use syntax::parse::token::{self, keywords, InternedString};
use syntax::visit::{self, Visitor};
pub use self::json_api_dumper::JsonApiDumper;
pub use self::json_dumper::JsonDumper;
pub use self::data::*;
+pub use self::external_data::make_def_id;
pub use self::dump::Dump;
pub use self::dump_visitor::DumpVisitor;
use self::span_utils::SpanUtils;
};
result.push(CrateData {
name: (&self.tcx.sess.cstore.crate_name(n)[..]).to_owned(),
- number: n,
+ number: n.as_u32(),
span: span,
});
}
qualname: qualname,
span: sub_span.unwrap(),
scope: scope,
- parent: Some(scope),
+ parent: Some(make_def_id(scope, &self.tcx.map)),
value: "".to_owned(),
type_value: typ,
visibility: From::from(&field.vis),
name: ast::Name, span: Span) -> Option<FunctionData> {
// The qualname for a method is the trait name or name of the struct in an impl in
// which the method is declared in, followed by the method's name.
- let (qualname, vis, docs) = match self.tcx.impl_of_method(self.tcx.map.local_def_id(id)) {
+ let (qualname, parent_scope, vis, docs) =
+ match self.tcx.impl_of_method(self.tcx.map.local_def_id(id)) {
Some(impl_id) => match self.tcx.map.get_if_local(impl_id) {
Some(NodeItem(item)) => {
match item.node {
let mut result = String::from("<");
result.push_str(&rustc::hir::print::ty_to_string(&ty));
- if let Some(def_id) = self.tcx.trait_id_of_impl(impl_id) {
+ let trait_id = self.tcx.trait_id_of_impl(impl_id);
+ if let Some(def_id) = trait_id {
result.push_str(" as ");
result.push_str(&self.tcx.item_path_str(def_id));
}
result.push_str(">");
- (result, From::from(&item.vis), docs_for_attrs(&item.attrs))
+ (result, trait_id, From::from(&item.vis), docs_for_attrs(&item.attrs))
}
_ => {
span_bug!(span,
match self.tcx.map.get_if_local(def_id) {
Some(NodeItem(item)) => {
(format!("::{}", self.tcx.item_path_str(def_id)),
+ Some(def_id),
From::from(&item.vis),
docs_for_attrs(&item.attrs))
}
let qualname = format!("{}::{}", qualname, name);
let def_id = self.tcx.map.local_def_id(id);
- let decl_id = self.tcx.trait_item_of_item(def_id).and_then(|new_id| {
- let new_def_id = new_id.def_id();
+ let decl_id = self.tcx.trait_item_of_item(def_id).and_then(|new_def_id| {
if new_def_id != def_id {
Some(new_def_id)
} else {
let sub_span = self.span_utils.sub_span_after_keyword(span, keywords::Fn);
filter!(self.span_utils, sub_span, span, None);
- let parent_scope = self.enclosing_scope(id);
Some(FunctionData {
id: id,
name: name.to_string(),
// FIXME you get better data here by using the visitor.
value: String::new(),
visibility: vis,
- parent: Some(parent_scope),
+ parent: parent_scope,
docs: docs,
})
}
.map(|mr| mr.def_id())
}
ty::ImplContainer(def_id) => {
- let impl_items = self.tcx.impl_items.borrow();
- Some(impl_items.get(&def_id)
- .unwrap()
- .iter()
- .find(|mr| {
- self.tcx.impl_or_trait_item(mr.def_id()).name() ==
- ti.name()
- })
- .unwrap()
- .def_id())
+ Some(*self.tcx.impl_or_trait_items(def_id).iter().find(|&&mr| {
+ self.tcx.impl_or_trait_item(mr).name() == ti.name()
+ }).unwrap())
}
}
} else {
#[inline]
pub fn enclosing_scope(&self, id: NodeId) -> NodeId {
- self.tcx.map.get_enclosing_scope(id).unwrap_or(0)
+ self.tcx.map.get_enclosing_scope(id).unwrap_or(CRATE_NODE_ID)
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use llvm::{self, ValueRef};
+use llvm::{self, ValueRef, Integer, Pointer, Float, Double, Struct, Array, Vector};
use base;
use build::AllocaFcx;
use common::{type_is_fat_ptr, BlockAndBuilder, C_uint};
use cabi_mips;
use cabi_mips64;
use cabi_asmjs;
-use machine::{llalign_of_min, llsize_of, llsize_of_real, llsize_of_store};
+use machine::{llalign_of_min, llsize_of, llsize_of_alloc};
use type_::Type;
use type_of;
// Wipe old attributes, likely not valid through indirection.
self.attrs = llvm::Attributes::default();
- let llarg_sz = llsize_of_real(ccx, self.ty);
+ let llarg_sz = llsize_of_alloc(ccx, self.ty);
// For non-immediate arguments the callee gets its own copy of
// the value on the stack, so there are no aliases. It's also
base::call_memcpy(bcx,
bcx.pointercast(dst, Type::i8p(ccx)),
bcx.pointercast(llscratch, Type::i8p(ccx)),
- C_uint(ccx, llsize_of_store(ccx, self.ty)),
+ C_uint(ccx, llsize_of_alloc(ccx, self.ty)),
cmp::min(llalign_of_min(ccx, self.ty),
llalign_of_min(ccx, ty)) as u32);
if let Layout::CEnum { signed, .. } = *ccx.layout_of(ty) {
arg.signedness = Some(signed);
}
- if llsize_of_real(ccx, arg.ty) == 0 {
+ if llsize_of_alloc(ccx, arg.ty) == 0 {
// For some forsaken reason, x86_64-pc-windows-gnu
// doesn't ignore zero-sized struct arguments.
// The same is true for s390x-unknown-linux-gnu.
ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
ty::TyBox(ty) => {
let llty = type_of::sizing_type_of(ccx, ty);
- let llsz = llsize_of_real(ccx, llty);
+ let llsz = llsize_of_alloc(ccx, llty);
ret.attrs.set_dereferenceable(llsz);
}
_ => {}
} else {
if let Some(inner) = rust_ptr_attrs(ty, &mut arg) {
let llty = type_of::sizing_type_of(ccx, inner);
- let llsz = llsize_of_real(ccx, llty);
+ let llsz = llsize_of_alloc(ccx, llty);
arg.attrs.set_dereferenceable(llsz);
}
args.push(arg);
return;
}
- let size = llsize_of_real(ccx, llty);
- if size > llsize_of_real(ccx, ccx.int_type()) {
+ let size = llsize_of_alloc(ccx, llty);
+ if size > llsize_of_alloc(ccx, ccx.int_type()) {
arg.make_indirect(ccx);
} else if size > 0 {
// We want to pass small aggregates as immediates, but using
}
}
}
+
+pub fn align_up_to(off: usize, a: usize) -> usize {
+ return (off + a - 1) / a * a;
+}
+
+fn align(off: usize, ty: Type, pointer: usize) -> usize {
+ let a = ty_align(ty, pointer);
+ return align_up_to(off, a);
+}
+
+pub fn ty_align(ty: Type, pointer: usize) -> usize {
+ match ty.kind() {
+ Integer => ((ty.int_width() as usize) + 7) / 8,
+ Pointer => pointer,
+ Float => 4,
+ Double => 8,
+ Struct => {
+ if ty.is_packed() {
+ 1
+ } else {
+ let str_tys = ty.field_types();
+ str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t, pointer)))
+ }
+ }
+ Array => {
+ let elt = ty.element_type();
+ ty_align(elt, pointer)
+ }
+ Vector => {
+ let len = ty.vector_length();
+ let elt = ty.element_type();
+ ty_align(elt, pointer) * len
+ }
+ _ => bug!("ty_align: unhandled type")
+ }
+}
+
+pub fn ty_size(ty: Type, pointer: usize) -> usize {
+ match ty.kind() {
+ Integer => ((ty.int_width() as usize) + 7) / 8,
+ Pointer => pointer,
+ Float => 4,
+ Double => 8,
+ Struct => {
+ if ty.is_packed() {
+ let str_tys = ty.field_types();
+ str_tys.iter().fold(0, |s, t| s + ty_size(*t, pointer))
+ } else {
+ let str_tys = ty.field_types();
+ let size = str_tys.iter().fold(0, |s, t| {
+ align(s, *t, pointer) + ty_size(*t, pointer)
+ });
+ align(size, ty, pointer)
+ }
+ }
+ Array => {
+ let len = ty.array_length();
+ let elt = ty.element_type();
+ let eltsz = ty_size(elt, pointer);
+ len * eltsz
+ }
+ Vector => {
+ let len = ty.vector_length();
+ let elt = ty.element_type();
+ let eltsz = ty_size(elt, pointer);
+ len * eltsz
+ },
+ _ => bug!("ty_size: unhandled type")
+ }
+}
//! used unboxed and any field can have pointers (including mutable)
//! taken to it, implementing them for Rust seems difficult.
-pub use self::Repr::*;
use super::Disr;
use std;
-use std::rc::Rc;
use llvm::{ValueRef, True, IntEQ, IntNE};
-use rustc::ty::subst::Substs;
-use rustc::ty::{self, AdtKind, Ty, TyCtxt};
-use syntax::ast;
+use rustc::ty::layout;
+use rustc::ty::{self, Ty, AdtKind};
use syntax::attr;
-use syntax::attr::IntType;
-use abi::FAT_PTR_ADDR;
use build::*;
use common::*;
use debuginfo::DebugLoc;
use glue;
+use base;
use machine;
use monomorphize;
use type_::Type;
type Hint = attr::ReprAttr;
-/// Representations.
-#[derive(Eq, PartialEq, Debug)]
-pub enum Repr<'tcx> {
- /// C-like enums; basically an int.
- CEnum(IntType, Disr, Disr), // discriminant range (signedness based on the IntType)
- /// Single-case variants, and structs/tuples/records.
- Univariant(Struct<'tcx>),
- /// Untagged unions.
- UntaggedUnion(Union<'tcx>),
- /// General-case enums: for each case there is a struct, and they
- /// all start with a field for the discriminant.
- General(IntType, Vec<Struct<'tcx>>),
- /// Two cases distinguished by a nullable pointer: the case with discriminant
- /// `nndiscr` must have single field which is known to be nonnull due to its type.
- /// The other case is known to be zero sized. Hence we represent the enum
- /// as simply a nullable pointer: if not null it indicates the `nndiscr` variant,
- /// otherwise it indicates the other case.
- RawNullablePointer {
- nndiscr: Disr,
- nnty: Ty<'tcx>,
- nullfields: Vec<Ty<'tcx>>
- },
- /// Two cases distinguished by a nullable pointer: the case with discriminant
- /// `nndiscr` is represented by the struct `nonnull`, where the `discrfield`th
- /// field is known to be nonnull due to its type; if that field is null, then
- /// it represents the other case, which is inhabited by at most one value
- /// (and all other fields are undefined/unused).
- ///
- /// For example, `std::option::Option` instantiated at a safe pointer type
- /// is represented such that `None` is a null pointer and `Some` is the
- /// identity function.
- StructWrappedNullablePointer {
- nonnull: Struct<'tcx>,
- nndiscr: Disr,
- discrfield: DiscrField,
- nullfields: Vec<Ty<'tcx>>,
- }
-}
-
-/// For structs, and struct-like parts of anything fancier.
-#[derive(Eq, PartialEq, Debug)]
-pub struct Struct<'tcx> {
- // If the struct is DST, then the size and alignment do not take into
- // account the unsized fields of the struct.
- pub size: u64,
- pub align: u32,
- pub sized: bool,
- pub packed: bool,
- pub fields: Vec<Ty<'tcx>>,
-}
-
-/// For untagged unions.
-#[derive(Eq, PartialEq, Debug)]
-pub struct Union<'tcx> {
- pub min_size: u64,
- pub align: u32,
- pub packed: bool,
- pub fields: Vec<Ty<'tcx>>,
-}
-
#[derive(Copy, Clone)]
pub struct MaybeSizedValue {
pub value: ValueRef,
}
}
-/// Decides how to represent a given type.
-pub fn represent_type<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- t: Ty<'tcx>)
- -> Rc<Repr<'tcx>> {
- debug!("Representing: {}", t);
- if let Some(repr) = cx.adt_reprs().borrow().get(&t) {
- return repr.clone();
- }
-
- let repr = Rc::new(represent_type_uncached(cx, t));
- debug!("Represented as: {:?}", repr);
- cx.adt_reprs().borrow_mut().insert(t, repr.clone());
- repr
-}
-
-fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- t: Ty<'tcx>) -> Repr<'tcx> {
+/// Given an enum, struct, closure, or tuple, extracts fields.
+/// Treats closures as a struct with one variant.
+/// `empty_if_no_variants` is a switch to deal with empty enums.
+/// If true, `variant_index` is disregarded and an empty Vec returned in this case.
+fn compute_fields<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>,
+ variant_index: usize,
+ empty_if_no_variants: bool) -> Vec<Ty<'tcx>> {
match t.sty {
- ty::TyTuple(ref elems) => {
- Univariant(mk_struct(cx, &elems[..], false, t))
- }
- ty::TyClosure(_, ref substs) => {
- Univariant(mk_struct(cx, &substs.upvar_tys, false, t))
- }
- ty::TyAdt(def, substs) => match def.adt_kind() {
- AdtKind::Struct => {
- let ftys = def.struct_variant().fields.iter().map(|field| {
- monomorphize::field_ty(cx.tcx(), substs, field)
- }).collect::<Vec<_>>();
- let packed = cx.tcx().lookup_packed(def.did);
-
- Univariant(mk_struct(cx, &ftys[..], packed, t))
- }
- AdtKind::Union => {
- let ftys = def.struct_variant().fields.iter().map(|field| {
- monomorphize::field_ty(cx.tcx(), substs, field)
- }).collect::<Vec<_>>();
- let packed = cx.tcx().lookup_packed(def.did);
- UntaggedUnion(mk_union(cx, &ftys[..], packed, t))
- }
- AdtKind::Enum => {
- let cases = get_cases(cx.tcx(), def, substs);
- let hint = *cx.tcx().lookup_repr_hints(def.did).get(0)
- .unwrap_or(&attr::ReprAny);
-
- if cases.is_empty() {
- // Uninhabitable; represent as unit
- // (Typechecking will reject discriminant-sizing attrs.)
- assert_eq!(hint, attr::ReprAny);
- return Univariant(mk_struct(cx, &[], false, t));
- }
-
- if cases.iter().all(|c| c.tys.is_empty()) {
- // All bodies empty -> intlike
- let discrs: Vec<_> = cases.iter().map(|c| Disr::from(c.discr)).collect();
- let bounds = IntBounds {
- ulo: discrs.iter().min().unwrap().0,
- uhi: discrs.iter().max().unwrap().0,
- slo: discrs.iter().map(|n| n.0 as i64).min().unwrap(),
- shi: discrs.iter().map(|n| n.0 as i64).max().unwrap()
- };
- return mk_cenum(cx, hint, &bounds);
- }
-
- // Since there's at least one
- // non-empty body, explicit discriminants should have
- // been rejected by a checker before this point.
- if !cases.iter().enumerate().all(|(i,c)| c.discr == Disr::from(i)) {
- bug!("non-C-like enum {} with specified discriminants",
- cx.tcx().item_path_str(def.did));
- }
-
- if cases.len() == 1 && hint == attr::ReprAny {
- // Equivalent to a struct or tuple.
- return Univariant(mk_struct(cx, &cases[0].tys, false, t));
- }
-
- if cases.len() == 2 && hint == attr::ReprAny {
- // Nullable pointer optimization
- let mut discr = 0;
- while discr < 2 {
- if cases[1 - discr].is_zerolen(cx, t) {
- let st = mk_struct(cx, &cases[discr].tys,
- false, t);
- match cases[discr].find_ptr(cx) {
- Some(ref df) if df.len() == 1 && st.fields.len() == 1 => {
- return RawNullablePointer {
- nndiscr: Disr::from(discr),
- nnty: st.fields[0],
- nullfields: cases[1 - discr].tys.clone()
- };
- }
- Some(mut discrfield) => {
- discrfield.push(0);
- discrfield.reverse();
- return StructWrappedNullablePointer {
- nndiscr: Disr::from(discr),
- nonnull: st,
- discrfield: discrfield,
- nullfields: cases[1 - discr].tys.clone()
- };
- }
- None => {}
- }
- }
- discr += 1;
- }
- }
-
- // The general case.
- assert!((cases.len() - 1) as i64 >= 0);
- let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64,
- slo: 0, shi: (cases.len() - 1) as i64 };
- let min_ity = range_to_inttype(cx, hint, &bounds);
-
- // Create the set of structs that represent each variant
- // Use the minimum integer type we figured out above
- let fields : Vec<_> = cases.iter().map(|c| {
- let mut ftys = vec!(ty_of_inttype(cx.tcx(), min_ity));
- ftys.extend_from_slice(&c.tys);
- mk_struct(cx, &ftys, false, t)
- }).collect();
-
-
- // Check to see if we should use a different type for the
- // discriminant. If the overall alignment of the type is
- // the same as the first field in each variant, we can safely use
- // an alignment-sized type.
- // We increase the size of the discriminant to avoid LLVM copying
- // padding when it doesn't need to. This normally causes unaligned
- // load/stores and excessive memcpy/memset operations. By using a
- // bigger integer size, LLVM can be sure about it's contents and
- // won't be so conservative.
- // This check is needed to avoid increasing the size of types when
- // the alignment of the first field is smaller than the overall
- // alignment of the type.
- let (_, align) = union_size_and_align(&fields);
- let mut use_align = true;
- for st in &fields {
- // Get the first non-zero-sized field
- let field = st.fields.iter().skip(1).filter(|ty| {
- let t = type_of::sizing_type_of(cx, **ty);
- machine::llsize_of_real(cx, t) != 0 ||
- // This case is only relevant for zero-sized types with large alignment
- machine::llalign_of_min(cx, t) != 1
- }).next();
-
- if let Some(field) = field {
- let field_align = type_of::align_of(cx, *field);
- if field_align != align {
- use_align = false;
- break;
- }
- }
- }
-
- // If the alignment is smaller than the chosen discriminant size, don't use the
- // alignment as the final size.
- let min_ty = ll_inttype(&cx, min_ity);
- let min_size = machine::llsize_of_real(cx, min_ty);
- if (align as u64) < min_size {
- use_align = false;
- }
-
- let ity = if use_align {
- // Use the overall alignment
- match align {
- 1 => attr::UnsignedInt(ast::UintTy::U8),
- 2 => attr::UnsignedInt(ast::UintTy::U16),
- 4 => attr::UnsignedInt(ast::UintTy::U32),
- 8 if machine::llalign_of_min(cx, Type::i64(cx)) == 8 =>
- attr::UnsignedInt(ast::UintTy::U64),
- _ => min_ity // use min_ity as a fallback
- }
- } else {
- min_ity
- };
-
- let fields : Vec<_> = cases.iter().map(|c| {
- let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity));
- ftys.extend_from_slice(&c.tys);
- mk_struct(cx, &ftys[..], false, t)
- }).collect();
-
- ensure_enum_fits_in_address_space(cx, &fields[..], t);
-
- General(ity, fields)
- }
+ ty::TyAdt(ref def, _) if def.variants.len() == 0 && empty_if_no_variants => {
+ Vec::default()
},
- _ => bug!("adt::represent_type called on non-ADT type: {}", t)
- }
-}
-
-// this should probably all be in ty
-struct Case<'tcx> {
- discr: Disr,
- tys: Vec<Ty<'tcx>>
-}
-
-/// This represents the (GEP) indices to follow to get to the discriminant field
-pub type DiscrField = Vec<usize>;
-
-fn find_discr_field_candidate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: Ty<'tcx>,
- mut path: DiscrField)
- -> Option<DiscrField> {
- match ty.sty {
- // Fat &T/&mut T/Box<T> i.e. T is [T], str, or Trait
- ty::TyRef(_, ty::TypeAndMut { ty, .. }) | ty::TyBox(ty) if !type_is_sized(tcx, ty) => {
- path.push(FAT_PTR_ADDR);
- Some(path)
+ ty::TyAdt(ref def, ref substs) => {
+ def.variants[variant_index].fields.iter().map(|f| {
+ monomorphize::field_ty(cx.tcx(), substs, f)
+ }).collect::<Vec<_>>()
},
-
- // Regular thin pointer: &T/&mut T/Box<T>
- ty::TyRef(..) | ty::TyBox(..) => Some(path),
-
- // Function pointer: `fn() -> i32`
- ty::TyFnPtr(_) => Some(path),
-
- // Is this the NonZero lang item wrapping a pointer or integer type?
- ty::TyAdt(def, substs) if Some(def.did) == tcx.lang_items.non_zero() => {
- let nonzero_fields = &def.struct_variant().fields;
- assert_eq!(nonzero_fields.len(), 1);
- let field_ty = monomorphize::field_ty(tcx, substs, &nonzero_fields[0]);
- match field_ty.sty {
- ty::TyRawPtr(ty::TypeAndMut { ty, .. }) if !type_is_sized(tcx, ty) => {
- path.extend_from_slice(&[0, FAT_PTR_ADDR]);
- Some(path)
- },
- ty::TyRawPtr(..) | ty::TyInt(..) | ty::TyUint(..) => {
- path.push(0);
- Some(path)
- },
- _ => None
- }
+ ty::TyTuple(fields) => fields.to_vec(),
+ ty::TyClosure(_, substs) => {
+ if variant_index > 0 { bug!("{} is a closure, which only has one variant", t);}
+ substs.upvar_tys.to_vec()
},
-
- // Perhaps one of the fields of this struct is non-zero
- // let's recurse and find out
- ty::TyAdt(def, substs) if def.is_struct() => {
- for (j, field) in def.struct_variant().fields.iter().enumerate() {
- let field_ty = monomorphize::field_ty(tcx, substs, field);
- if let Some(mut fpath) = find_discr_field_candidate(tcx, field_ty, path.clone()) {
- fpath.push(j);
- return Some(fpath);
- }
- }
- None
- },
-
- // Perhaps one of the upvars of this struct is non-zero
- // Let's recurse and find out!
- ty::TyClosure(_, ref substs) => {
- for (j, &ty) in substs.upvar_tys.iter().enumerate() {
- if let Some(mut fpath) = find_discr_field_candidate(tcx, ty, path.clone()) {
- fpath.push(j);
- return Some(fpath);
- }
- }
- None
- },
-
- // Can we use one of the fields in this tuple?
- ty::TyTuple(ref tys) => {
- for (j, &ty) in tys.iter().enumerate() {
- if let Some(mut fpath) = find_discr_field_candidate(tcx, ty, path.clone()) {
- fpath.push(j);
- return Some(fpath);
- }
- }
- None
- },
-
- // Is this a fixed-size array of something non-zero
- // with at least one element?
- ty::TyArray(ety, d) if d > 0 => {
- if let Some(mut vpath) = find_discr_field_candidate(tcx, ety, path) {
- vpath.push(0);
- Some(vpath)
- } else {
- None
- }
- },
-
- // Anything else is not a pointer
- _ => None
- }
-}
-
-impl<'tcx> Case<'tcx> {
- fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool {
- mk_struct(cx, &self.tys, false, scapegoat).size == 0
- }
-
- fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option<DiscrField> {
- for (i, &ty) in self.tys.iter().enumerate() {
- if let Some(mut path) = find_discr_field_candidate(cx.tcx(), ty, vec![]) {
- path.push(i);
- return Some(path);
- }
- }
- None
- }
-}
-
-fn get_cases<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- adt: ty::AdtDef<'tcx>,
- substs: &Substs<'tcx>)
- -> Vec<Case<'tcx>> {
- adt.variants.iter().map(|vi| {
- let field_tys = vi.fields.iter().map(|field| {
- monomorphize::field_ty(tcx, substs, field)
- }).collect();
- Case { discr: Disr::from(vi.disr_val), tys: field_tys }
- }).collect()
-}
-
-fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- tys: &[Ty<'tcx>], packed: bool,
- scapegoat: Ty<'tcx>)
- -> Struct<'tcx> {
- let sized = tys.iter().all(|&ty| type_is_sized(cx.tcx(), ty));
- let lltys : Vec<Type> = if sized {
- tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
- } else {
- tys.iter().filter(|&ty| type_is_sized(cx.tcx(), *ty))
- .map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
- };
-
- ensure_struct_fits_in_address_space(cx, &lltys[..], packed, scapegoat);
-
- let llty_rec = Type::struct_(cx, &lltys[..], packed);
- Struct {
- size: machine::llsize_of_alloc(cx, llty_rec),
- align: machine::llalign_of_min(cx, llty_rec),
- sized: sized,
- packed: packed,
- fields: tys.to_vec(),
- }
-}
-
-fn mk_union<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- tys: &[Ty<'tcx>], packed: bool,
- _scapegoat: Ty<'tcx>)
- -> Union<'tcx> {
- let mut min_size = 0;
- let mut align = 0;
- for llty in tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)) {
- let field_size = machine::llsize_of_alloc(cx, llty);
- if min_size < field_size {
- min_size = field_size;
- }
- let field_align = machine::llalign_of_min(cx, llty);
- if align < field_align {
- align = field_align;
- }
- }
-
- Union {
- min_size: min_size,
- align: if packed { 1 } else { align },
- packed: packed,
- fields: tys.to_vec(),
- }
-}
-
-#[derive(Debug)]
-struct IntBounds {
- slo: i64,
- shi: i64,
- ulo: u64,
- uhi: u64
-}
-
-fn mk_cenum<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- hint: Hint, bounds: &IntBounds)
- -> Repr<'tcx> {
- let it = range_to_inttype(cx, hint, bounds);
- match it {
- attr::SignedInt(_) => CEnum(it, Disr(bounds.slo as u64), Disr(bounds.shi as u64)),
- attr::UnsignedInt(_) => CEnum(it, Disr(bounds.ulo), Disr(bounds.uhi))
- }
-}
-
-fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntType {
- debug!("range_to_inttype: {:?} {:?}", hint, bounds);
- // Lists of sizes to try. u64 is always allowed as a fallback.
- #[allow(non_upper_case_globals)]
- const choose_shortest: &'static [IntType] = &[
- attr::UnsignedInt(ast::UintTy::U8), attr::SignedInt(ast::IntTy::I8),
- attr::UnsignedInt(ast::UintTy::U16), attr::SignedInt(ast::IntTy::I16),
- attr::UnsignedInt(ast::UintTy::U32), attr::SignedInt(ast::IntTy::I32)];
- #[allow(non_upper_case_globals)]
- const at_least_32: &'static [IntType] = &[
- attr::UnsignedInt(ast::UintTy::U32), attr::SignedInt(ast::IntTy::I32)];
-
- let attempts;
- match hint {
- attr::ReprInt(span, ity) => {
- if !bounds_usable(cx, ity, bounds) {
- span_bug!(span, "representation hint insufficient for discriminant range")
- }
- return ity;
- }
- attr::ReprExtern => {
- attempts = match &cx.sess().target.target.arch[..] {
- // WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32`
- // appears to be used on Linux and NetBSD, but some systems may use the variant
- // corresponding to `choose_shortest`. However, we don't run on those yet...?
- "arm" => at_least_32,
- _ => at_least_32,
- }
- }
- attr::ReprAny => {
- attempts = choose_shortest;
- },
- attr::ReprPacked => {
- bug!("range_to_inttype: found ReprPacked on an enum");
- }
- attr::ReprSimd => {
- bug!("range_to_inttype: found ReprSimd on an enum");
- }
- }
- for &ity in attempts {
- if bounds_usable(cx, ity, bounds) {
- return ity;
- }
- }
- return attr::UnsignedInt(ast::UintTy::U64);
-}
-
-pub fn ll_inttype(cx: &CrateContext, ity: IntType) -> Type {
- match ity {
- attr::SignedInt(t) => Type::int_from_ty(cx, t),
- attr::UnsignedInt(t) => Type::uint_from_ty(cx, t)
- }
-}
-
-fn bounds_usable(cx: &CrateContext, ity: IntType, bounds: &IntBounds) -> bool {
- debug!("bounds_usable: {:?} {:?}", ity, bounds);
- match ity {
- attr::SignedInt(_) => {
- let lllo = C_integral(ll_inttype(cx, ity), bounds.slo as u64, true);
- let llhi = C_integral(ll_inttype(cx, ity), bounds.shi as u64, true);
- bounds.slo == const_to_int(lllo) as i64 && bounds.shi == const_to_int(llhi) as i64
- }
- attr::UnsignedInt(_) => {
- let lllo = C_integral(ll_inttype(cx, ity), bounds.ulo, false);
- let llhi = C_integral(ll_inttype(cx, ity), bounds.uhi, false);
- bounds.ulo == const_to_uint(lllo) as u64 && bounds.uhi == const_to_uint(llhi) as u64
- }
- }
-}
-
-pub fn ty_of_inttype<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ity: IntType) -> Ty<'tcx> {
- match ity {
- attr::SignedInt(t) => tcx.mk_mach_int(t),
- attr::UnsignedInt(t) => tcx.mk_mach_uint(t)
- }
-}
-
-// LLVM doesn't like types that don't fit in the address space
-fn ensure_struct_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- fields: &[Type],
- packed: bool,
- scapegoat: Ty<'tcx>) {
- let mut offset = 0;
- for &llty in fields {
- // Invariant: offset < ccx.obj_size_bound() <= 1<<61
- if !packed {
- let type_align = machine::llalign_of_min(ccx, llty);
- offset = roundup(offset, type_align);
- }
- // type_align is a power-of-2, so still offset < ccx.obj_size_bound()
- // llsize_of_alloc(ccx, llty) is also less than ccx.obj_size_bound()
- // so the sum is less than 1<<62 (and therefore can't overflow).
- offset += machine::llsize_of_alloc(ccx, llty);
-
- if offset >= ccx.obj_size_bound() {
- ccx.report_overbig_object(scapegoat);
- }
- }
-}
-
-fn union_size_and_align(sts: &[Struct]) -> (machine::llsize, machine::llalign) {
- let size = sts.iter().map(|st| st.size).max().unwrap();
- let align = sts.iter().map(|st| st.align).max().unwrap();
- (roundup(size, align), align)
-}
-
-fn ensure_enum_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- fields: &[Struct],
- scapegoat: Ty<'tcx>) {
- let (total_size, _) = union_size_and_align(fields);
-
- if total_size >= ccx.obj_size_bound() {
- ccx.report_overbig_object(scapegoat);
+ _ => bug!("{} is not a type that can have fields.", t)
}
}
+/// This represents the (GEP) indices to follow to get to the discriminant field
+pub type DiscrField = Vec<usize>;
/// LLVM-level types are a little complicated.
///
/// For nominal types, in some cases, we need to use LLVM named structs
/// and fill in the actual contents in a second pass to prevent
/// unbounded recursion; see also the comments in `trans::type_of`.
-pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>) -> Type {
- generic_type_of(cx, r, None, false, false)
+pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
+ generic_type_of(cx, t, None, false, false)
}
// this out, but if you call this on an unsized type without realising it, you
// are going to get the wrong type (it will not include the unsized parts of it).
pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- r: &Repr<'tcx>, dst: bool) -> Type {
- generic_type_of(cx, r, None, true, dst)
+ t: Ty<'tcx>, dst: bool) -> Type {
+ generic_type_of(cx, t, None, true, dst)
}
pub fn incomplete_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- r: &Repr<'tcx>, name: &str) -> Type {
- generic_type_of(cx, r, Some(name), false, false)
+ t: Ty<'tcx>, name: &str) -> Type {
+ generic_type_of(cx, t, Some(name), false, false)
}
pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- r: &Repr<'tcx>, llty: &mut Type) {
- match *r {
- CEnum(..) | General(..) | UntaggedUnion(..) | RawNullablePointer { .. } => { }
- Univariant(ref st) | StructWrappedNullablePointer { nonnull: ref st, .. } =>
- llty.set_struct_body(&struct_llfields(cx, st, false, false),
- st.packed)
+ t: Ty<'tcx>, llty: &mut Type) {
+ let l = cx.layout_of(t);
+ debug!("finish_type_of: {} with layout {:#?}", t, l);
+ match *l {
+ layout::CEnum { .. } | layout::General { .. }
+ | layout::UntaggedUnion { .. } | layout::RawNullablePointer { .. } => { }
+ layout::Univariant { ..}
+ | layout::StructWrappedNullablePointer { .. } => {
+ let (nonnull_variant, packed) = match *l {
+ layout::Univariant { ref variant, .. } => (0, variant.packed),
+ layout::StructWrappedNullablePointer { nndiscr, ref nonnull, .. } =>
+ (nndiscr, nonnull.packed),
+ _ => unreachable!()
+ };
+ let fields = compute_fields(cx, t, nonnull_variant as usize, true);
+ llty.set_struct_body(&struct_llfields(cx, &fields, false, false),
+ packed)
+ },
+ _ => bug!("This function cannot handle {} with layout {:#?}", t, l)
}
}
fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
- r: &Repr<'tcx>,
+ t: Ty<'tcx>,
name: Option<&str>,
sizing: bool,
dst: bool) -> Type {
- debug!("adt::generic_type_of r: {:?} name: {:?} sizing: {} dst: {}",
- r, name, sizing, dst);
- match *r {
- CEnum(ity, ..) => ll_inttype(cx, ity),
- RawNullablePointer { nnty, .. } =>
- type_of::sizing_type_of(cx, nnty),
- StructWrappedNullablePointer { nonnull: ref st, .. } => {
+ let l = cx.layout_of(t);
+ debug!("adt::generic_type_of t: {:?} name: {:?} sizing: {} dst: {}",
+ t, name, sizing, dst);
+ match *l {
+ layout::CEnum { discr, .. } => Type::from_integer(cx, discr),
+ layout::RawNullablePointer { nndiscr, .. } => {
+ let (def, substs) = match t.sty {
+ ty::TyAdt(d, s) => (d, s),
+ _ => bug!("{} is not an ADT", t)
+ };
+ let nnty = monomorphize::field_ty(cx.tcx(), substs,
+ &def.variants[nndiscr as usize].fields[0]);
+ type_of::sizing_type_of(cx, nnty)
+ }
+ layout::StructWrappedNullablePointer { nndiscr, ref nonnull, .. } => {
+ let fields = compute_fields(cx, t, nndiscr as usize, false);
match name {
None => {
- Type::struct_(cx, &struct_llfields(cx, st, sizing, dst),
- st.packed)
+ Type::struct_(cx, &struct_llfields(cx, &fields, sizing, dst),
+ nonnull.packed)
}
Some(name) => {
assert_eq!(sizing, false);
}
}
}
- Univariant(ref st) => {
+ layout::Univariant { ref variant, .. } => {
+ // Note that this case also handles empty enums.
+ // Thus the true as the final parameter here.
+ let fields = compute_fields(cx, t, 0, true);
match name {
None => {
- let fields = struct_llfields(cx, st, sizing, dst);
- Type::struct_(cx, &fields, st.packed)
+ let fields = struct_llfields(cx, &fields, sizing, dst);
+ Type::struct_(cx, &fields, variant.packed)
}
Some(name) => {
// Hypothesis: named_struct's can never need a
}
}
}
- UntaggedUnion(ref un) => {
+ layout::Vector { element, count } => {
+ let elem_ty = Type::from_primitive(cx, element);
+ Type::vector(&elem_ty, count)
+ }
+ layout::UntaggedUnion { ref variants, .. }=> {
// Use alignment-sized ints to fill all the union storage.
- let (size, align) = (roundup(un.min_size, un.align), un.align);
-
- let align_s = align as u64;
- assert_eq!(size % align_s, 0); // Ensure division in align_units comes out evenly
- let align_units = size / align_s;
- let fill_ty = match align_s {
- 1 => Type::array(&Type::i8(cx), align_units),
- 2 => Type::array(&Type::i16(cx), align_units),
- 4 => Type::array(&Type::i32(cx), align_units),
- 8 if machine::llalign_of_min(cx, Type::i64(cx)) == 8 =>
- Type::array(&Type::i64(cx), align_units),
- a if a.count_ones() == 1 => Type::array(&Type::vector(&Type::i32(cx), a / 4),
- align_units),
- _ => bug!("unsupported union alignment: {}", align)
- };
+ let size = variants.stride().bytes();
+ let align = variants.align.abi();
+ let fill = union_fill(cx, size, align);
match name {
None => {
- Type::struct_(cx, &[fill_ty], un.packed)
+ Type::struct_(cx, &[fill], variants.packed)
}
Some(name) => {
let mut llty = Type::named_struct(cx, name);
- llty.set_struct_body(&[fill_ty], un.packed);
+ llty.set_struct_body(&[fill], variants.packed);
llty
}
}
}
- General(ity, ref sts) => {
+ layout::General { discr, size, align, .. } => {
// We need a representation that has:
// * The alignment of the most-aligned field
// * The size of the largest variant (rounded up to that alignment)
// of the size.
//
// FIXME #10604: this breaks when vector types are present.
- let (size, align) = union_size_and_align(&sts[..]);
- let align_s = align as u64;
- let discr_ty = ll_inttype(cx, ity);
- let discr_size = machine::llsize_of_alloc(cx, discr_ty);
- let padded_discr_size = roundup(discr_size, align);
- assert_eq!(size % align_s, 0); // Ensure division in align_units comes out evenly
- let align_units = (size - padded_discr_size) / align_s;
- let fill_ty = match align_s {
- 1 => Type::array(&Type::i8(cx), align_units),
- 2 => Type::array(&Type::i16(cx), align_units),
- 4 => Type::array(&Type::i32(cx), align_units),
- 8 if machine::llalign_of_min(cx, Type::i64(cx)) == 8 =>
- Type::array(&Type::i64(cx), align_units),
- a if a.count_ones() == 1 => Type::array(&Type::vector(&Type::i32(cx), a / 4),
- align_units),
- _ => bug!("unsupported enum alignment: {}", align)
- };
- assert_eq!(machine::llalign_of_min(cx, fill_ty), align);
+ let size = size.bytes();
+ let align = align.abi();
+ let discr_ty = Type::from_integer(cx, discr);
+ let discr_size = discr.size().bytes();
+ let padded_discr_size = roundup(discr_size, align as u32);
+ let variant_part_size = size-padded_discr_size;
+ let variant_fill = union_fill(cx, variant_part_size, align);
+
+ assert_eq!(machine::llalign_of_min(cx, variant_fill), align as u32);
assert_eq!(padded_discr_size % discr_size, 0); // Ensure discr_ty can fill pad evenly
let fields: Vec<Type> =
[discr_ty,
Type::array(&discr_ty, (padded_discr_size - discr_size)/discr_size),
- fill_ty].iter().cloned().collect();
+ variant_fill].iter().cloned().collect();
match name {
None => {
Type::struct_(cx, &fields[..], false)
}
}
}
+ _ => bug!("Unsupported type {} represented as {:#?}", t, l)
+ }
+}
+
+fn union_fill(cx: &CrateContext, size: u64, align: u64) -> Type {
+ assert_eq!(size%align, 0);
+ assert_eq!(align.count_ones(), 1, "Alignment must be a power fof 2. Got {}", align);
+ let align_units = size/align;
+ let dl = &cx.tcx().data_layout;
+ let layout_align = layout::Align::from_bytes(align, align).unwrap();
+ if let Some(ity) = layout::Integer::for_abi_align(dl, layout_align) {
+ Type::array(&Type::from_integer(cx, ity), align_units)
+ } else {
+ Type::array(&Type::vector(&Type::i32(cx), align/4),
+ align_units)
}
}
-fn struct_llfields<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, st: &Struct<'tcx>,
+
+fn struct_llfields<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, fields: &Vec<Ty<'tcx>>,
sizing: bool, dst: bool) -> Vec<Type> {
if sizing {
- st.fields.iter().filter(|&ty| !dst || type_is_sized(cx.tcx(), *ty))
+ fields.iter().filter(|&ty| !dst || type_is_sized(cx.tcx(), *ty))
.map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
} else {
- st.fields.iter().map(|&ty| type_of::in_memory_type_of(cx, ty)).collect()
+ fields.iter().map(|&ty| type_of::in_memory_type_of(cx, ty)).collect()
}
}
/// Obtain a representation of the discriminant sufficient to translate
/// destructuring; this may or may not involve the actual discriminant.
pub fn trans_switch<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- r: &Repr<'tcx>,
+ t: Ty<'tcx>,
scrutinee: ValueRef,
range_assert: bool)
-> (BranchKind, Option<ValueRef>) {
- match *r {
- CEnum(..) | General(..) |
- RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => {
- (BranchKind::Switch, Some(trans_get_discr(bcx, r, scrutinee, None, range_assert)))
+ let l = bcx.ccx().layout_of(t);
+ match *l {
+ layout::CEnum { .. } | layout::General { .. } |
+ layout::RawNullablePointer { .. } | layout::StructWrappedNullablePointer { .. } => {
+ (BranchKind::Switch, Some(trans_get_discr(bcx, t, scrutinee, None, range_assert)))
}
- Univariant(..) | UntaggedUnion(..) => {
+ layout::Univariant { .. } | layout::UntaggedUnion { .. } => {
// N.B.: Univariant means <= 1 enum variants (*not* == 1 variants).
(BranchKind::Single, None)
- }
+ },
+ _ => bug!("{} is not an enum.", t)
}
}
-pub fn is_discr_signed<'tcx>(r: &Repr<'tcx>) -> bool {
- match *r {
- CEnum(ity, ..) => ity.is_signed(),
- General(ity, _) => ity.is_signed(),
- Univariant(..) | UntaggedUnion(..) => false,
- RawNullablePointer { .. } => false,
- StructWrappedNullablePointer { .. } => false,
+pub fn is_discr_signed<'tcx>(l: &layout::Layout) -> bool {
+ match *l {
+ layout::CEnum { signed, .. }=> signed,
+ _ => false,
}
}
/// Obtain the actual discriminant of a value.
-pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
+pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>,
scrutinee: ValueRef, cast_to: Option<Type>,
range_assert: bool)
-> ValueRef {
- debug!("trans_get_discr r: {:?}", r);
- let val = match *r {
- CEnum(ity, min, max) => {
- load_discr(bcx, ity, scrutinee, min, max, range_assert)
+ let (def, substs) = match t.sty {
+ ty::TyAdt(ref def, substs) if def.adt_kind() == AdtKind::Enum => (def, substs),
+ _ => bug!("{} is not an enum", t)
+ };
+
+ debug!("trans_get_discr t: {:?}", t);
+ let l = bcx.ccx().layout_of(t);
+
+ let val = match *l {
+ layout::CEnum { discr, min, max, .. } => {
+ load_discr(bcx, discr, scrutinee, min, max, range_assert)
}
- General(ity, ref cases) => {
+ layout::General { discr, .. } => {
let ptr = StructGEP(bcx, scrutinee, 0);
- load_discr(bcx, ity, ptr, Disr(0), Disr(cases.len() as u64 - 1),
+ load_discr(bcx, discr, ptr, 0, def.variants.len() as u64 - 1,
range_assert)
}
- Univariant(..) | UntaggedUnion(..) => C_u8(bcx.ccx(), 0),
- RawNullablePointer { nndiscr, nnty, .. } => {
- let cmp = if nndiscr == Disr(0) { IntEQ } else { IntNE };
- let llptrty = type_of::sizing_type_of(bcx.ccx(), nnty);
+ layout::Univariant { .. } | layout::UntaggedUnion { .. } => C_u8(bcx.ccx(), 0),
+ layout::RawNullablePointer { nndiscr, .. } => {
+ let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
+ let llptrty = type_of::sizing_type_of(bcx.ccx(),
+ monomorphize::field_ty(bcx.ccx().tcx(), substs,
+ &def.variants[nndiscr as usize].fields[0]));
ICmp(bcx, cmp, Load(bcx, scrutinee), C_null(llptrty), DebugLoc::None)
}
- StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
+ layout::StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
struct_wrapped_nullable_bitdiscr(bcx, nndiscr, discrfield, scrutinee)
- }
+ },
+ _ => bug!("{} is not an enum", t)
};
match cast_to {
None => val,
- Some(llty) => if is_discr_signed(r) { SExt(bcx, val, llty) } else { ZExt(bcx, val, llty) }
+ Some(llty) => if is_discr_signed(&l) { SExt(bcx, val, llty) } else { ZExt(bcx, val, llty) }
}
}
-fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField,
+fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: u64, discrfield: &layout::FieldPath,
scrutinee: ValueRef) -> ValueRef {
- let llptrptr = GEPi(bcx, scrutinee, &discrfield[..]);
+ let llptrptr = GEPi(bcx, scrutinee,
+ &discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>()[..]);
let llptr = Load(bcx, llptrptr);
- let cmp = if nndiscr == Disr(0) { IntEQ } else { IntNE };
+ let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
ICmp(bcx, cmp, llptr, C_null(val_ty(llptr)), DebugLoc::None)
}
/// Helper for cases where the discriminant is simply loaded.
-fn load_discr(bcx: Block, ity: IntType, ptr: ValueRef, min: Disr, max: Disr,
+fn load_discr(bcx: Block, ity: layout::Integer, ptr: ValueRef, min: u64, max: u64,
range_assert: bool)
-> ValueRef {
- let llty = ll_inttype(bcx.ccx(), ity);
+ let llty = Type::from_integer(bcx.ccx(), ity);
assert_eq!(val_ty(ptr), llty.ptr_to());
- let bits = machine::llbitsize_of_real(bcx.ccx(), llty);
+ let bits = ity.size().bits();
assert!(bits <= 64);
let bits = bits as usize;
- let mask = Disr(!0u64 >> (64 - bits));
+ let mask = !0u64 >> (64 - bits);
// For a (max) discr of -1, max will be `-1 as usize`, which overflows.
// However, that is fine here (it would still represent the full range),
- if max.wrapping_add(Disr(1)) & mask == min & mask || !range_assert {
+ if max.wrapping_add(1) & mask == min & mask || !range_assert {
// i.e., if the range is everything. The lo==hi case would be
// rejected by the LLVM verifier (it would mean either an
// empty set, which is impossible, or the entire range of the
} else {
// llvm::ConstantRange can deal with ranges that wrap around,
// so an overflow on (max + 1) is fine.
- LoadRangeAssert(bcx, ptr, min.0, max.0.wrapping_add(1), /* signed: */ True)
+ LoadRangeAssert(bcx, ptr, min, max.wrapping_add(1), /* signed: */ True)
}
}
/// discriminant-like value returned by `trans_switch`.
///
/// This should ideally be less tightly tied to `_match`.
-pub fn trans_case<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr, discr: Disr)
+pub fn trans_case<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, value: Disr)
-> ValueRef {
- match *r {
- CEnum(ity, ..) => {
- C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true)
- }
- General(ity, _) => {
- C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true)
+ let l = bcx.ccx().layout_of(t);
+ match *l {
+ layout::CEnum { discr, .. }
+ | layout::General { discr, .. }=> {
+ C_integral(Type::from_integer(bcx.ccx(), discr), value.0, true)
}
- Univariant(..) | UntaggedUnion(..) => {
- bug!("no cases for univariants, structs or unions")
+ layout::RawNullablePointer { .. } |
+ layout::StructWrappedNullablePointer { .. } => {
+ assert!(value == Disr(0) || value == Disr(1));
+ C_bool(bcx.ccx(), value != Disr(0))
}
- RawNullablePointer { .. } |
- StructWrappedNullablePointer { .. } => {
- assert!(discr == Disr(0) || discr == Disr(1));
- C_bool(bcx.ccx(), discr != Disr(0))
+ _ => {
+ bug!("{} does not have a discriminant. Represented as {:#?}", t, l);
}
}
}
/// Set the discriminant for a new value of the given case of the given
/// representation.
-pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
- val: ValueRef, discr: Disr) {
- match *r {
- CEnum(ity, min, max) => {
- assert_discr_in_range(ity, min, max, discr);
- Store(bcx, C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true),
+pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>,
+ val: ValueRef, to: Disr) {
+ let l = bcx.ccx().layout_of(t);
+ match *l {
+ layout::CEnum{ discr, min, max, .. } => {
+ assert_discr_in_range(Disr(min), Disr(max), to);
+ Store(bcx, C_integral(Type::from_integer(bcx.ccx(), discr), to.0, true),
val);
}
- General(ity, _) => {
- Store(bcx, C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true),
+ layout::General{ discr, .. } => {
+ Store(bcx, C_integral(Type::from_integer(bcx.ccx(), discr), to.0, true),
StructGEP(bcx, val, 0));
}
- Univariant(_) => {
- assert_eq!(discr, Disr(0));
- }
- UntaggedUnion(..) => {
- assert_eq!(discr, Disr(0));
+ layout::Univariant { .. }
+ | layout::UntaggedUnion { .. }
+ | layout::Vector { .. } => {
+ assert_eq!(to, Disr(0));
}
- RawNullablePointer { nndiscr, nnty, ..} => {
- if discr != nndiscr {
+ layout::RawNullablePointer { nndiscr, .. } => {
+ let nnty = compute_fields(bcx.ccx(), t, nndiscr as usize, false)[0];
+ if to.0 != nndiscr {
let llptrty = type_of::sizing_type_of(bcx.ccx(), nnty);
Store(bcx, C_null(llptrty), val);
}
}
- StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
- if discr != nndiscr {
- let llptrptr = GEPi(bcx, val, &discrfield[..]);
- let llptrty = val_ty(llptrptr).element_type();
- Store(bcx, C_null(llptrty), llptrptr);
+ layout::StructWrappedNullablePointer { nndiscr, ref discrfield, ref nonnull, .. } => {
+ if to.0 != nndiscr {
+ if target_sets_discr_via_memset(bcx) {
+ // Issue #34427: As workaround for LLVM bug on
+ // ARM, use memset of 0 on whole struct rather
+ // than storing null to single target field.
+ let b = B(bcx);
+ let llptr = b.pointercast(val, Type::i8(b.ccx).ptr_to());
+ let fill_byte = C_u8(b.ccx, 0);
+ let size = C_uint(b.ccx, nonnull.stride().bytes());
+ let align = C_i32(b.ccx, nonnull.align.abi() as i32);
+ base::call_memset(&b, llptr, fill_byte, size, align, false);
+ } else {
+ let path = discrfield.iter().map(|&i| i as usize).collect::<Vec<_>>();
+ let llptrptr = GEPi(bcx, val, &path[..]);
+ let llptrty = val_ty(llptrptr).element_type();
+ Store(bcx, C_null(llptrty), llptrptr);
+ }
}
}
+ _ => bug!("Cannot handle {} represented as {:#?}", t, l)
}
}
-fn assert_discr_in_range(ity: IntType, min: Disr, max: Disr, discr: Disr) {
- match ity {
- attr::UnsignedInt(_) => {
- assert!(min <= discr);
- assert!(discr <= max);
- },
- attr::SignedInt(_) => {
- assert!(min.0 as i64 <= discr.0 as i64);
- assert!(discr.0 as i64 <= max.0 as i64);
- },
+fn target_sets_discr_via_memset<'blk, 'tcx>(bcx: Block<'blk, 'tcx>) -> bool {
+ bcx.sess().target.target.arch == "arm" || bcx.sess().target.target.arch == "aarch64"
+}
+
+fn assert_discr_in_range(min: Disr, max: Disr, discr: Disr) {
+ if min <= max {
+ assert!(min <= discr && discr <= max)
+ } else {
+ assert!(min <= discr || discr <= max)
}
}
/// Access a field, at a point when the value's case is known.
-pub fn trans_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
+pub fn trans_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>,
val: MaybeSizedValue, discr: Disr, ix: usize) -> ValueRef {
- trans_field_ptr_builder(&bcx.build(), r, val, discr, ix)
+ trans_field_ptr_builder(&bcx.build(), t, val, discr, ix)
}
/// Access a field, at a point when the value's case is known.
pub fn trans_field_ptr_builder<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>,
- r: &Repr<'tcx>,
+ t: Ty<'tcx>,
val: MaybeSizedValue,
discr: Disr, ix: usize)
-> ValueRef {
+ let l = bcx.ccx().layout_of(t);
+ debug!("trans_field_ptr_builder on {} represented as {:#?}", t, l);
// Note: if this ever needs to generate conditionals (e.g., if we
// decide to do some kind of cdr-coding-like non-unique repr
// someday), it will need to return a possibly-new bcx as well.
- match *r {
- CEnum(..) => {
- bug!("element access in C-like enum")
- }
- Univariant(ref st) => {
+ match *l {
+ layout::Univariant { ref variant, .. } => {
assert_eq!(discr, Disr(0));
- struct_field_ptr(bcx, st, val, ix, false)
- }
- General(_, ref cases) => {
- struct_field_ptr(bcx, &cases[discr.0 as usize], val, ix + 1, true)
- }
- UntaggedUnion(ref un) => {
- let ty = type_of::in_memory_type_of(bcx.ccx(), un.fields[ix]);
+ struct_field_ptr(bcx, &variant,
+ &compute_fields(bcx.ccx(), t, 0, false),
+ val, ix, false)
+ }
+ layout::Vector { count, .. } => {
+ assert_eq!(discr.0, 0);
+ assert!((ix as u64) < count);
+ bcx.struct_gep(val.value, ix)
+ }
+ layout::General { discr: d, ref variants, .. } => {
+ let mut fields = compute_fields(bcx.ccx(), t, discr.0 as usize, false);
+ fields.insert(0, d.to_ty(&bcx.ccx().tcx(), false));
+ struct_field_ptr(bcx, &variants[discr.0 as usize],
+ &fields,
+ val, ix + 1, true)
+ }
+ layout::UntaggedUnion { .. } => {
+ let fields = compute_fields(bcx.ccx(), t, 0, false);
+ let ty = type_of::in_memory_type_of(bcx.ccx(), fields[ix]);
if bcx.is_unreachable() { return C_undef(ty.ptr_to()); }
bcx.pointercast(val.value, ty.ptr_to())
}
- RawNullablePointer { nndiscr, ref nullfields, .. } |
- StructWrappedNullablePointer { nndiscr, ref nullfields, .. } if discr != nndiscr => {
+ layout::RawNullablePointer { nndiscr, .. } |
+ layout::StructWrappedNullablePointer { nndiscr, .. } if discr.0 != nndiscr => {
+ let nullfields = compute_fields(bcx.ccx(), t, (1-nndiscr) as usize, false);
// The unit-like case might have a nonzero number of unit-like fields.
// (e.d., Result of Either with (), as one side.)
let ty = type_of::type_of(bcx.ccx(), nullfields[ix]);
if bcx.is_unreachable() { return C_undef(ty.ptr_to()); }
bcx.pointercast(val.value, ty.ptr_to())
}
- RawNullablePointer { nndiscr, nnty, .. } => {
+ layout::RawNullablePointer { nndiscr, .. } => {
+ let nnty = compute_fields(bcx.ccx(), t, nndiscr as usize, false)[0];
assert_eq!(ix, 0);
- assert_eq!(discr, nndiscr);
+ assert_eq!(discr.0, nndiscr);
let ty = type_of::type_of(bcx.ccx(), nnty);
if bcx.is_unreachable() { return C_undef(ty.ptr_to()); }
bcx.pointercast(val.value, ty.ptr_to())
}
- StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
- assert_eq!(discr, nndiscr);
- struct_field_ptr(bcx, nonnull, val, ix, false)
+ layout::StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
+ assert_eq!(discr.0, nndiscr);
+ struct_field_ptr(bcx, &nonnull,
+ &compute_fields(bcx.ccx(), t, discr.0 as usize, false),
+ val, ix, false)
}
+ _ => bug!("element access in type without elements: {} represented as {:#?}", t, l)
}
}
fn struct_field_ptr<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>,
- st: &Struct<'tcx>, val: MaybeSizedValue,
+ st: &layout::Struct, fields: &Vec<Ty<'tcx>>, val: MaybeSizedValue,
ix: usize, needs_cast: bool) -> ValueRef {
let ccx = bcx.ccx();
- let fty = st.fields[ix];
+ let fty = fields[ix];
let ll_fty = type_of::in_memory_type_of(bcx.ccx(), fty);
if bcx.is_unreachable() {
return C_undef(ll_fty.ptr_to());
}
let ptr_val = if needs_cast {
- let fields = st.fields.iter().map(|&ty| {
+ let fields = fields.iter().map(|&ty| {
type_of::in_memory_type_of(ccx, ty)
}).collect::<Vec<_>>();
let real_ty = Type::struct_(ccx, &fields[..], st.packed);
let meta = val.meta;
- // Calculate the unaligned offset of the unsized field.
- let mut offset = 0;
- for &ty in &st.fields[0..ix] {
- let llty = type_of::sizing_type_of(ccx, ty);
- let type_align = type_of::align_of(ccx, ty);
- offset = roundup(offset, type_align);
- offset += machine::llsize_of_alloc(ccx, llty);
- }
+
+ let offset = st.offset_of_field(ix).bytes();
let unaligned_offset = C_uint(bcx.ccx(), offset);
// Get the alignment of the field
/// Currently the returned value has the same size as the type, but
/// this could be changed in the future to avoid allocating unnecessary
/// space after values of shorter-than-maximum cases.
-pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr: Disr,
+pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, discr: Disr,
vals: &[ValueRef]) -> ValueRef {
- match *r {
- CEnum(ity, min, max) => {
+ let l = ccx.layout_of(t);
+ let dl = &ccx.tcx().data_layout;
+ match *l {
+ layout::CEnum { discr: d, min, max, .. } => {
assert_eq!(vals.len(), 0);
- assert_discr_in_range(ity, min, max, discr);
- C_integral(ll_inttype(ccx, ity), discr.0, true)
- }
- General(ity, ref cases) => {
- let case = &cases[discr.0 as usize];
- let (max_sz, _) = union_size_and_align(&cases[..]);
- let lldiscr = C_integral(ll_inttype(ccx, ity), discr.0 as u64, true);
- let mut f = vec![lldiscr];
- f.extend_from_slice(vals);
- let mut contents = build_const_struct(ccx, case, &f[..]);
- contents.extend_from_slice(&[padding(ccx, max_sz - case.size)]);
+ assert_discr_in_range(Disr(min), Disr(max), discr);
+ C_integral(Type::from_integer(ccx, d), discr.0, true)
+ }
+ layout::General { discr: d, ref variants, .. } => {
+ let variant = &variants[discr.0 as usize];
+ let lldiscr = C_integral(Type::from_integer(ccx, d), discr.0 as u64, true);
+ let mut vals_with_discr = vec![lldiscr];
+ vals_with_discr.extend_from_slice(vals);
+ let mut contents = build_const_struct(ccx, &variant.offset_after_field[..],
+ &vals_with_discr[..], variant.packed);
+ let needed_padding = l.size(dl).bytes() - variant.min_size().bytes();
+ if needed_padding > 0 {
+ contents.push(padding(ccx, needed_padding));
+ }
C_struct(ccx, &contents[..], false)
}
- UntaggedUnion(ref un) => {
+ layout::UntaggedUnion { ref variants, .. }=> {
assert_eq!(discr, Disr(0));
- let contents = build_const_union(ccx, un, vals[0]);
- C_struct(ccx, &contents, un.packed)
+ let contents = build_const_union(ccx, variants, vals[0]);
+ C_struct(ccx, &contents, variants.packed)
}
- Univariant(ref st) => {
+ layout::Univariant { ref variant, .. } => {
assert_eq!(discr, Disr(0));
- let contents = build_const_struct(ccx, st, vals);
- C_struct(ccx, &contents[..], st.packed)
+ let contents = build_const_struct(ccx,
+ &variant.offset_after_field[..], vals, variant.packed);
+ C_struct(ccx, &contents[..], variant.packed)
+ }
+ layout::Vector { .. } => {
+ C_vector(vals)
}
- RawNullablePointer { nndiscr, nnty, .. } => {
- if discr == nndiscr {
+ layout::RawNullablePointer { nndiscr, .. } => {
+ let nnty = compute_fields(ccx, t, nndiscr as usize, false)[0];
+ if discr.0 == nndiscr {
assert_eq!(vals.len(), 1);
vals[0]
} else {
C_null(type_of::sizing_type_of(ccx, nnty))
}
}
- StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
- if discr == nndiscr {
+ layout::StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => {
+ if discr.0 == nndiscr {
C_struct(ccx, &build_const_struct(ccx,
- nonnull,
- vals),
+ &nonnull.offset_after_field[..],
+ vals, nonnull.packed),
false)
} else {
- let vals = nonnull.fields.iter().map(|&ty| {
+ let fields = compute_fields(ccx, t, nndiscr as usize, false);
+ let vals = fields.iter().map(|&ty| {
// Always use null even if it's not the `discrfield`th
// field; see #8506.
C_null(type_of::sizing_type_of(ccx, ty))
}).collect::<Vec<ValueRef>>();
C_struct(ccx, &build_const_struct(ccx,
- nonnull,
- &vals[..]),
+ &nonnull.offset_after_field[..],
+ &vals[..],
+ false),
false)
}
}
+ _ => bug!("trans_const: cannot handle type {} repreented as {:#?}", t, l)
}
}
-/// Compute struct field offsets relative to struct begin.
-fn compute_struct_field_offsets<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- st: &Struct<'tcx>) -> Vec<u64> {
- let mut offsets = vec!();
-
- let mut offset = 0;
- for &ty in &st.fields {
- let llty = type_of::sizing_type_of(ccx, ty);
- if !st.packed {
- let type_align = type_of::align_of(ccx, ty);
- offset = roundup(offset, type_align);
- }
- offsets.push(offset);
- offset += machine::llsize_of_alloc(ccx, llty);
- }
- assert_eq!(st.fields.len(), offsets.len());
- offsets
-}
-
/// Building structs is a little complicated, because we might need to
/// insert padding if a field's value is less aligned than its type.
///
/// a two-element struct will locate it at offset 4, and accesses to it
/// will read the wrong memory.
fn build_const_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- st: &Struct<'tcx>, vals: &[ValueRef])
+ offset_after_field: &[layout::Size],
+ vals: &[ValueRef],
+ packed: bool)
-> Vec<ValueRef> {
- assert_eq!(vals.len(), st.fields.len());
+ assert_eq!(vals.len(), offset_after_field.len());
- let target_offsets = compute_struct_field_offsets(ccx, st);
+ if vals.len() == 0 {
+ return Vec::new();
+ }
// offset of current value
let mut offset = 0;
let mut cfields = Vec::new();
+ let target_offsets = offset_after_field.iter().map(|i| i.bytes());
for (&val, target_offset) in vals.iter().zip(target_offsets) {
- if !st.packed {
+ assert!(!is_undef(val));
+ cfields.push(val);
+ offset += machine::llsize_of_alloc(ccx, val_ty(val));
+ if !packed {
let val_align = machine::llalign_of_min(ccx, val_ty(val));
offset = roundup(offset, val_align);
}
cfields.push(padding(ccx, target_offset - offset));
offset = target_offset;
}
- assert!(!is_undef(val));
- cfields.push(val);
- offset += machine::llsize_of_alloc(ccx, val_ty(val));
}
- assert!(st.sized && offset <= st.size);
- if offset != st.size {
- cfields.push(padding(ccx, st.size - offset));
+ let size = offset_after_field.last().unwrap();
+ if offset < size.bytes() {
+ cfields.push(padding(ccx, size.bytes() - offset));
}
cfields
}
fn build_const_union<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- un: &Union<'tcx>,
+ un: &layout::Union,
field_val: ValueRef)
-> Vec<ValueRef> {
let mut cfields = vec![field_val];
let offset = machine::llsize_of_alloc(ccx, val_ty(field_val));
- let size = roundup(un.min_size, un.align);
+ let size = un.stride().bytes();
if offset != size {
cfields.push(padding(ccx, size - offset));
}
///
/// (Not to be confused with `common::const_get_elt`, which operates on
/// raw LLVM-level structs and arrays.)
-pub fn const_get_field(r: &Repr, val: ValueRef, _discr: Disr,
+pub fn const_get_field<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>,
+ val: ValueRef, _discr: Disr,
ix: usize) -> ValueRef {
- match *r {
- CEnum(..) => bug!("element access in C-like enum const"),
- Univariant(..) => const_struct_field(val, ix),
- UntaggedUnion(..) => const_struct_field(val, 0),
- General(..) => const_struct_field(val, ix + 1),
- RawNullablePointer { .. } => {
+ let l = ccx.layout_of(t);
+ match *l {
+ layout::CEnum { .. } => bug!("element access in C-like enum const"),
+ layout::Univariant { .. } | layout::Vector { .. } => const_struct_field(val, ix),
+ layout::UntaggedUnion { .. } => const_struct_field(val, 0),
+ layout::General { .. } => const_struct_field(val, ix + 1),
+ layout::RawNullablePointer { .. } => {
assert_eq!(ix, 0);
val
},
- StructWrappedNullablePointer{ .. } => const_struct_field(val, ix)
+ layout::StructWrappedNullablePointer{ .. } => const_struct_field(val, ix),
+ _ => bug!("{} does not have fields.", t)
}
}
use util::common::time;
use util::fs::fix_windows_verbatim_for_gcc;
use rustc::dep_graph::DepNode;
+use rustc::hir::def_id::CrateNum;
use rustc::hir::svh::Svh;
use rustc_back::tempdir::TempDir;
use rustc_incremental::IncrementalHashesMap;
}
pub fn each_linked_rlib(sess: &Session,
- f: &mut FnMut(ast::CrateNum, &Path)) {
+ f: &mut FnMut(CrateNum, &Path)) {
let crates = sess.cstore.used_crates(LinkagePreference::RequireStatic).into_iter();
let fmts = sess.dependency_formats.borrow();
let fmts = fmts.get(&config::CrateTypeExecutable)
bug!("could not find formats for rlibs")
});
for (cnum, path) in crates {
- match fmts[cnum as usize - 1] {
+ match fmts[cnum.as_usize() - 1] {
Linkage::NotLinked | Linkage::IncludedFromDylib => continue,
_ => {}
}
let empty_vec = Vec::new();
let empty_str = String::new();
let args = sess.opts.cg.link_args.as_ref().unwrap_or(&empty_vec);
- let mut args = args.iter().chain(used_link_args.iter());
+ let more_args = &sess.opts.cg.link_arg;
+ let mut args = args.iter().chain(more_args.iter()).chain(used_link_args.iter());
let relocation_model = sess.opts.cg.relocation_model.as_ref()
.unwrap_or(&empty_str);
if (t.options.relocation_model == "pic" || *relocation_model == "pic")
if let Some(ref args) = sess.opts.cg.link_args {
cmd.args(args);
}
+ cmd.args(&sess.opts.cg.link_arg);
cmd.args(&used_link_args);
}
// appear statically in an existing dylib, meaning we'll pick up all the
// symbols from the dylib.
let src = sess.cstore.used_crate_source(cnum);
- match data[cnum as usize - 1] {
+ match data[cnum.as_usize() - 1] {
// compiler-builtins are always placed last to ensure that they're
// linked correctly.
_ if sess.cstore.is_compiler_builtins(cnum) => {
sess: &Session,
tmpdir: &Path,
crate_type: config::CrateType,
- cnum: ast::CrateNum) {
+ cnum: CrateNum) {
let src = sess.cstore.used_crate_source(cnum);
let cratepath = &src.rlib.unwrap().0;
if !sess.lto() && crate_type != config::CrateTypeDylib {
use back::archive;
use middle::dependency_format::Linkage;
+use rustc::hir::def_id::CrateNum;
use session::Session;
use session::config::CrateType;
use session::config;
-use syntax::ast;
/// For all the linkers we support, and information they might
/// need out of the shared crate context before we get rid of it.
let deps = formats[&crate_type].iter();
symbols.extend(deps.enumerate().filter_map(|(i, f)| {
if *f == Linkage::Static {
- Some((i + 1) as ast::CrateNum)
+ Some(CrateNum::new(i + 1))
} else {
None
}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::collections::HashSet;
+use std::env;
+use std::path::{Path, PathBuf};
+use std::fs;
+
+use rustc::hir::def_id::CrateNum;
+
+pub struct RPathConfig<'a> {
+ pub used_crates: Vec<(CrateNum, Option<PathBuf>)>,
+ pub out_filename: PathBuf,
+ pub is_like_osx: bool,
+ pub has_rpath: bool,
+ pub linker_is_gnu: bool,
+ pub get_install_prefix_lib_path: &'a mut FnMut() -> PathBuf,
+}
+
+pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {
+ // No rpath on windows
+ if !config.has_rpath {
+ return Vec::new();
+ }
+
+ let mut flags = Vec::new();
+
+ debug!("preparing the RPATH!");
+
+ let libs = config.used_crates.clone();
+ let libs = libs.into_iter().filter_map(|(_, l)| l).collect::<Vec<_>>();
+ let rpaths = get_rpaths(config, &libs[..]);
+ flags.extend_from_slice(&rpaths_to_flags(&rpaths[..]));
+
+ // Use DT_RUNPATH instead of DT_RPATH if available
+ if config.linker_is_gnu {
+ flags.push("-Wl,--enable-new-dtags".to_string());
+ }
+
+ flags
+}
+
+fn rpaths_to_flags(rpaths: &[String]) -> Vec<String> {
+ let mut ret = Vec::new();
+ for rpath in rpaths {
+ ret.push(format!("-Wl,-rpath,{}", &(*rpath)));
+ }
+ return ret;
+}
+
+fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec<String> {
+ debug!("output: {:?}", config.out_filename.display());
+ debug!("libs:");
+ for libpath in libs {
+ debug!(" {:?}", libpath.display());
+ }
+
+ // Use relative paths to the libraries. Binaries can be moved
+ // as long as they maintain the relative relationship to the
+ // crates they depend on.
+ let rel_rpaths = get_rpaths_relative_to_output(config, libs);
+
+ // And a final backup rpath to the global library location.
+ let fallback_rpaths = vec!(get_install_prefix_rpath(config));
+
+ fn log_rpaths(desc: &str, rpaths: &[String]) {
+ debug!("{} rpaths:", desc);
+ for rpath in rpaths {
+ debug!(" {}", *rpath);
+ }
+ }
+
+ log_rpaths("relative", &rel_rpaths[..]);
+ log_rpaths("fallback", &fallback_rpaths[..]);
+
+ let mut rpaths = rel_rpaths;
+ rpaths.extend_from_slice(&fallback_rpaths[..]);
+
+ // Remove duplicates
+ let rpaths = minimize_rpaths(&rpaths[..]);
+ return rpaths;
+}
+
+fn get_rpaths_relative_to_output(config: &mut RPathConfig,
+ libs: &[PathBuf]) -> Vec<String> {
+ libs.iter().map(|a| get_rpath_relative_to_output(config, a)).collect()
+}
+
+fn get_rpath_relative_to_output(config: &mut RPathConfig, lib: &Path) -> String {
+ // Mac doesn't appear to support $ORIGIN
+ let prefix = if config.is_like_osx {
+ "@loader_path"
+ } else {
+ "$ORIGIN"
+ };
+
+ let cwd = env::current_dir().unwrap();
+ let mut lib = fs::canonicalize(&cwd.join(lib)).unwrap_or(cwd.join(lib));
+ lib.pop();
+ let mut output = cwd.join(&config.out_filename);
+ output.pop();
+ let output = fs::canonicalize(&output).unwrap_or(output);
+ let relative = path_relative_from(&lib, &output)
+ .expect(&format!("couldn't create relative path from {:?} to {:?}", output, lib));
+ // FIXME (#9639): This needs to handle non-utf8 paths
+ format!("{}/{}", prefix,
+ relative.to_str().expect("non-utf8 component in path"))
+}
+
+// This routine is adapted from the *old* Path's `path_relative_from`
+// function, which works differently from the new `relative_from` function.
+// In particular, this handles the case on unix where both paths are
+// absolute but with only the root as the common directory.
+fn path_relative_from(path: &Path, base: &Path) -> Option<PathBuf> {
+ use std::path::Component;
+
+ if path.is_absolute() != base.is_absolute() {
+ if path.is_absolute() {
+ Some(PathBuf::from(path))
+ } else {
+ None
+ }
+ } else {
+ let mut ita = path.components();
+ let mut itb = base.components();
+ let mut comps: Vec<Component> = vec![];
+ loop {
+ match (ita.next(), itb.next()) {
+ (None, None) => break,
+ (Some(a), None) => {
+ comps.push(a);
+ comps.extend(ita.by_ref());
+ break;
+ }
+ (None, _) => comps.push(Component::ParentDir),
+ (Some(a), Some(b)) if comps.is_empty() && a == b => (),
+ (Some(a), Some(b)) if b == Component::CurDir => comps.push(a),
+ (Some(_), Some(b)) if b == Component::ParentDir => return None,
+ (Some(a), Some(_)) => {
+ comps.push(Component::ParentDir);
+ for _ in itb {
+ comps.push(Component::ParentDir);
+ }
+ comps.push(a);
+ comps.extend(ita.by_ref());
+ break;
+ }
+ }
+ }
+ Some(comps.iter().map(|c| c.as_os_str()).collect())
+ }
+}
+
+
+fn get_install_prefix_rpath(config: &mut RPathConfig) -> String {
+ let path = (config.get_install_prefix_lib_path)();
+ let path = env::current_dir().unwrap().join(&path);
+ // FIXME (#9639): This needs to handle non-utf8 paths
+ path.to_str().expect("non-utf8 component in rpath").to_string()
+}
+
+fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
+ let mut set = HashSet::new();
+ let mut minimized = Vec::new();
+ for rpath in rpaths {
+ if set.insert(&rpath[..]) {
+ minimized.push(rpath.clone());
+ }
+ }
+ minimized
+}
+
+#[cfg(all(unix, test))]
+mod tests {
+ use super::{RPathConfig};
+ use super::{minimize_rpaths, rpaths_to_flags, get_rpath_relative_to_output};
+ use std::path::{Path, PathBuf};
+
+ #[test]
+ fn test_rpaths_to_flags() {
+ let flags = rpaths_to_flags(&[
+ "path1".to_string(),
+ "path2".to_string()
+ ]);
+ assert_eq!(flags,
+ ["-Wl,-rpath,path1",
+ "-Wl,-rpath,path2"]);
+ }
+
+ #[test]
+ fn test_minimize1() {
+ let res = minimize_rpaths(&[
+ "rpath1".to_string(),
+ "rpath2".to_string(),
+ "rpath1".to_string()
+ ]);
+ assert!(res == [
+ "rpath1",
+ "rpath2",
+ ]);
+ }
+
+ #[test]
+ fn test_minimize2() {
+ let res = minimize_rpaths(&[
+ "1a".to_string(),
+ "2".to_string(),
+ "2".to_string(),
+ "1a".to_string(),
+ "4a".to_string(),
+ "1a".to_string(),
+ "2".to_string(),
+ "3".to_string(),
+ "4a".to_string(),
+ "3".to_string()
+ ]);
+ assert!(res == [
+ "1a",
+ "2",
+ "4a",
+ "3",
+ ]);
+ }
+
+ #[test]
+ fn test_rpath_relative() {
+ if cfg!(target_os = "macos") {
+ let config = &mut RPathConfig {
+ used_crates: Vec::new(),
+ has_rpath: true,
+ is_like_osx: true,
+ linker_is_gnu: false,
+ out_filename: PathBuf::from("bin/rustc"),
+ get_install_prefix_lib_path: &mut || panic!(),
+ };
+ let res = get_rpath_relative_to_output(config,
+ Path::new("lib/libstd.so"));
+ assert_eq!(res, "@loader_path/../lib");
+ } else {
+ let config = &mut RPathConfig {
+ used_crates: Vec::new(),
+ out_filename: PathBuf::from("bin/rustc"),
+ get_install_prefix_lib_path: &mut || panic!(),
+ has_rpath: true,
+ is_like_osx: false,
+ linker_is_gnu: true,
+ };
+ let res = get_rpath_relative_to_output(config,
+ Path::new("lib/libstd.so"));
+ assert_eq!(res, "$ORIGIN/../lib");
+ }
+ }
+}
//! virtually impossible. Thus, symbol hash generation exclusively relies on
//! DefPaths which are much more robust in the face of changes to the code base.
-use common::{CrateContext, SharedCrateContext, gensym_name};
+use common::SharedCrateContext;
use monomorphize::Instance;
use util::sha2::{Digest, Sha256};
-use rustc::middle::{cstore, weak_lang_items};
-use rustc::hir::def_id::DefId;
+use rustc::middle::weak_lang_items;
+use rustc::hir::def_id::LOCAL_CRATE;
use rustc::hir::map as hir_map;
-use rustc::ty::{Ty, TyCtxt, TypeFoldable};
+use rustc::ty::{self, Ty, TypeFoldable};
+use rustc::ty::fold::TypeVisitor;
use rustc::ty::item_path::{self, ItemPathBuffer, RootMode};
use rustc::ty::subst::Substs;
use rustc::hir::map::definitions::{DefPath, DefPathData};
use syntax::parse::token::{self, InternedString};
use serialize::hex::ToHex;
-pub fn def_id_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> String {
- let def_path = tcx.def_path(def_id);
- def_path.to_string(tcx)
+use std::hash::Hasher;
+
+struct Sha256Hasher<'a>(&'a mut Sha256);
+
+impl<'a> Hasher for Sha256Hasher<'a> {
+ fn write(&mut self, msg: &[u8]) {
+ self.0.input(msg)
+ }
+
+ fn finish(&self) -> u64 {
+ bug!("Sha256Hasher::finish should not be called");
+ }
}
fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
// values for generic type parameters,
// if any.
- substs: Option<&Substs<'tcx>>)
+ substs: Option<&'tcx Substs<'tcx>>)
-> String {
debug!("get_symbol_hash(def_path={:?}, parameters={:?})",
def_path, substs);
let tcx = scx.tcx();
- return record_time(&tcx.sess.perf_stats.symbol_hash_time, || {
- let mut hash_state = scx.symbol_hasher().borrow_mut();
-
+ let mut hash_state = scx.symbol_hasher().borrow_mut();
+ record_time(&tcx.sess.perf_stats.symbol_hash_time, || {
hash_state.reset();
+ let mut hasher = Sha256Hasher(&mut hash_state);
// the main symbol name is not necessarily unique; hash in the
// compiler's internal def-path, guaranteeing each symbol has a
// truly unique path
- hash_state.input_str(&def_path.to_string(tcx));
+ def_path.deterministic_hash_to(tcx, &mut hasher);
// Include the main item-type. Note that, in this case, the
// assertions about `needs_subst` may not hold, but this item-type
// ought to be the same for every reference anyway.
+ let mut hasher = ty::util::TypeIdHasher::new(tcx, hasher);
assert!(!item_type.has_erasable_regions());
- let encoded_item_type = tcx.sess.cstore.encode_type(tcx, item_type, def_id_to_string);
- hash_state.input(&encoded_item_type[..]);
+ hasher.visit_ty(item_type);
// also include any type parameters (for generic items)
if let Some(substs) = substs {
- for t in substs.types() {
- assert!(!t.has_erasable_regions());
- assert!(!t.needs_subst());
- let encoded_type = tcx.sess.cstore.encode_type(tcx, t, def_id_to_string);
- hash_state.input(&encoded_type[..]);
- }
+ assert!(!substs.has_erasable_regions());
+ assert!(!substs.needs_subst());
+ substs.visit_with(&mut hasher);
}
-
- format!("h{}", truncated_hash_result(&mut *hash_state))
});
- fn truncated_hash_result(symbol_hasher: &mut Sha256) -> String {
- let output = symbol_hasher.result_bytes();
- // 64 bits should be enough to avoid collisions.
- output[.. 8].to_hex()
- }
+ // 64 bits should be enough to avoid collisions.
+ let output = hash_state.result_bytes();
+ format!("h{}", output[..8].to_hex())
}
impl<'a, 'tcx> Instance<'tcx> {
pub fn symbol_name(self, scx: &SharedCrateContext<'a, 'tcx>) -> String {
- let Instance { def: def_id, ref substs } = self;
+ let Instance { def: def_id, substs } = self;
debug!("symbol_name(def_id={:?}, substs={:?})",
def_id, substs);
scx.tcx().push_item_path(&mut buffer, def_id);
});
- mangle(buffer.names.into_iter(), Some(&hash[..]))
+ mangle(buffer.names.into_iter(), &hash)
}
}
-> String {
let empty_def_path = DefPath {
data: vec![],
- krate: cstore::LOCAL_CRATE,
+ krate: LOCAL_CRATE,
};
let hash = get_symbol_hash(scx, &empty_def_path, t, None);
let path = [token::intern_and_get_ident(prefix)];
- mangle(path.iter().cloned(), Some(&hash[..]))
-}
-
-/// Only symbols that are invisible outside their compilation unit should use a
-/// name generated by this function.
-pub fn internal_name_from_type_and_suffix<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- t: Ty<'tcx>,
- suffix: &str)
- -> String {
- let path = [token::intern(&t.to_string()).as_str(),
- gensym_name(suffix).as_str()];
- let def_path = DefPath {
- data: vec![],
- krate: cstore::LOCAL_CRATE,
- };
- let hash = get_symbol_hash(ccx.shared(), &def_path, t, None);
- mangle(path.iter().cloned(), Some(&hash[..]))
+ mangle(path.iter().cloned(), &hash)
}
// Name sanitation. LLVM will happily accept identifiers with weird names, but
return result;
}
-pub fn mangle<PI: Iterator<Item=InternedString>>(path: PI, hash: Option<&str>) -> String {
+fn mangle<PI: Iterator<Item=InternedString>>(path: PI, hash: &str) -> String {
// Follow C++ namespace-mangling style, see
// http://en.wikipedia.org/wiki/Name_mangling for more info.
//
push(&mut n, &data);
}
- if let Some(s) = hash {
- push(&mut n, s)
- }
+ push(&mut n, hash);
n.push('E'); // End name-sequence.
n
store_fat_ptr(bcx, base, info, dst, dst_ty);
}
- // This can be extended to enums and tuples in the future.
- (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => {
+ (&ty::TyAdt(def_a, substs_a), &ty::TyAdt(def_b, substs_b)) => {
assert_eq!(def_a, def_b);
- let src_repr = adt::represent_type(bcx.ccx(), src_ty);
- let src_fields = match &*src_repr {
- &adt::Repr::Univariant(ref s) => &s.fields,
- _ => bug!("struct has non-univariant repr"),
- };
- let dst_repr = adt::represent_type(bcx.ccx(), dst_ty);
- let dst_fields = match &*dst_repr {
- &adt::Repr::Univariant(ref s) => &s.fields,
- _ => bug!("struct has non-univariant repr"),
- };
+ let src_fields = def_a.variants[0].fields.iter().map(|f| {
+ monomorphize::field_ty(bcx.tcx(), substs_a, f)
+ });
+ let dst_fields = def_b.variants[0].fields.iter().map(|f| {
+ monomorphize::field_ty(bcx.tcx(), substs_b, f)
+ });
let src = adt::MaybeSizedValue::sized(src);
let dst = adt::MaybeSizedValue::sized(dst);
- let iter = src_fields.iter().zip(dst_fields).enumerate();
+ let iter = src_fields.zip(dst_fields).enumerate();
for (i, (src_fty, dst_fty)) in iter {
if type_is_zero_size(bcx.ccx(), dst_fty) {
continue;
}
- let src_f = adt::trans_field_ptr(bcx, &src_repr, src, Disr(0), i);
- let dst_f = adt::trans_field_ptr(bcx, &dst_repr, dst, Disr(0), i);
+ let src_f = adt::trans_field_ptr(bcx, src_ty, src, Disr(0), i);
+ let dst_f = adt::trans_field_ptr(bcx, dst_ty, dst, Disr(0), i);
if src_fty == dst_fty {
memcpy_ty(bcx, dst_f, src_f, src_fty);
} else {
if !fcx.fn_ty.ret.is_ignore() {
let dest = fcx.llretslotptr.get().unwrap();
let dest_val = adt::MaybeSizedValue::sized(dest); // Can return unsized value
- let repr = adt::represent_type(ccx, sig.output);
let mut llarg_idx = fcx.fn_ty.ret.is_indirect() as usize;
let mut arg_idx = 0;
for (i, arg_ty) in sig.inputs.into_iter().enumerate() {
- let lldestptr = adt::trans_field_ptr(bcx, &repr, dest_val, Disr::from(disr), i);
+ let lldestptr = adt::trans_field_ptr(bcx, sig.output, dest_val, Disr::from(disr), i);
let arg = &fcx.fn_ty.args[arg_idx];
arg_idx += 1;
let b = &bcx.build();
arg.store_fn_arg(b, &mut llarg_idx, lldestptr);
}
}
- adt::trans_set_discr(bcx, &repr, dest, disr);
+ adt::trans_set_discr(bcx, sig.output, dest, disr);
}
fcx.finish(bcx, DebugLoc::None);
cx.export_map(),
cx.link_meta(),
reachable_ids,
- cx.mir_map(),
- cx.tcx().map.krate());
+ cx.mir_map());
let mut compressed = cstore.metadata_encoding_version().to_vec();
compressed.extend_from_slice(&flate::deflate_bytes(&metadata));
.iter()
.cloned()
.filter(|trans_item|{
- let def_id = match *trans_item {
- TransItem::DropGlue(..) => {
- return false
- },
- TransItem::Fn(ref instance) => {
- instance.def
- }
- TransItem::Static(node_id) => {
- tcx.map.local_def_id(node_id)
- }
- };
-
- trans_item.explicit_linkage(tcx).is_some() ||
- attr::contains_extern_indicator(tcx.sess.diagnostic(),
- &tcx.get_attrs(def_id))
+ trans_item.explicit_linkage(tcx).is_some()
})
.map(|trans_item| symbol_map.get_or_compute(scx, trans_item))
.collect();
node: hir::ImplItemKind::Method(..), .. }) => {
let def_id = tcx.map.local_def_id(id);
let generics = tcx.lookup_generics(def_id);
- generics.parent_types == 0 && generics.types.is_empty()
+ let attributes = tcx.get_attrs(def_id);
+ (generics.parent_types == 0 && generics.types.is_empty()) &&
+ // Functions marked with #[inline] are only ever translated
+ // with "internal" linkage and are never exported.
+ !attr::requests_inline(&attributes[..])
}
_ => false
partitioning::partition(scx,
items.iter().cloned(),
strategy,
- &inlining_map,
- scx.reachable())
+ &inlining_map)
});
assert!(scx.tcx().sess.opts.cg.codegen_units == codegen_units.len() ||
#![allow(non_upper_case_globals)]
use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector};
-use abi::{FnType, ArgType};
+use abi::{self, FnType, ArgType};
use context::CrateContext;
use type_::Type;
-use std::cmp;
-
-fn align_up_to(off: usize, a: usize) -> usize {
- return (off + a - 1) / a * a;
-}
-
-fn align(off: usize, ty: Type) -> usize {
- let a = ty_align(ty);
- return align_up_to(off, a);
-}
-
-fn ty_align(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 8,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- 1
- } else {
- let str_tys = ty.field_types();
- str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t)))
- }
- }
- Array => {
- let elt = ty.element_type();
- ty_align(elt)
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- ty_align(elt) * len
- }
- _ => bug!("ty_align: unhandled type")
- }
-}
-
fn ty_size(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 8,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- let str_tys = ty.field_types();
- str_tys.iter().fold(0, |s, t| s + ty_size(*t))
- } else {
- let str_tys = ty.field_types();
- let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t));
- align(size, ty)
- }
- }
- Array => {
- let len = ty.array_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- _ => bug!("ty_size: unhandled type")
- }
+ abi::ty_size(ty, 8)
}
fn is_homogenous_aggregate_ty(ty: Type) -> Option<(Type, u64)> {
#![allow(non_upper_case_globals)]
use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector};
-use abi::{FnType, ArgType};
+use abi::{self, align_up_to, FnType, ArgType};
use context::CrateContext;
use type_::Type;
type TyAlignFn = fn(ty: Type) -> usize;
-fn align_up_to(off: usize, a: usize) -> usize {
- return (off + a - 1) / a * a;
-}
-
fn align(off: usize, ty: Type, align_fn: TyAlignFn) -> usize {
let a = align_fn(ty);
return align_up_to(off, a);
}
fn general_ty_align(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 4,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- 1
- } else {
- let str_tys = ty.field_types();
- str_tys.iter().fold(1, |a, t| cmp::max(a, general_ty_align(*t)))
- }
- }
- Array => {
- let elt = ty.element_type();
- general_ty_align(elt)
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- general_ty_align(elt) * len
- }
- _ => bug!("ty_align: unhandled type")
- }
+ abi::ty_align(ty, 4)
}
// For more information see:
use libc::c_uint;
use std::cmp;
use llvm;
-use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector};
-use abi::{ArgType, FnType};
+use llvm::{Integer, Pointer, Float, Double, Vector};
+use abi::{self, align_up_to, ArgType, FnType};
use context::CrateContext;
use type_::Type;
-fn align_up_to(off: usize, a: usize) -> usize {
- return (off + a - 1) / a * a;
-}
-
-fn align(off: usize, ty: Type) -> usize {
- let a = ty_align(ty);
- return align_up_to(off, a);
-}
-
fn ty_align(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 4,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- 1
- } else {
- let str_tys = ty.field_types();
- str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t)))
- }
- }
- Array => {
- let elt = ty.element_type();
- ty_align(elt)
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- ty_align(elt) * len
- }
- _ => bug!("ty_align: unhandled type")
- }
+ abi::ty_align(ty, 4)
}
fn ty_size(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 4,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- let str_tys = ty.field_types();
- str_tys.iter().fold(0, |s, t| s + ty_size(*t))
- } else {
- let str_tys = ty.field_types();
- let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t));
- align(size, ty)
- }
- }
- Array => {
- let len = ty.array_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- _ => bug!("ty_size: unhandled type")
- }
+ abi::ty_size(ty, 4)
}
fn classify_ret_ty(ccx: &CrateContext, ret: &mut ArgType) {
use libc::c_uint;
use std::cmp;
use llvm;
-use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector};
-use abi::{ArgType, FnType};
+use llvm::{Integer, Pointer, Float, Double, Vector};
+use abi::{self, align_up_to, ArgType, FnType};
use context::CrateContext;
use type_::Type;
-fn align_up_to(off: usize, a: usize) -> usize {
- return (off + a - 1) / a * a;
-}
-
-fn align(off: usize, ty: Type) -> usize {
- let a = ty_align(ty);
- return align_up_to(off, a);
-}
-
fn ty_align(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 8,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- 1
- } else {
- let str_tys = ty.field_types();
- str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t)))
- }
- }
- Array => {
- let elt = ty.element_type();
- ty_align(elt)
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- ty_align(elt) * len
- }
- _ => bug!("ty_align: unhandled type")
- }
+ abi::ty_align(ty, 8)
}
fn ty_size(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 8,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- let str_tys = ty.field_types();
- str_tys.iter().fold(0, |s, t| s + ty_size(*t))
- } else {
- let str_tys = ty.field_types();
- let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t));
- align(size, ty)
- }
- }
- Array => {
- let len = ty.array_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- _ => bug!("ty_size: unhandled type")
- }
+ abi::ty_size(ty, 8)
}
fn classify_ret_ty(ccx: &CrateContext, ret: &mut ArgType) {
use libc::c_uint;
use llvm;
-use llvm::{Integer, Pointer, Float, Double, Struct, Array};
-use abi::{FnType, ArgType};
+use llvm::{Integer, Pointer, Float, Double, Vector};
+use abi::{self, align_up_to, FnType, ArgType};
use context::CrateContext;
use type_::Type;
use std::cmp;
-fn align_up_to(off: usize, a: usize) -> usize {
- return (off + a - 1) / a * a;
-}
-
-fn align(off: usize, ty: Type) -> usize {
- let a = ty_align(ty);
- return align_up_to(off, a);
-}
-
fn ty_align(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 4,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- 1
- } else {
- let str_tys = ty.field_types();
- str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t)))
- }
- }
- Array => {
- let elt = ty.element_type();
- ty_align(elt)
- }
- _ => bug!("ty_size: unhandled type")
+ if ty.kind() == Vector {
+ bug!("ty_align: unhandled type")
+ } else {
+ abi::ty_align(ty, 4)
}
}
fn ty_size(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 4,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- let str_tys = ty.field_types();
- str_tys.iter().fold(0, |s, t| s + ty_size(*t))
- } else {
- let str_tys = ty.field_types();
- let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t));
- align(size, ty)
- }
- }
- Array => {
- let len = ty.array_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- _ => bug!("ty_size: unhandled type")
+ if ty.kind() == Vector {
+ bug!("ty_size: unhandled type")
+ } else {
+ abi::ty_size(ty, 4)
}
}
// Alignment of 128 bit types is not currently handled, this will
// need to be fixed when PowerPC vector support is added.
-use llvm::{Integer, Pointer, Float, Double, Struct, Array};
-use abi::{FnType, ArgType};
+use llvm::{Integer, Pointer, Float, Double, Struct, Vector, Array};
+use abi::{self, FnType, ArgType};
use context::CrateContext;
use type_::Type;
-use std::cmp;
-
-fn align_up_to(off: usize, a: usize) -> usize {
- return (off + a - 1) / a * a;
-}
-
-fn align(off: usize, ty: Type) -> usize {
- let a = ty_align(ty);
- return align_up_to(off, a);
-}
-
-fn ty_align(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 8,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- 1
- } else {
- let str_tys = ty.field_types();
- str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t)))
- }
- }
- Array => {
- let elt = ty.element_type();
- ty_align(elt)
- }
- _ => bug!("ty_align: unhandled type")
- }
-}
-
fn ty_size(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 8,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- let str_tys = ty.field_types();
- str_tys.iter().fold(0, |s, t| s + ty_size(*t))
- } else {
- let str_tys = ty.field_types();
- let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t));
- align(size, ty)
- }
- }
- Array => {
- let len = ty.array_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- _ => bug!("ty_size: unhandled type")
+ if ty.kind() == Vector {
+ bug!("ty_size: unhandled type")
+ } else {
+ abi::ty_size(ty, 8)
}
}
// for a pre-z13 machine or using -mno-vx.
use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector};
-use abi::{FnType, ArgType};
+use abi::{align_up_to, FnType, ArgType};
use context::CrateContext;
use type_::Type;
use std::cmp;
-fn align_up_to(off: usize, a: usize) -> usize {
- return (off + a - 1) / a * a;
-}
-
fn align(off: usize, ty: Type) -> usize {
let a = ty_align(ty);
return align_up_to(off, a);
use llvm::{Integer, Pointer, Float, Double};
use llvm::{Struct, Array, Attribute, Vector};
-use abi::{ArgType, FnType};
+use abi::{self, ArgType, FnType};
use context::CrateContext;
use type_::Type;
-use std::cmp;
-
#[derive(Clone, Copy, PartialEq)]
enum RegClass {
NoClass,
}
fn ty_align(ty: Type) -> usize {
- match ty.kind() {
- Integer => ((ty.int_width() as usize) + 7) / 8,
- Pointer => 8,
- Float => 4,
- Double => 8,
- Struct => {
- if ty.is_packed() {
- 1
- } else {
- let str_tys = ty.field_types();
- str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t)))
- }
- }
- Array => {
- let elt = ty.element_type();
- ty_align(elt)
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- ty_align(elt) * len
- }
- _ => bug!("ty_align: unhandled type")
- }
+ abi::ty_align(ty, 8)
}
fn ty_size(ty: Type) -> usize {
- match ty.kind() {
- Integer => (ty.int_width() as usize + 7) / 8,
- Pointer => 8,
- Float => 4,
- Double => 8,
- Struct => {
- let str_tys = ty.field_types();
- if ty.is_packed() {
- str_tys.iter().fold(0, |s, t| s + ty_size(*t))
- } else {
- let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t));
- align(size, ty)
- }
- }
- Array => {
- let len = ty.array_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
- Vector => {
- let len = ty.vector_length();
- let elt = ty.element_type();
- let eltsz = ty_size(elt);
- len * eltsz
- }
-
- _ => bug!("ty_size: unhandled type")
- }
+ abi::ty_size(ty, 8)
}
fn all_mem(cls: &mut [RegClass]) {
pub use self::CalleeData::*;
use arena::TypedArena;
-use back::symbol_names;
use llvm::{self, ValueRef, get_params};
use rustc::hir::def_id::DefId;
use rustc::ty::subst::Substs;
let trait_ref = tcx.normalize_associated_type(&ty::Binder(trait_ref));
match common::fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref) {
traits::VtableImpl(vtable_impl) => {
- let impl_did = vtable_impl.impl_def_id;
- let mname = tcx.item_name(def_id);
- // create a concatenated set of substitutions which includes
- // those from the impl and those from the method:
- let mth = meth::get_impl_method(tcx, substs, impl_did, vtable_impl.substs, mname);
+ let name = tcx.item_name(def_id);
+ let (def_id, substs) = traits::find_method(tcx, name, substs, &vtable_impl);
// Translate the function, bypassing Callee::def.
// That is because default methods have the same ID as the
// trait method used to look up the impl method that ended
// up here, so calling Callee::def would infinitely recurse.
- let (llfn, ty) = get_fn(ccx, mth.method.def_id, mth.substs);
+ let (llfn, ty) = get_fn(ccx, def_id, substs);
Callee::ptr(llfn, ty)
}
traits::VtableClosure(vtable_closure) => {
// The substitutions should have no type parameters remaining
// after passing through fulfill_obligation
let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
+ let instance = Instance::new(def_id, substs);
let llfn = closure::trans_closure_method(ccx,
vtable_closure.closure_def_id,
vtable_closure.substs,
+ instance,
trait_closure_kind);
let method_ty = def_ty(ccx.shared(), def_id, substs);
}
traits::VtableFnPointer(vtable_fn_pointer) => {
let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
- let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, vtable_fn_pointer.fn_ty);
+ let instance = Instance::new(def_id, substs);
+ let llfn = trans_fn_pointer_shim(ccx, instance,
+ trait_closure_kind,
+ vtable_fn_pointer.fn_ty);
let method_ty = def_ty(ccx.shared(), def_id, substs);
Callee::ptr(llfn, method_ty)
pub fn reify<'a>(self, ccx: &CrateContext<'a, 'tcx>) -> ValueRef {
match self.data {
Fn(llfn) => llfn,
- Virtual(idx) => {
- meth::trans_object_shim(ccx, self.ty, idx)
- }
+ Virtual(_) => meth::trans_object_shim(ccx, self),
NamedTupleConstructor(disr) => match self.ty.sty {
ty::TyFnDef(def_id, substs, _) => {
let instance = Instance::new(def_id, substs);
/// ```
///
/// but for the bare function type given.
-pub fn trans_fn_pointer_shim<'a, 'tcx>(
+fn trans_fn_pointer_shim<'a, 'tcx>(
ccx: &'a CrateContext<'a, 'tcx>,
+ method_instance: Instance<'tcx>,
closure_kind: ty::ClosureKind,
bare_fn_ty: Ty<'tcx>)
-> ValueRef
debug!("tuple_fn_ty: {:?}", tuple_fn_ty);
//
- let function_name =
- symbol_names::internal_name_from_type_and_suffix(ccx,
- bare_fn_ty,
- "fn_pointer_shim");
+ let function_name = method_instance.symbol_name(ccx.shared());
let llfn = declare::define_internal_fn(ccx, &function_name, tuple_fn_ty);
attributes::set_frame_pointer_elimination(ccx, llfn);
//
// except according to those terms.
use arena::TypedArena;
-use back::symbol_names;
use llvm::{self, ValueRef, get_params};
use rustc::hir::def_id::DefId;
use abi::{Abi, FnType};
pub fn trans_closure_method<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>,
closure_def_id: DefId,
substs: ty::ClosureSubsts<'tcx>,
+ method_instance: Instance<'tcx>,
trait_closure_kind: ty::ClosureKind)
-> ValueRef
{
// fn call_once(mut self, ...) { call_mut(&mut self, ...) }
//
// These are both the same at trans time.
- trans_fn_once_adapter_shim(ccx, closure_def_id, substs, llfn)
+ trans_fn_once_adapter_shim(ccx, closure_def_id, substs, method_instance, llfn)
}
_ => {
bug!("trans_closure_adapter_shim: cannot convert {:?} to {:?}",
ccx: &'a CrateContext<'a, 'tcx>,
closure_def_id: DefId,
substs: ty::ClosureSubsts<'tcx>,
+ method_instance: Instance<'tcx>,
llreffn: ValueRef)
-> ValueRef
{
}));
// Create the by-value helper.
- let function_name =
- symbol_names::internal_name_from_type_and_suffix(ccx, llonce_fn_ty, "once_shim");
+ let function_name = method_instance.symbol_name(ccx.shared());
let lloncefn = declare::declare_fn(ccx, &function_name, llonce_fn_ty);
attributes::set_frame_pointer_elimination(ccx, lloncefn);
//! The collection algorithm handles this more or less transparently. If it is
//! about to create a translation item for something with an external `DefId`,
//! it will take a look if the MIR for that item is available, and if so just
-//! proceed normally. If the MIR is not available, it assumes that that item is
+//! proceed normally. If the MIR is not available, it assumes that the item is
//! just linked to and no node is created; which is exactly what we want, since
//! no machine code should be generated in the current crate for such an item.
//!
use syntax_pos::DUMMY_SP;
use base::custom_coerce_unsize_info;
use context::SharedCrateContext;
-use common::{fulfill_obligation, normalize_and_test_predicates, type_is_sized};
+use common::{fulfill_obligation, type_is_sized};
use glue::{self, DropGlueKind};
-use meth;
use monomorphize::{self, Instance};
use util::nodemap::{FnvHashSet, FnvHashMap, DefIdMap};
callees: &[TransItem<'tcx>],
inlining_map: &mut InliningMap<'tcx>) {
let is_inlining_candidate = |trans_item: &TransItem<'tcx>| {
- trans_item.is_from_extern_crate() || trans_item.requests_inline(tcx)
+ trans_item.needs_local_copy(tcx)
};
let inlining_candidates = callees.into_iter()
fn visit_lvalue(&mut self,
lvalue: &mir::Lvalue<'tcx>,
- context: mir_visit::LvalueContext,
+ context: mir_visit::LvalueContext<'tcx>,
location: Location) {
debug!("visiting lvalue {:?}", *lvalue);
fn can_result_in_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> bool {
- if !match tcx.lookup_item_type(def_id).ty.sty {
- ty::TyFnDef(def_id, ..) => {
+ match tcx.lookup_item_type(def_id).ty.sty {
+ ty::TyFnDef(def_id, _, f) => {
// Some constructors also have type TyFnDef but they are
// always instantiated inline and don't result in
// translation item. Same for FFI functions.
- match tcx.map.get_if_local(def_id) {
- Some(hir_map::NodeVariant(_)) |
- Some(hir_map::NodeStructCtor(_)) |
- Some(hir_map::NodeForeignItem(_)) => false,
- Some(_) => true,
- None => {
- tcx.sess.cstore.variant_kind(def_id).is_none()
+ if let Some(hir_map::NodeForeignItem(_)) = tcx.map.get_if_local(def_id) {
+ return false;
+ }
+
+ if let Some(adt_def) = f.sig.output().skip_binder().ty_adt_def() {
+ if adt_def.variants.iter().any(|v| def_id == v.did) {
+ return false;
}
}
}
- ty::TyClosure(..) => true,
- _ => false
- } {
- return false;
+ ty::TyClosure(..) => {}
+ _ => return false
}
can_have_local_instance(tcx, def_id)
// Now that we know which impl is being used, we can dispatch to
// the actual function:
match vtbl {
- traits::VtableImpl(traits::VtableImplData {
- impl_def_id: impl_did,
- substs: impl_substs,
- nested: _ }) =>
- {
- let impl_method = meth::get_impl_method(tcx,
- rcvr_substs,
- impl_did,
- impl_substs,
- trait_method.name);
- Some((impl_method.method.def_id, &impl_method.substs))
+ traits::VtableImpl(impl_data) => {
+ Some(traits::find_method(tcx, trait_method.name, rcvr_substs, &impl_data))
}
// If we have a closure or a function pointer, we will also encounter
// the concrete closure/function somewhere else (during closure or fn
if let ty::TyTrait(ref trait_ty) = trait_ty.sty {
let poly_trait_ref = trait_ty.principal.with_self_ty(scx.tcx(), impl_ty);
+ let param_substs = Substs::empty(scx.tcx());
// Walk all methods of the trait, including those of its supertraits
- for trait_ref in traits::supertraits(scx.tcx(), poly_trait_ref) {
- let vtable = fulfill_obligation(scx, DUMMY_SP, trait_ref);
- match vtable {
- traits::VtableImpl(
- traits::VtableImplData {
- impl_def_id,
- substs,
- nested: _ }) => {
- let items = meth::get_vtable_methods(scx.tcx(), impl_def_id, substs)
- .into_iter()
- // filter out None values
- .filter_map(|opt_impl_method| opt_impl_method)
- // create translation items
- .filter_map(|impl_method| {
- if can_have_local_instance(scx.tcx(), impl_method.method.def_id) {
- Some(create_fn_trans_item(scx,
- impl_method.method.def_id,
- impl_method.substs,
- Substs::empty(scx.tcx())))
- } else {
- None
- }
- });
-
- output.extend(items);
- }
- _ => { /* */ }
- }
- }
+ let methods = traits::get_vtable_methods(scx.tcx(), poly_trait_ref);
+ let methods = methods.filter_map(|method| method)
+ .filter_map(|(def_id, substs)| do_static_dispatch(scx, def_id, substs, param_substs))
+ .filter(|&(def_id, _)| can_have_local_instance(scx.tcx(), def_id))
+ .map(|(def_id, substs)| create_fn_trans_item(scx, def_id, substs, param_substs));
+ output.extend(methods);
// Also add the destructor
let dg_type = glue::get_drop_glue_type(scx.tcx(), impl_ty);
- if glue::type_needs_drop(scx.tcx(), dg_type) {
- output.push(TransItem::DropGlue(DropGlueKind::Ty(dg_type)));
- }
+ output.push(TransItem::DropGlue(DropGlueKind::Ty(dg_type)));
}
}
let impl_substs = Substs::for_item(tcx, impl_def_id,
|_, _| tcx.mk_region(ty::ReErased),
|_, _| tcx.types.err);
- let mth = meth::get_impl_method(tcx,
- callee_substs,
- impl_def_id,
- impl_substs,
- method.name);
-
- assert!(mth.is_provided);
-
- let predicates = mth.method.predicates.predicates.subst(tcx, &mth.substs);
- if !normalize_and_test_predicates(tcx, predicates) {
+ let impl_data = traits::VtableImplData {
+ impl_def_id: impl_def_id,
+ substs: impl_substs,
+ nested: vec![]
+ };
+ let (def_id, substs) = traits::find_method(tcx,
+ method.name,
+ callee_substs,
+ &impl_data);
+
+ let predicates = tcx.lookup_predicates(def_id).predicates
+ .subst(tcx, substs);
+ if !traits::normalize_and_test_predicates(tcx, predicates) {
continue;
}
if can_have_local_instance(tcx, method.def_id) {
- let empty_substs = tcx.erase_regions(&mth.substs);
let item = create_fn_trans_item(scx,
method.def_id,
callee_substs,
- empty_substs);
+ tcx.erase_regions(&substs));
output.push(item);
}
}
llsize_of_alloc(ccx, llty) == 0
}
-/// Generates a unique symbol based off the name given. This is used to create
-/// unique symbols for things like closures.
-pub fn gensym_name(name: &str) -> ast::Name {
- let num = token::gensym(name).0;
- // use one colon which will get translated to a period by the mangler, and
- // we're guaranteed that `num` is globally unique for this crate.
- token::gensym(&format!("{}:{}", name, num))
-}
-
/*
* A note on nomenclature of linking: "extern", "foreign", and "upcall".
*
}
}
-pub fn const_to_int(v: ValueRef) -> i64 {
- unsafe {
- llvm::LLVMConstIntGetSExtValue(v)
- }
-}
-
pub fn const_to_uint(v: ValueRef) -> u64 {
unsafe {
llvm::LLVMConstIntGetZExtValue(v)
})
}
-/// Normalizes the predicates and checks whether they hold. If this
-/// returns false, then either normalize encountered an error or one
-/// of the predicates did not hold. Used when creating vtables to
-/// check for unsatisfiable methods.
-pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- predicates: Vec<ty::Predicate<'tcx>>)
- -> bool
-{
- debug!("normalize_and_test_predicates(predicates={:?})",
- predicates);
-
- tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {
- let mut selcx = SelectionContext::new(&infcx);
- let mut fulfill_cx = traits::FulfillmentContext::new();
- let cause = traits::ObligationCause::dummy();
- let traits::Normalized { value: predicates, obligations } =
- traits::normalize(&mut selcx, cause.clone(), &predicates);
- for obligation in obligations {
- fulfill_cx.register_predicate_obligation(&infcx, obligation);
- }
- for predicate in predicates {
- let obligation = traits::Obligation::new(cause.clone(), predicate);
- fulfill_cx.register_predicate_obligation(&infcx, obligation);
- }
-
- fulfill_cx.select_all_or_error(&infcx).is_ok()
- })
-}
-
pub fn langcall(tcx: TyCtxt,
span: Option<Span>,
msg: &str,
use rustc::traits;
use rustc::mir::mir_map::MirMap;
use rustc::mir::repr as mir;
-use adt;
use base;
use builder::Builder;
use common::BuilderRef_res;
lltypes: RefCell<FnvHashMap<Ty<'tcx>, Type>>,
llsizingtypes: RefCell<FnvHashMap<Ty<'tcx>, Type>>,
- adt_reprs: RefCell<FnvHashMap<Ty<'tcx>, Rc<adt::Repr<'tcx>>>>,
type_hashcodes: RefCell<FnvHashMap<Ty<'tcx>, String>>,
int_type: Type,
opaque_vec_type: Type,
statics_to_rauw: RefCell::new(Vec::new()),
lltypes: RefCell::new(FnvHashMap()),
llsizingtypes: RefCell::new(FnvHashMap()),
- adt_reprs: RefCell::new(FnvHashMap()),
type_hashcodes: RefCell::new(FnvHashMap()),
int_type: Type::from_ref(ptr::null_mut()),
opaque_vec_type: Type::from_ref(ptr::null_mut()),
&self.local().llsizingtypes
}
- pub fn adt_reprs<'a>(&'a self) -> &'a RefCell<FnvHashMap<Ty<'tcx>, Rc<adt::Repr<'tcx>>>> {
- &self.local().adt_reprs
- }
-
pub fn symbol_hasher<'a>(&'a self) -> &'a RefCell<Sha256> {
&self.shared.symbol_hasher
}
pub fn layout_of(&self, ty: Ty<'tcx>) -> &'tcx ty::layout::Layout {
self.tcx().infer_ctxt(None, None, traits::Reveal::All).enter(|infcx| {
ty.layout(&infcx).unwrap_or_else(|e| {
- bug!("failed to get layout for `{}`: {}", ty, e);
+ match e {
+ ty::layout::LayoutError::SizeOverflow(_) =>
+ self.sess().fatal(&e.to_string()),
+ _ => bug!("failed to get layout for `{}`: {}", ty, e)
+ }
})
})
}
// Find all the scopes with variables defined in them.
let mut has_variables = BitVector::new(mir.visibility_scopes.len());
- for var in &mir.var_decls {
- has_variables.insert(var.source_info.scope.index());
+ for var in mir.vars_iter() {
+ let decl = &mir.local_decls[var];
+ has_variables.insert(decl.source_info.unwrap().scope.index());
}
// Instantiate all scopes.
use rustc::hir::def_id::DefId;
use rustc::ty::subst::Substs;
use rustc::hir;
-use {type_of, adt, machine, monomorphize};
+use {type_of, machine, monomorphize};
use common::CrateContext;
use type_::Type;
-use rustc::ty::{self, AdtKind, Ty};
+use rustc::ty::{self, AdtKind, Ty, layout};
use session::config;
use util::nodemap::FnvHashMap;
use util::common::path2cstr;
use std::path::Path;
use std::ptr;
use std::rc::Rc;
-use syntax;
use syntax::util::interner::Interner;
use syntax::ast;
use syntax::parse::token;
// offset of zero bytes).
struct EnumMemberDescriptionFactory<'tcx> {
enum_type: Ty<'tcx>,
- type_rep: Rc<adt::Repr<'tcx>>,
+ type_rep: &'tcx layout::Layout,
discriminant_type_metadata: Option<DIType>,
containing_scope: DIScope,
file_metadata: DIFile,
fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>)
-> Vec<MemberDescription> {
let adt = &self.enum_type.ty_adt_def().unwrap();
+ let substs = match self.enum_type.sty {
+ ty::TyAdt(def, ref s) if def.adt_kind() == AdtKind::Enum => s,
+ _ => bug!("{} is not an enum", self.enum_type)
+ };
match *self.type_rep {
- adt::General(_, ref struct_defs) => {
+ layout::General { ref variants, .. } => {
let discriminant_info = RegularDiscriminant(self.discriminant_type_metadata
.expect(""));
- struct_defs
+ variants
.iter()
.enumerate()
.map(|(i, struct_def)| {
}
}).collect()
},
- adt::Univariant(ref struct_def) => {
+ layout::Univariant{ ref variant, .. } => {
assert!(adt.variants.len() <= 1);
if adt.variants.is_empty() {
member_description_factory) =
describe_enum_variant(cx,
self.enum_type,
- struct_def,
+ variant,
&adt.variants[0],
NoDiscriminant,
self.containing_scope,
]
}
}
- adt::RawNullablePointer { nndiscr: non_null_variant_index, nnty, .. } => {
+ layout::RawNullablePointer { nndiscr: non_null_variant_index, .. } => {
// As far as debuginfo is concerned, the pointer this enum
// represents is still wrapped in a struct. This is to make the
// DWARF representation of enums uniform.
// First create a description of the artificial wrapper struct:
- let non_null_variant = &adt.variants[non_null_variant_index.0 as usize];
+ let non_null_variant = &adt.variants[non_null_variant_index as usize];
let non_null_variant_name = non_null_variant.name.as_str();
// The llvm type and metadata of the pointer
+ let nnty = monomorphize::field_ty(cx.tcx(), &substs, &non_null_variant.fields[0] );
let non_null_llvm_type = type_of::type_of(cx, nnty);
let non_null_type_metadata = type_metadata(cx, nnty, self.span);
// Encode the information about the null variant in the union
// member's name.
- let null_variant_index = (1 - non_null_variant_index.0) as usize;
+ let null_variant_index = (1 - non_null_variant_index) as usize;
let null_variant_name = adt.variants[null_variant_index].name;
let union_member_name = format!("RUST$ENCODED$ENUM${}${}",
0,
}
]
},
- adt::StructWrappedNullablePointer { nonnull: ref struct_def,
+ layout::StructWrappedNullablePointer { nonnull: ref struct_def,
nndiscr,
ref discrfield, ..} => {
// Create a description of the non-null variant
describe_enum_variant(cx,
self.enum_type,
struct_def,
- &adt.variants[nndiscr.0 as usize],
+ &adt.variants[nndiscr as usize],
OptimizedDiscriminant,
self.containing_scope,
self.span);
// Encode the information about the null variant in the union
// member's name.
- let null_variant_index = (1 - nndiscr.0) as usize;
+ let null_variant_index = (1 - nndiscr) as usize;
let null_variant_name = adt.variants[null_variant_index].name;
let discrfield = discrfield.iter()
.skip(1)
}
]
},
- adt::CEnum(..) | adt::UntaggedUnion(..) => {
- span_bug!(self.span, "This should be unreachable.")
- }
+ layout::CEnum { .. } => span_bug!(self.span, "This should be unreachable."),
+ ref l @ _ => bug!("Not an enum layout: {:#?}", l)
}
}
}
// full RecursiveTypeDescription.
fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
enum_type: Ty<'tcx>,
- struct_def: &adt::Struct<'tcx>,
+ struct_def: &layout::Struct,
variant: ty::VariantDef<'tcx>,
discriminant_info: EnumDiscriminantInfo,
containing_scope: DIScope,
span: Span)
-> (DICompositeType, Type, MemberDescriptionFactory<'tcx>) {
+ let substs = match enum_type.sty {
+ ty::TyAdt(def, s) if def.adt_kind() == AdtKind::Enum => s,
+ ref t @ _ => bug!("{:#?} is not an enum", t)
+ };
+
+ let maybe_discr_and_signed: Option<(layout::Integer, bool)> = match *cx.layout_of(enum_type) {
+ layout::CEnum {discr, ..} => Some((discr, true)),
+ layout::General{discr, ..} => Some((discr, false)),
+ layout::Univariant { .. }
+ | layout::RawNullablePointer { .. }
+ | layout::StructWrappedNullablePointer { .. } => None,
+ ref l @ _ => bug!("This should be unreachable. Type is {:#?} layout is {:#?}", enum_type, l)
+ };
+
+ let mut field_tys = variant.fields.iter().map(|f: ty::FieldDef<'tcx>| {
+ monomorphize::field_ty(cx.tcx(), &substs, f)
+ }).collect::<Vec<_>>();
+
+ if let Some((discr, signed)) = maybe_discr_and_signed {
+ field_tys.insert(0, discr.to_ty(&cx.tcx(), signed));
+ }
+
+
let variant_llvm_type =
- Type::struct_(cx, &struct_def.fields
+ Type::struct_(cx, &field_tys
.iter()
- .map(|&t| type_of::type_of(cx, t))
+ .map(|t| type_of::type_of(cx, t))
.collect::<Vec<_>>()
,
struct_def.packed);
// Build an array of (field name, field type) pairs to be captured in the factory closure.
let args: Vec<(String, Ty)> = arg_names.iter()
- .zip(&struct_def.fields)
+ .zip(field_tys.iter())
.map(|(s, &t)| (s.to_string(), t))
.collect();
let file_metadata = unknown_file_metadata(cx);
let variants = &enum_type.ty_adt_def().unwrap().variants;
-
let enumerators_metadata: Vec<DIDescriptor> = variants
.iter()
.map(|v| {
})
.collect();
- let discriminant_type_metadata = |inttype: syntax::attr::IntType| {
+ let discriminant_type_metadata = |inttype: layout::Integer, signed: bool| {
let disr_type_key = (enum_def_id, inttype);
let cached_discriminant_type_metadata = debug_context(cx).created_enum_disr_types
.borrow()
match cached_discriminant_type_metadata {
Some(discriminant_type_metadata) => discriminant_type_metadata,
None => {
- let discriminant_llvm_type = adt::ll_inttype(cx, inttype);
+ let discriminant_llvm_type = Type::from_integer(cx, inttype);
let (discriminant_size, discriminant_align) =
size_and_align_of(cx, discriminant_llvm_type);
let discriminant_base_type_metadata =
type_metadata(cx,
- adt::ty_of_inttype(cx.tcx(), inttype),
+ inttype.to_ty(&cx.tcx(), signed),
syntax_pos::DUMMY_SP);
let discriminant_name = get_enum_discriminant_name(cx, enum_def_id);
}
};
- let type_rep = adt::represent_type(cx, enum_type);
+ let type_rep = cx.layout_of(enum_type);
let discriminant_type_metadata = match *type_rep {
- adt::CEnum(inttype, ..) => {
- return FinalMetadata(discriminant_type_metadata(inttype))
+ layout::CEnum { discr, signed, .. } => {
+ return FinalMetadata(discriminant_type_metadata(discr, signed))
},
- adt::RawNullablePointer { .. } |
- adt::StructWrappedNullablePointer { .. } |
- adt::Univariant(..) | adt::UntaggedUnion(..) => None,
- adt::General(inttype, _) => Some(discriminant_type_metadata(inttype)),
+ layout::RawNullablePointer { .. } |
+ layout::StructWrappedNullablePointer { .. } |
+ layout::Univariant { .. } => None,
+ layout::General { discr, .. } => Some(discriminant_type_metadata(discr, false)),
+ ref l @ _ => bug!("Not an enum layout: {:#?}", l)
};
let enum_llvm_type = type_of::type_of(cx, enum_type);
enum_llvm_type,
EnumMDF(EnumMemberDescriptionFactory {
enum_type: enum_type,
- type_rep: type_rep.clone(),
+ type_rep: type_rep,
discriminant_type_metadata: discriminant_type_metadata,
containing_scope: containing_scope,
file_metadata: file_metadata,
use syntax_pos::{self, Span, Pos};
use syntax::ast;
-use syntax::attr::IntType;
+use rustc::ty::layout;
pub mod gdb;
mod utils;
builder: DIBuilderRef,
current_debug_location: Cell<InternalDebugLocation>,
created_files: RefCell<FnvHashMap<String, DIFile>>,
- created_enum_disr_types: RefCell<FnvHashMap<(DefId, IntType), DIType>>,
+ created_enum_disr_types: RefCell<FnvHashMap<(DefId, layout::Integer), DIType>>,
type_map: RefCell<TypeMap<'tcx>>,
namespace_map: RefCell<DefIdMap<DIScope>>,
t: Ty<'tcx>) -> Ty<'tcx> {
assert!(t.is_normalized_for_trans());
+ let t = tcx.erase_regions(&t);
+
// Even if there is no dtor for t, there might be one deeper down and we
// might need to pass in the vtable ptr.
if !type_is_sized(tcx, t) {
g: DropGlueKind<'tcx>) -> ValueRef {
let g = g.map_ty(|t| get_drop_glue_type(ccx.tcx(), t));
match ccx.drop_glues().borrow().get(&g) {
- Some(&(glue, _)) => return glue,
+ Some(&(glue, _)) => glue,
None => {
- debug!("Could not find drop glue for {:?} -- {} -- {}. \
- Falling back to on-demand instantiation.",
+ bug!("Could not find drop glue for {:?} -- {} -- {}.",
g,
TransItem::DropGlue(g).to_raw_string(),
ccx.codegen_unit().name());
}
}
-
- // FIXME: #34151
- // Normally, getting here would indicate a bug in trans::collector,
- // since it seems to have missed a translation item. When we are
- // translating with non-MIR-based trans, however, the results of the
- // collector are not entirely reliable since it bases its analysis
- // on MIR. Thus, we'll instantiate the missing function on demand in
- // this codegen unit, so that things keep working.
-
- TransItem::DropGlue(g).predefine(ccx, llvm::InternalLinkage);
- TransItem::DropGlue(g).define(ccx);
-
- // Now that we made sure that the glue function is in ccx.drop_glues,
- // give it another try
- get_drop_glue_core(ccx, g)
}
pub fn implement_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
let _icx = push_ctxt("drop_structural_ty");
fn iter_variant<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
- repr: &adt::Repr<'tcx>,
+ t: Ty<'tcx>,
av: adt::MaybeSizedValue,
variant: ty::VariantDef<'tcx>,
substs: &Substs<'tcx>)
for (i, field) in variant.fields.iter().enumerate() {
let arg = monomorphize::field_ty(tcx, substs, field);
cx = drop_ty(cx,
- adt::trans_field_ptr(cx, repr, av, Disr::from(variant.disr_val), i),
+ adt::trans_field_ptr(cx, t, av, Disr::from(variant.disr_val), i),
arg, DebugLoc::None);
}
return cx;
let mut cx = cx;
match t.sty {
ty::TyClosure(_, ref substs) => {
- let repr = adt::represent_type(cx.ccx(), t);
for (i, upvar_ty) in substs.upvar_tys.iter().enumerate() {
- let llupvar = adt::trans_field_ptr(cx, &repr, value, Disr(0), i);
+ let llupvar = adt::trans_field_ptr(cx, t, value, Disr(0), i);
cx = drop_ty(cx, llupvar, upvar_ty, DebugLoc::None);
}
}
|bb, vv| drop_ty(bb, vv, unit_ty, DebugLoc::None));
}
ty::TyTuple(ref args) => {
- let repr = adt::represent_type(cx.ccx(), t);
for (i, arg) in args.iter().enumerate() {
- let llfld_a = adt::trans_field_ptr(cx, &repr, value, Disr(0), i);
+ let llfld_a = adt::trans_field_ptr(cx, t, value, Disr(0), i);
cx = drop_ty(cx, llfld_a, *arg, DebugLoc::None);
}
}
ty::TyAdt(adt, substs) => match adt.adt_kind() {
AdtKind::Struct => {
- let repr = adt::represent_type(cx.ccx(), t);
let VariantInfo { fields, discr } = VariantInfo::from_ty(cx.tcx(), t, None);
for (i, &Field(_, field_ty)) in fields.iter().enumerate() {
- let llfld_a = adt::trans_field_ptr(cx, &repr, value, Disr::from(discr), i);
+ let llfld_a = adt::trans_field_ptr(cx, t, value, Disr::from(discr), i);
let val = if type_is_sized(cx.tcx(), field_ty) {
llfld_a
AdtKind::Enum => {
let fcx = cx.fcx;
let ccx = fcx.ccx;
-
- let repr = adt::represent_type(ccx, t);
let n_variants = adt.variants.len();
// NB: we must hit the discriminant first so that structural
// comparison know not to proceed when the discriminants differ.
- match adt::trans_switch(cx, &repr, av, false) {
+ match adt::trans_switch(cx, t, av, false) {
(adt::BranchKind::Single, None) => {
if n_variants != 0 {
assert!(n_variants == 1);
- cx = iter_variant(cx, &repr, adt::MaybeSizedValue::sized(av),
+ cx = iter_variant(cx, t, adt::MaybeSizedValue::sized(av),
&adt.variants[0], substs);
}
}
let variant_cx = fcx.new_block(&format!("enum-iter-variant-{}",
&variant.disr_val
.to_string()));
- let case_val = adt::trans_case(cx, &repr, Disr::from(variant.disr_val));
+ let case_val = adt::trans_case(cx, t, Disr::from(variant.disr_val));
AddCase(llswitch, case_val, variant_cx.llbb);
let variant_cx = iter_variant(variant_cx,
- &repr,
+ t,
value,
variant,
substs);
let val_ty = substs.type_at(0);
match val_ty.sty {
ty::TyAdt(adt, ..) if adt.is_enum() => {
- let repr = adt::represent_type(ccx, val_ty);
- adt::trans_get_discr(bcx, &repr, llargs[0],
+ adt::trans_get_discr(bcx, val_ty, llargs[0],
Some(llret_ty), true)
}
_ => C_null(llret_ty)
// destructors, and the contents are SIMD
// etc.
assert!(!bcx.fcx.type_needs_drop(arg_type));
-
- let repr = adt::represent_type(bcx.ccx(), arg_type);
- let repr_ptr = &repr;
let arg = adt::MaybeSizedValue::sized(llarg);
(0..contents.len())
.map(|i| {
- Load(bcx, adt::trans_field_ptr(bcx, repr_ptr, arg, Disr(0), i))
+ Load(bcx, adt::trans_field_ptr(bcx, arg_type, arg, Disr(0), i))
})
.collect()
}
pub use disr::Disr;
pub mod back {
- pub use rustc_back::rpath;
pub use rustc::hir::svh;
pub mod archive;
pub mod symbol_names;
pub mod write;
pub mod msvc;
+ pub mod rpath;
}
pub mod diagnostics;
// ______________________________________________________________________
// compute sizeof / alignof
-// Returns the number of bytes clobbered by a Store to this type.
-pub fn llsize_of_store(cx: &CrateContext, ty: Type) -> llsize {
- unsafe {
- return llvm::LLVMStoreSizeOfType(cx.td(), ty.to_ref());
- }
-}
-
// Returns the number of bytes between successive elements of type T in an
// array of T. This is the "ABI" size. It includes any ABI-mandated padding.
pub fn llsize_of_alloc(cx: &CrateContext, ty: Type) -> llsize {
}
}
-// Returns, as near as we can figure, the "real" size of a type. As in, the
-// bits in this number of bytes actually carry data related to the datum
-// with the type. Not junk, accidentally-damaged words, or whatever.
-// Note that padding of the type will be included for structs, but not for the
-// other types (i.e. SIMD types).
-// Rounds up to the nearest byte though, so if you have a 1-bit
-// value, we return 1 here, not 0. Most of rustc works in bytes. Be warned
-// that LLVM *does* distinguish between e.g. a 1-bit value and an 8-bit value
-// at the codegen level! In general you should prefer `llbitsize_of_real`
-// below.
-pub fn llsize_of_real(cx: &CrateContext, ty: Type) -> llsize {
- unsafe {
- let nbits = llvm::LLVMSizeOfTypeInBits(cx.td(), ty.to_ref());
- if nbits & 7 != 0 {
- // Not an even number of bytes, spills into "next" byte.
- 1 + (nbits >> 3)
- } else {
- nbits >> 3
- }
- }
-}
-
/// Returns the "real" size of the type in bits.
pub fn llbitsize_of_real(cx: &CrateContext, ty: Type) -> llbits {
unsafe {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::rc::Rc;
-
use attributes;
use arena::TypedArena;
-use back::symbol_names;
use llvm::{ValueRef, get_params};
-use rustc::hir::def_id::DefId;
-use rustc::ty::subst::{Subst, Substs};
-use rustc::traits::{self, Reveal};
+use rustc::traits;
use abi::FnType;
use base::*;
use build::*;
-use callee::{Callee, Virtual, trans_fn_pointer_shim};
-use closure;
+use callee::Callee;
use common::*;
use consts;
use debuginfo::DebugLoc;
use declare;
use glue;
use machine;
+use monomorphize::Instance;
use type_::Type;
use type_of::*;
use value::Value;
-use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
-
-use syntax::ast::Name;
-use syntax_pos::DUMMY_SP;
+use rustc::ty;
// drop_glue pointer, size, align.
const VTABLE_OFFSET: usize = 3;
/// In fact, all virtual calls can be thought of as normal trait calls
/// that go through this shim function.
pub fn trans_object_shim<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>,
- method_ty: Ty<'tcx>,
- vtable_index: usize)
+ callee: Callee<'tcx>)
-> ValueRef {
let _icx = push_ctxt("trans_object_shim");
let tcx = ccx.tcx();
- debug!("trans_object_shim(vtable_index={}, method_ty={:?})",
- vtable_index,
- method_ty);
+ debug!("trans_object_shim({:?})", callee);
+
+ let (sig, abi, function_name) = match callee.ty.sty {
+ ty::TyFnDef(def_id, substs, f) => {
+ let instance = Instance::new(def_id, substs);
+ (&f.sig, f.abi, instance.symbol_name(ccx.shared()))
+ }
+ _ => bug!()
+ };
- let sig = tcx.erase_late_bound_regions(&method_ty.fn_sig());
+ let sig = tcx.erase_late_bound_regions(sig);
let sig = tcx.normalize_associated_type(&sig);
- let fn_ty = FnType::new(ccx, method_ty.fn_abi(), &sig, &[]);
+ let fn_ty = FnType::new(ccx, abi, &sig, &[]);
- let function_name =
- symbol_names::internal_name_from_type_and_suffix(ccx, method_ty, "object_shim");
- let llfn = declare::define_internal_fn(ccx, &function_name, method_ty);
+ let llfn = declare::define_internal_fn(ccx, &function_name, callee.ty);
attributes::set_frame_pointer_elimination(ccx, llfn);
let (block_arena, fcx): (TypedArena<_>, FunctionContext);
let mut bcx = fcx.init(false);
let dest = fcx.llretslotptr.get();
-
- debug!("trans_object_shim: method_offset_in_vtable={}",
- vtable_index);
-
let llargs = get_params(fcx.llfn);
-
- let callee = Callee {
- data: Virtual(vtable_index),
- ty: method_ty
- };
bcx = callee.call(bcx, DebugLoc::None,
&llargs[fcx.fn_ty.ret.is_indirect() as usize..], dest).bcx;
}
// Not in the cache. Build it.
- let methods = traits::supertraits(tcx, trait_ref.clone()).flat_map(|trait_ref| {
- let vtable = fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref.clone());
- match vtable {
- // Should default trait error here?
- traits::VtableDefaultImpl(_) |
- traits::VtableBuiltin(_) => {
- Vec::new().into_iter()
- }
- traits::VtableImpl(
- traits::VtableImplData {
- impl_def_id: id,
- substs,
- nested: _ }) => {
- let nullptr = C_null(Type::nil(ccx).ptr_to());
- get_vtable_methods(tcx, id, substs)
- .into_iter()
- .map(|opt_mth| opt_mth.map_or(nullptr, |mth| {
- Callee::def(ccx, mth.method.def_id, &mth.substs).reify(ccx)
- }))
- .collect::<Vec<_>>()
- .into_iter()
- }
- traits::VtableClosure(
- traits::VtableClosureData {
- closure_def_id,
- substs,
- nested: _ }) => {
- let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_ref.def_id()).unwrap();
- let llfn = closure::trans_closure_method(ccx,
- closure_def_id,
- substs,
- trait_closure_kind);
- vec![llfn].into_iter()
- }
- traits::VtableFnPointer(
- traits::VtableFnPointerData {
- fn_ty: bare_fn_ty,
- nested: _ }) => {
- let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_ref.def_id()).unwrap();
- vec![trans_fn_pointer_shim(ccx, trait_closure_kind, bare_fn_ty)].into_iter()
- }
- traits::VtableObject(ref data) => {
- // this would imply that the Self type being erased is
- // an object type; this cannot happen because we
- // cannot cast an unsized type into a trait object
- bug!("cannot get vtable for an object type: {:?}",
- data);
- }
- traits::VtableParam(..) => {
- bug!("resolved vtable for {:?} to bad vtable {:?} in trans",
- trait_ref,
- vtable);
- }
- }
+ let nullptr = C_null(Type::nil(ccx).ptr_to());
+ let methods = traits::get_vtable_methods(tcx, trait_ref).map(|opt_mth| {
+ opt_mth.map_or(nullptr, |(def_id, substs)| {
+ Callee::def(ccx, def_id, substs).reify(ccx)
+ })
});
let size_ty = sizing_type_of(ccx, trait_ref.self_ty());
let size = machine::llsize_of_alloc(ccx, size_ty);
let align = align_of(ccx, trait_ref.self_ty());
- let components: Vec<_> = vec![
+ let components: Vec<_> = [
// Generate a destructor for the vtable.
glue::get_drop_glue(ccx, trait_ref.self_ty()),
C_uint(ccx, size),
C_uint(ccx, align)
- ].into_iter().chain(methods).collect();
+ ].iter().cloned().chain(methods).collect();
let vtable_const = C_struct(ccx, &components, false);
let align = machine::llalign_of_pref(ccx, val_ty(vtable_const));
ccx.vtables().borrow_mut().insert(trait_ref, vtable);
vtable
}
-
-pub fn get_vtable_methods<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- impl_id: DefId,
- substs: &'tcx Substs<'tcx>)
- -> Vec<Option<ImplMethod<'tcx>>>
-{
- debug!("get_vtable_methods(impl_id={:?}, substs={:?}", impl_id, substs);
-
- let trait_id = match tcx.impl_trait_ref(impl_id) {
- Some(t_id) => t_id.def_id,
- None => bug!("make_impl_vtable: don't know how to \
- make a vtable for a type impl!")
- };
-
- tcx.populate_implementations_for_trait_if_necessary(trait_id);
-
- let trait_item_def_ids = tcx.trait_item_def_ids(trait_id);
- trait_item_def_ids
- .iter()
-
- // Filter out non-method items.
- .filter_map(|item_def_id| {
- match *item_def_id {
- ty::MethodTraitItemId(def_id) => Some(def_id),
- _ => None,
- }
- })
-
- // Now produce pointers for each remaining method. If the
- // method could never be called from this object, just supply
- // null.
- .map(|trait_method_def_id| {
- debug!("get_vtable_methods: trait_method_def_id={:?}",
- trait_method_def_id);
-
- let trait_method_type = match tcx.impl_or_trait_item(trait_method_def_id) {
- ty::MethodTraitItem(m) => m,
- _ => bug!("should be a method, not other assoc item"),
- };
- let name = trait_method_type.name;
-
- // Some methods cannot be called on an object; skip those.
- if !tcx.is_vtable_safe_method(trait_id, &trait_method_type) {
- debug!("get_vtable_methods: not vtable safe");
- return None;
- }
-
- debug!("get_vtable_methods: trait_method_type={:?}",
- trait_method_type);
-
- // the method may have some early-bound lifetimes, add
- // regions for those
- let method_substs = Substs::for_item(tcx, trait_method_def_id,
- |_, _| tcx.mk_region(ty::ReErased),
- |_, _| tcx.types.err);
-
- // The substitutions we have are on the impl, so we grab
- // the method type from the impl to substitute into.
- let mth = get_impl_method(tcx, method_substs, impl_id, substs, name);
-
- debug!("get_vtable_methods: mth={:?}", mth);
-
- // If this is a default method, it's possible that it
- // relies on where clauses that do not hold for this
- // particular set of type parameters. Note that this
- // method could then never be called, so we do not want to
- // try and trans it, in that case. Issue #23435.
- if mth.is_provided {
- let predicates = mth.method.predicates.predicates.subst(tcx, &mth.substs);
- if !normalize_and_test_predicates(tcx, predicates) {
- debug!("get_vtable_methods: predicates do not hold");
- return None;
- }
- }
-
- Some(mth)
- })
- .collect()
-}
-
-#[derive(Debug)]
-pub struct ImplMethod<'tcx> {
- pub method: Rc<ty::Method<'tcx>>,
- pub substs: &'tcx Substs<'tcx>,
- pub is_provided: bool
-}
-
-/// Locates the applicable definition of a method, given its name.
-pub fn get_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- substs: &'tcx Substs<'tcx>,
- impl_def_id: DefId,
- impl_substs: &'tcx Substs<'tcx>,
- name: Name)
- -> ImplMethod<'tcx>
-{
- assert!(!substs.needs_infer());
-
- let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap();
- let trait_def = tcx.lookup_trait_def(trait_def_id);
-
- match trait_def.ancestors(impl_def_id).fn_defs(tcx, name).next() {
- Some(node_item) => {
- let substs = tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| {
- let substs = substs.rebase_onto(tcx, trait_def_id, impl_substs);
- let substs = traits::translate_substs(&infcx, impl_def_id,
- substs, node_item.node);
- tcx.lift(&substs).unwrap_or_else(|| {
- bug!("trans::meth::get_impl_method: translate_substs \
- returned {:?} which contains inference types/regions",
- substs);
- })
- });
- ImplMethod {
- method: node_item.item,
- substs: substs,
- is_provided: node_item.node.is_from_trait(),
- }
- }
- None => {
- bug!("method {:?} not found in {:?}", name, impl_def_id)
- }
- }
-}
use rustc::mir::traversal;
use common::{self, Block, BlockAndBuilder};
use glue;
-use std::iter;
use super::rvalue;
pub fn lvalue_locals<'bcx, 'tcx>(bcx: Block<'bcx,'tcx>,
analyzer.visit_mir(mir);
- let local_types = mir.arg_decls.iter().map(|a| a.ty)
- .chain(mir.var_decls.iter().map(|v| v.ty))
- .chain(mir.temp_decls.iter().map(|t| t.ty))
- .chain(iter::once(mir.return_ty));
- for (index, ty) in local_types.enumerate() {
+ for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() {
let ty = bcx.monomorphize(&ty);
debug!("local {} has type {:?}", index, ty);
if ty.is_scalar() ||
fn new(mir: &'mir mir::Mir<'tcx>,
bcx: &'mir BlockAndBuilder<'bcx, 'tcx>)
-> LocalAnalyzer<'mir, 'bcx, 'tcx> {
- let local_count = mir.count_locals();
LocalAnalyzer {
mir: mir,
bcx: bcx,
- lvalue_locals: BitVector::new(local_count),
- seen_assigned: BitVector::new(local_count)
+ lvalue_locals: BitVector::new(mir.local_decls.len()),
+ seen_assigned: BitVector::new(mir.local_decls.len())
}
}
location: Location) {
debug!("visit_assign(block={:?}, lvalue={:?}, rvalue={:?})", block, lvalue, rvalue);
- if let Some(index) = self.mir.local_index(lvalue) {
+ if let mir::Lvalue::Local(index) = *lvalue {
self.mark_assigned(index);
if !rvalue::rvalue_creates_operand(self.mir, self.bcx, rvalue) {
self.mark_as_lvalue(index);
fn visit_lvalue(&mut self,
lvalue: &mir::Lvalue<'tcx>,
- context: LvalueContext,
+ context: LvalueContext<'tcx>,
location: Location) {
debug!("visit_lvalue(lvalue={:?}, context={:?})", lvalue, context);
// Allow uses of projections of immediate pair fields.
if let mir::Lvalue::Projection(ref proj) = *lvalue {
- if self.mir.local_index(&proj.base).is_some() {
+ if let mir::Lvalue::Local(_) = proj.base {
let ty = proj.base.ty(self.mir, self.bcx.tcx());
let ty = self.bcx.monomorphize(&ty.to_ty(self.bcx.tcx()));
}
}
- if let Some(index) = self.mir.local_index(lvalue) {
+ if let mir::Lvalue::Local(index) = *lvalue {
match context {
LvalueContext::Call => {
self.mark_assigned(index);
LvalueContext::Store |
LvalueContext::Inspect |
LvalueContext::Borrow { .. } |
- LvalueContext::Projection => {
+ LvalueContext::Projection(..) => {
self.mark_as_lvalue(index);
}
mir::TerminatorKind::Switch { ref discr, ref adt_def, ref targets } => {
let discr_lvalue = self.trans_lvalue(&bcx, discr);
let ty = discr_lvalue.ty.to_ty(bcx.tcx());
- let repr = adt::represent_type(bcx.ccx(), ty);
let discr = bcx.with_block(|bcx|
- adt::trans_get_discr(bcx, &repr, discr_lvalue.llval, None, true)
+ adt::trans_get_discr(bcx, ty, discr_lvalue.llval, None, true)
);
let mut bb_hist = FnvHashMap();
if default_bb != Some(target) {
let llbb = llblock(self, target);
let llval = bcx.with_block(|bcx| adt::trans_case(
- bcx, &repr, Disr::from(adt_variant.disr_val)));
+ bcx, ty, Disr::from(adt_variant.disr_val)));
build::AddCase(switch, llval, llbb)
}
}
}
let llval = if let Some(cast_ty) = ret.cast {
- let index = mir.local_index(&mir::Lvalue::ReturnPointer).unwrap();
- let op = match self.locals[index] {
+ let op = match self.locals[mir::RETURN_POINTER] {
LocalRef::Operand(Some(op)) => op,
LocalRef::Operand(None) => bug!("use of return before def"),
LocalRef::Lvalue(tr_lvalue) => {
}
load
} else {
- let op = self.trans_consume(&bcx, &mir::Lvalue::ReturnPointer);
+ let op = self.trans_consume(&bcx, &mir::Lvalue::Local(mir::RETURN_POINTER));
op.pack_if_pair(&bcx).immediate()
};
bcx.ret(llval);
// Handle both by-ref and immediate tuples.
match tuple.val {
Ref(llval) => {
- let base_repr = adt::represent_type(bcx.ccx(), tuple.ty);
let base = adt::MaybeSizedValue::sized(llval);
for (n, &ty) in arg_types.iter().enumerate() {
- let ptr = adt::trans_field_ptr_builder(bcx, &base_repr, base, Disr(0), n);
+ let ptr = adt::trans_field_ptr_builder(bcx, tuple.ty, base, Disr(0), n);
let val = if common::type_is_fat_ptr(bcx.tcx(), ty) {
let (lldata, llextra) = load_fat_ptr(bcx, ptr);
Pair(lldata, llextra)
if fn_ret_ty.is_ignore() {
return ReturnDest::Nothing;
}
- let dest = if let Some(index) = self.mir.local_index(dest) {
+ let dest = if let mir::Lvalue::Local(index) = *dest {
let ret_ty = self.monomorphized_lvalue_ty(dest);
match self.locals[index] {
LocalRef::Lvalue(dest) => dest,
use rustc::ty::cast::{CastTy, IntTy};
use rustc::ty::subst::Substs;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
-use {abi, adt, base, Disr};
+use {abi, adt, base, Disr, machine};
use callee::Callee;
-use common::{self, BlockAndBuilder, CrateContext, const_get_elt, val_ty};
+use common::{self, BlockAndBuilder, CrateContext, const_get_elt, val_ty, type_is_sized};
use common::{C_array, C_bool, C_bytes, C_floating_f64, C_integral};
use common::{C_null, C_struct, C_str_slice, C_undef, C_uint};
use common::{const_to_opt_int, const_to_opt_uint};
fn new(ccx: &'a CrateContext<'a, 'tcx>,
mir: &'a mir::Mir<'tcx>,
substs: &'tcx Substs<'tcx>,
- args: IndexVec<mir::Arg, Const<'tcx>>)
+ args: IndexVec<mir::Local, Const<'tcx>>)
-> MirConstContext<'a, 'tcx> {
let mut context = MirConstContext {
ccx: ccx,
mir: mir,
substs: substs,
- locals: (0..mir.count_locals()).map(|_| None).collect(),
+ locals: (0..mir.local_decls.len()).map(|_| None).collect(),
};
for (i, arg) in args.into_iter().enumerate() {
- let index = mir.local_index(&mir::Lvalue::Arg(mir::Arg::new(i))).unwrap();
+ // Locals after local 0 are the function arguments
+ let index = mir::Local::new(i + 1);
context.locals[index] = Some(arg);
}
context
fn trans_def(ccx: &'a CrateContext<'a, 'tcx>,
mut instance: Instance<'tcx>,
- args: IndexVec<mir::Arg, Const<'tcx>>)
+ args: IndexVec<mir::Local, Const<'tcx>>)
-> Result<Const<'tcx>, ConstEvalErr> {
// Try to resolve associated constants.
if let Some(trait_id) = ccx.tcx().trait_of_item(instance.def) {
let vtable = common::fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref);
if let traits::VtableImpl(vtable_impl) = vtable {
let name = ccx.tcx().item_name(instance.def);
- for ac in ccx.tcx().associated_consts(vtable_impl.impl_def_id) {
- if ac.name == name {
- instance = Instance::new(ac.def_id, vtable_impl.substs);
- break;
- }
+ let ac = ccx.tcx().impl_or_trait_items(vtable_impl.impl_def_id)
+ .iter().filter_map(|&def_id| {
+ match ccx.tcx().impl_or_trait_item(def_id) {
+ ty::ConstTraitItem(ac) => Some(ac),
+ _ => None
+ }
+ }).find(|ic| ic.name == name);
+ if let Some(ac) = ac {
+ instance = Instance::new(ac.def_id, vtable_impl.substs);
}
}
}
}
}
mir::StatementKind::StorageLive(_) |
- mir::StatementKind::StorageDead(_) => {}
+ mir::StatementKind::StorageDead(_) |
+ mir::StatementKind::Nop => {}
mir::StatementKind::SetDiscriminant{ .. } => {
span_bug!(span, "SetDiscriminant should not appear in constants?");
}
mir::TerminatorKind::Goto { target } => target,
mir::TerminatorKind::Return => {
failure?;
- let index = self.mir.local_index(&mir::Lvalue::ReturnPointer).unwrap();
- return Ok(self.locals[index].unwrap_or_else(|| {
+ return Ok(self.locals[mir::RETURN_POINTER].unwrap_or_else(|| {
span_bug!(span, "no returned value in constant");
}));
}
}
fn store(&mut self, dest: &mir::Lvalue<'tcx>, value: Const<'tcx>, span: Span) {
- if let Some(index) = self.mir.local_index(dest) {
+ if let mir::Lvalue::Local(index) = *dest {
self.locals[index] = Some(value);
} else {
span_bug!(span, "assignment to {:?} in constant", dest);
-> Result<ConstLvalue<'tcx>, ConstEvalErr> {
let tcx = self.ccx.tcx();
- if let Some(index) = self.mir.local_index(lvalue) {
+ if let mir::Lvalue::Local(index) = *lvalue {
return Ok(self.locals[index].unwrap_or_else(|| {
span_bug!(span, "{:?} not initialized", lvalue)
}).as_lvalue());
}
let lvalue = match *lvalue {
- mir::Lvalue::Var(_) |
- mir::Lvalue::Temp(_) |
- mir::Lvalue::Arg(_) |
- mir::Lvalue::ReturnPointer => bug!(), // handled above
+ mir::Lvalue::Local(_) => bug!(), // handled above
mir::Lvalue::Static(def_id) => {
ConstLvalue {
base: Base::Static(consts::get_static(self.ccx, def_id)),
}
}
mir::ProjectionElem::Field(ref field, _) => {
- let base_repr = adt::represent_type(self.ccx, tr_base.ty);
- let llprojected = adt::const_get_field(&base_repr, base.llval,
+ let llprojected = adt::const_get_field(self.ccx, tr_base.ty, base.llval,
Disr(0), field.index());
let llextra = if is_sized {
ptr::null_mut()
}
_ => Disr(0)
};
- let repr = adt::represent_type(self.ccx, dest_ty);
Const::new(
- adt::trans_const(self.ccx, &repr, disr, &fields),
+ adt::trans_const(self.ccx, dest_ty, disr, &fields),
dest_ty
)
}
let ll_t_out = type_of::immediate_type_of(self.ccx, cast_ty);
let llval = operand.llval;
let signed = if let CastTy::Int(IntTy::CEnum) = r_t_in {
- let repr = adt::represent_type(self.ccx, operand.ty);
- adt::is_discr_signed(&repr)
+ let l = self.ccx.layout_of(operand.ty);
+ adt::is_discr_signed(&l)
} else {
operand.ty.is_signed()
};
let base = match tr_lvalue.base {
Base::Value(llval) => {
- let align = type_of::align_of(self.ccx, ty);
+ // FIXME: may be wrong for &*(&simd_vec as &fmt::Debug)
+ let align = if type_is_sized(self.ccx.tcx(), ty) {
+ type_of::align_of(self.ccx, ty)
+ } else {
+ self.ccx.tcx().data_layout.pointer_align.abi() as machine::llalign
+ };
if bk == mir::BorrowKind::Mut {
consts::addr_of_mut(self.ccx, llval, align, "ref_mut")
} else {
let ccx = bcx.ccx();
let tcx = bcx.tcx();
- if let Some(index) = self.mir.local_index(lvalue) {
+ if let mir::Lvalue::Local(index) = *lvalue {
match self.locals[index] {
LocalRef::Lvalue(lvalue) => {
return lvalue;
}
let result = match *lvalue {
- mir::Lvalue::Var(_) |
- mir::Lvalue::Temp(_) |
- mir::Lvalue::Arg(_) |
- mir::Lvalue::ReturnPointer => bug!(), // handled above
+ mir::Lvalue::Local(_) => bug!(), // handled above
mir::Lvalue::Static(def_id) => {
let const_ty = self.monomorphized_lvalue_ty(lvalue);
LvalueRef::new_sized(consts::get_static(ccx, def_id),
mir::ProjectionElem::Deref => bug!(),
mir::ProjectionElem::Field(ref field, _) => {
let base_ty = tr_base.ty.to_ty(tcx);
- let base_repr = adt::represent_type(ccx, base_ty);
let discr = match tr_base.ty {
LvalueTy::Ty { .. } => 0,
LvalueTy::Downcast { adt_def: _, substs: _, variant_index: v } => v,
} else {
adt::MaybeSizedValue::unsized_(tr_base.llval, tr_base.llextra)
};
- let llprojected = adt::trans_field_ptr_builder(bcx, &base_repr, base,
+ let llprojected = adt::trans_field_ptr_builder(bcx, base_ty, base,
Disr(discr), field.index());
let llextra = if is_sized {
ptr::null_mut()
lvalue: &mir::Lvalue<'tcx>, f: F) -> U
where F: FnOnce(&mut Self, LvalueRef<'tcx>) -> U
{
- if let Some(index) = self.mir.local_index(lvalue) {
+ if let mir::Lvalue::Local(index) = *lvalue {
match self.locals[index] {
LocalRef::Lvalue(lvalue) => f(self, lvalue),
LocalRef::Operand(None) => {
// Allocate variable and temp allocas
mircx.locals = {
let args = arg_local_refs(&bcx, &mir, &mircx.scopes, &lvalue_locals);
- let vars = mir.var_decls.iter().enumerate().map(|(i, decl)| {
+
+ let mut allocate_local = |local| {
+ let decl = &mir.local_decls[local];
let ty = bcx.monomorphize(&decl.ty);
- let debug_scope = mircx.scopes[decl.source_info.scope];
- let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo;
- let local = mir.local_index(&mir::Lvalue::Var(mir::Var::new(i))).unwrap();
- if !lvalue_locals.contains(local.index()) && !dbg {
- return LocalRef::new_operand(bcx.ccx(), ty);
- }
+ if let Some(name) = decl.name {
+ // User variable
+ let source_info = decl.source_info.unwrap();
+ let debug_scope = mircx.scopes[source_info.scope];
+ let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo;
- let lvalue = LvalueRef::alloca(&bcx, ty, &decl.name.as_str());
- if dbg {
- let dbg_loc = mircx.debug_loc(decl.source_info);
- if let DebugLoc::ScopeAt(scope, span) = dbg_loc {
- bcx.with_block(|bcx| {
- declare_local(bcx, decl.name, ty, scope,
- VariableAccess::DirectVariable { alloca: lvalue.llval },
- VariableKind::LocalVariable, span);
- });
- } else {
- panic!("Unexpected");
+ if !lvalue_locals.contains(local.index()) && !dbg {
+ debug!("alloc: {:?} ({}) -> operand", local, name);
+ return LocalRef::new_operand(bcx.ccx(), ty);
}
- }
- LocalRef::Lvalue(lvalue)
- });
-
- let locals = mir.temp_decls.iter().enumerate().map(|(i, decl)| {
- (mir::Lvalue::Temp(mir::Temp::new(i)), decl.ty)
- }).chain(iter::once((mir::Lvalue::ReturnPointer, mir.return_ty)));
-
- args.into_iter().chain(vars).chain(locals.map(|(lvalue, ty)| {
- let ty = bcx.monomorphize(&ty);
- let local = mir.local_index(&lvalue).unwrap();
- if lvalue == mir::Lvalue::ReturnPointer && fcx.fn_ty.ret.is_indirect() {
- let llretptr = llvm::get_param(fcx.llfn, 0);
- LocalRef::Lvalue(LvalueRef::new_sized(llretptr, LvalueTy::from_ty(ty)))
- } else if lvalue_locals.contains(local.index()) {
- LocalRef::Lvalue(LvalueRef::alloca(&bcx, ty, &format!("{:?}", lvalue)))
+
+ debug!("alloc: {:?} ({}) -> lvalue", local, name);
+ let lvalue = LvalueRef::alloca(&bcx, ty, &name.as_str());
+ if dbg {
+ let dbg_loc = mircx.debug_loc(source_info);
+ if let DebugLoc::ScopeAt(scope, span) = dbg_loc {
+ bcx.with_block(|bcx| {
+ declare_local(bcx, name, ty, scope,
+ VariableAccess::DirectVariable { alloca: lvalue.llval },
+ VariableKind::LocalVariable, span);
+ });
+ } else {
+ panic!("Unexpected");
+ }
+ }
+ LocalRef::Lvalue(lvalue)
} else {
- // If this is an immediate local, we do not create an
- // alloca in advance. Instead we wait until we see the
- // definition and update the operand there.
- LocalRef::new_operand(bcx.ccx(), ty)
+ // Temporary or return pointer
+ if local == mir::RETURN_POINTER && fcx.fn_ty.ret.is_indirect() {
+ debug!("alloc: {:?} (return pointer) -> lvalue", local);
+ let llretptr = llvm::get_param(fcx.llfn, 0);
+ LocalRef::Lvalue(LvalueRef::new_sized(llretptr, LvalueTy::from_ty(ty)))
+ } else if lvalue_locals.contains(local.index()) {
+ debug!("alloc: {:?} -> lvalue", local);
+ LocalRef::Lvalue(LvalueRef::alloca(&bcx, ty, &format!("{:?}", local)))
+ } else {
+ // If this is an immediate local, we do not create an
+ // alloca in advance. Instead we wait until we see the
+ // definition and update the operand there.
+ debug!("alloc: {:?} -> operand", local);
+ LocalRef::new_operand(bcx.ccx(), ty)
+ }
}
- })).collect()
+ };
+
+ let retptr = allocate_local(mir::RETURN_POINTER);
+ iter::once(retptr)
+ .chain(args.into_iter())
+ .chain(mir.vars_and_temps_iter().map(allocate_local))
+ .collect()
};
// Branch to the START block
None
};
- mir.arg_decls.iter().enumerate().map(|(arg_index, arg_decl)| {
+ mir.args_iter().enumerate().map(|(arg_index, local)| {
+ let arg_decl = &mir.local_decls[local];
let arg_ty = bcx.monomorphize(&arg_decl.ty);
- let local = mir.local_index(&mir::Lvalue::Arg(mir::Arg::new(arg_index))).unwrap();
- if arg_decl.spread {
+
+ if Some(local) == mir.spread_arg {
// This argument (e.g. the last argument in the "rust-call" ABI)
// is a tuple that was spread at the ABI level and now we have
// to reconstruct it into a tuple local variable, from multiple
bcx.with_block(|bcx| arg_scope.map(|scope| {
// Is this a regular argument?
if arg_index > 0 || mir.upvar_decls.is_empty() {
- declare_local(bcx, arg_decl.debug_name, arg_ty, scope,
- VariableAccess::DirectVariable { alloca: llval },
+ declare_local(bcx, arg_decl.name.unwrap_or(keywords::Invalid.name()), arg_ty,
+ scope, VariableAccess::DirectVariable { alloca: llval },
VariableKind::ArgumentVariable(arg_index + 1),
bcx.fcx().span.unwrap_or(DUMMY_SP));
return;
// watch out for locals that do not have an
// alloca; they are handled somewhat differently
- if let Some(index) = self.mir.local_index(lvalue) {
+ if let mir::Lvalue::Local(index) = *lvalue {
match self.locals[index] {
LocalRef::Operand(Some(o)) => {
return o;
// Moves out of pair fields are trivial.
if let &mir::Lvalue::Projection(ref proj) = lvalue {
- if let Some(index) = self.mir.local_index(&proj.base) {
+ if let mir::Lvalue::Local(index) = proj.base {
if let LocalRef::Operand(Some(o)) = self.locals[index] {
match (o.val, &proj.elem) {
(OperandValue::Pair(a, b),
mir::Rvalue::Aggregate(ref kind, ref operands) => {
match *kind {
mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => {
- let repr = adt::represent_type(bcx.ccx(), dest.ty.to_ty(bcx.tcx()));
let disr = Disr::from(adt_def.variants[variant_index].disr_val);
bcx.with_block(|bcx| {
- adt::trans_set_discr(bcx, &repr, dest.llval, Disr::from(disr));
+ adt::trans_set_discr(bcx,
+ dest.ty.to_ty(bcx.tcx()), dest.llval, Disr::from(disr));
});
for (i, operand) in operands.iter().enumerate() {
let op = self.trans_operand(&bcx, operand);
if !common::type_is_zero_size(bcx.ccx(), op.ty) {
let val = adt::MaybeSizedValue::sized(dest.llval);
let field_index = active_field_index.unwrap_or(i);
- let lldest_i = adt::trans_field_ptr_builder(&bcx, &repr, val,
- disr, field_index);
+ let lldest_i = adt::trans_field_ptr_builder(&bcx,
+ dest.ty.to_ty(bcx.tcx()),
+ val, disr, field_index);
self.store_operand(&bcx, lldest_i, op);
}
}
let ll_t_in = type_of::immediate_type_of(bcx.ccx(), operand.ty);
let ll_t_out = type_of::immediate_type_of(bcx.ccx(), cast_ty);
let (llval, signed) = if let CastTy::Int(IntTy::CEnum) = r_t_in {
- let repr = adt::represent_type(bcx.ccx(), operand.ty);
+ let l = bcx.ccx().layout_of(operand.ty);
let discr = match operand.val {
OperandValue::Immediate(llval) => llval,
OperandValue::Ref(llptr) => {
bcx.with_block(|bcx| {
- adt::trans_get_discr(bcx, &repr, llptr, None, true)
+ adt::trans_get_discr(bcx, operand.ty, llptr, None, true)
})
}
OperandValue::Pair(..) => bug!("Unexpected Pair operand")
};
- (discr, adt::is_discr_signed(&repr))
+ (discr, adt::is_discr_signed(&l))
} else {
(operand.immediate(), operand.ty.is_signed())
};
debug_loc.apply(bcx.fcx());
match statement.kind {
mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
- if let Some(index) = self.mir.local_index(lvalue) {
+ if let mir::Lvalue::Local(index) = *lvalue {
match self.locals[index] {
LocalRef::Lvalue(tr_dest) => {
self.trans_rvalue(bcx, tr_dest, rvalue, debug_loc)
}
mir::StatementKind::SetDiscriminant{ref lvalue, variant_index} => {
let ty = self.monomorphized_lvalue_ty(lvalue);
- let repr = adt::represent_type(bcx.ccx(), ty);
let lvalue_transed = self.trans_lvalue(&bcx, lvalue);
bcx.with_block(|bcx|
adt::trans_set_discr(bcx,
- &repr,
+ ty,
lvalue_transed.llval,
Disr::from(variant_index))
);
mir::StatementKind::StorageDead(ref lvalue) => {
self.trans_storage_liveness(bcx, lvalue, base::Lifetime::End)
}
+ mir::StatementKind::Nop => bcx,
}
}
lvalue: &mir::Lvalue<'tcx>,
intrinsic: base::Lifetime)
-> BlockAndBuilder<'bcx, 'tcx> {
- if let Some(index) = self.mir.local_index(lvalue) {
+ if let mir::Lvalue::Local(index) = *lvalue {
if let LocalRef::Lvalue(tr_lval) = self.locals[index] {
intrinsic.call(&bcx, tr_lval.llval);
}
use syntax::ast::NodeId;
use syntax::parse::token::{self, InternedString};
use trans_item::TransItem;
-use util::nodemap::{FnvHashMap, FnvHashSet, NodeSet};
+use util::nodemap::{FnvHashMap, FnvHashSet};
pub enum PartitioningStrategy {
/// Generate one codegen unit per source-level module.
pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
trans_items: I,
strategy: PartitioningStrategy,
- inlining_map: &InliningMap<'tcx>,
- reachable: &NodeSet)
+ inlining_map: &InliningMap<'tcx>)
-> Vec<CodegenUnit<'tcx>>
where I: Iterator<Item = TransItem<'tcx>>
{
let tcx = scx.tcx();
- if let PartitioningStrategy::FixedUnitCount(1) = strategy {
- // If there is only a single codegen-unit, we can use a very simple
- // scheme and don't have to bother with doing much analysis.
- return vec![single_codegen_unit(tcx, trans_items, reachable)];
- }
-
// In the first step, we place all regular translation items into their
// respective 'home' codegen unit. Regular translation items are all
// functions and statics defined in the local crate.
let mut initial_partitioning = place_root_translation_items(scx,
- trans_items,
- reachable);
+ trans_items);
debug_dump(tcx, "INITIAL PARTITONING:", initial_partitioning.codegen_units.iter());
struct PostInliningPartitioning<'tcx>(Vec<CodegenUnit<'tcx>>);
fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
- trans_items: I,
- _reachable: &NodeSet)
+ trans_items: I)
-> PreInliningPartitioning<'tcx>
where I: Iterator<Item = TransItem<'tcx>>
{
let mut codegen_units = FnvHashMap();
for trans_item in trans_items {
- let is_root = !trans_item.is_instantiated_only_on_demand();
+ let is_root = !trans_item.is_instantiated_only_on_demand(tcx);
if is_root {
let characteristic_def_id = characteristic_def_id_of_trans_item(scx, trans_item);
// This is a non-generic functions, we always
// make it visible externally on the chance that
// it might be used in another codegen unit.
+ // Later on base::internalize_symbols() will
+ // assign "internal" linkage to those symbols
+ // that are not referenced from other codegen
+ // units (and are not publicly visible).
llvm::ExternalLinkage
} else {
// In the current setup, generic functions cannot
// reliably in that case.
new_codegen_unit.items.insert(trans_item, llvm::InternalLinkage);
} else {
- assert!(trans_item.is_instantiated_only_on_demand());
// We can't be sure if this will also be instantiated
// somewhere else, so we add an instance here with
// InternalLinkage so we don't get any conflicts.
return token::intern_and_get_ident(&mod_path[..]);
}
-fn single_codegen_unit<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- trans_items: I,
- reachable: &NodeSet)
- -> CodegenUnit<'tcx>
- where I: Iterator<Item = TransItem<'tcx>>
-{
- let mut items = FnvHashMap();
-
- for trans_item in trans_items {
- let linkage = trans_item.explicit_linkage(tcx).unwrap_or_else(|| {
- match trans_item {
- TransItem::Static(node_id) => {
- if reachable.contains(&node_id) {
- llvm::ExternalLinkage
- } else {
- llvm::PrivateLinkage
- }
- }
- TransItem::DropGlue(_) => {
- llvm::InternalLinkage
- }
- TransItem::Fn(instance) => {
- if trans_item.is_generic_fn() {
- // FIXME(mw): Assigning internal linkage to all
- // monomorphizations is potentially a waste of space
- // since monomorphizations could be shared between
- // crates. The main reason for making them internal is
- // a limitation in MingW's binutils that cannot deal
- // with COFF object that have more than 2^15 sections,
- // which is something that can happen for large programs
- // when every function gets put into its own COMDAT
- // section.
- llvm::InternalLinkage
- } else if trans_item.is_from_extern_crate() {
- // FIXME(mw): It would be nice if we could mark these as
- // `AvailableExternallyLinkage`, since they should have
- // been instantiated in the extern crate. But this
- // sometimes leads to crashes on Windows because LLVM
- // does not handle exception handling table instantiation
- // reliably in that case.
- llvm::InternalLinkage
- } else if reachable.contains(&tcx.map
- .as_local_node_id(instance.def)
- .unwrap()) {
- llvm::ExternalLinkage
- } else {
- // Functions that are not visible outside this crate can
- // be marked as internal.
- llvm::InternalLinkage
- }
- }
- }
- });
-
- items.insert(trans_item, linkage);
- }
-
- CodegenUnit::new(
- numbered_codegen_unit_name(&tcx.crate_name[..], 0),
- items)
-}
-
fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {
token::intern_and_get_ident(&format!("{}{}{}",
crate_name,
}
}
- pub fn requests_inline(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
- match *self {
- TransItem::Fn(ref instance) => {
- instance.substs.types().next().is_some() || {
- let attributes = tcx.get_attrs(instance.def);
- attr::requests_inline(&attributes[..])
- }
- }
- TransItem::DropGlue(..) => true,
- TransItem::Static(..) => false,
- }
- }
-
pub fn is_from_extern_crate(&self) -> bool {
match *self {
TransItem::Fn(ref instance) => !instance.def.is_local(),
}
}
- pub fn is_instantiated_only_on_demand(&self) -> bool {
+ /// True if the translation item should only be translated to LLVM IR if
+ /// it is referenced somewhere (like inline functions, for example).
+ pub fn is_instantiated_only_on_demand(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+ if self.explicit_linkage(tcx).is_some() {
+ return false;
+ }
+
match *self {
TransItem::Fn(ref instance) => {
- !instance.def.is_local() || instance.substs.types().next().is_some()
+ !instance.def.is_local() ||
+ instance.substs.types().next().is_some() ||
+ attr::requests_inline(&tcx.get_attrs(instance.def)[..])
}
TransItem::DropGlue(..) => true,
TransItem::Static(..) => false,
}
}
+ /// Returns true if there has to be a local copy of this TransItem in every
+ /// codegen unit that references it (as with inlined functions, for example)
+ pub fn needs_local_copy(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
+ // Currently everything that is instantiated only on demand is done so
+ // with "internal" linkage, so we need a copy to be present in every
+ // codegen unit.
+ // This is coincidental: We could also instantiate something only if it
+ // is referenced (e.g. a regular, private function) but place it in its
+ // own codegen unit with "external" linkage.
+ self.is_instantiated_only_on_demand(tcx)
+ }
+
pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
let def_id = match *self {
TransItem::Fn(ref instance) => instance.def,
use util::nodemap::FnvHashMap;
use syntax::ast;
+use rustc::ty::layout;
use std::ffi::CString;
use std::fmt;
llvm::LLVMGetIntTypeWidth(self.to_ref()) as u64
}
}
+
+ pub fn from_integer(cx: &CrateContext, i: layout::Integer) -> Type {
+ use rustc::ty::layout::Integer::*;
+ match i {
+ I1 => Type::i1(cx),
+ I8 => Type::i8(cx),
+ I16 => Type::i16(cx),
+ I32 => Type::i32(cx),
+ I64 => Type::i64(cx),
+ }
+ }
+
+ pub fn from_primitive(ccx: &CrateContext, p: layout::Primitive) -> Type {
+ match p {
+ layout::Int(i) => Type::from_integer(ccx, i),
+ layout::F32 => Type::f32(ccx),
+ layout::F64 => Type::f64(ccx),
+ layout::Pointer => bug!("It is not possible to convert Pointer directly to Type.")
+ }
+ }
}
/* Memory-managed object interface to type handles. */
use syntax::ast;
-// LLVM doesn't like objects that are too big. Issue #17913
-fn ensure_array_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- llet: Type,
- size: machine::llsize,
- scapegoat: Ty<'tcx>) {
- let esz = machine::llsize_of_alloc(ccx, llet);
- match esz.checked_mul(size) {
- Some(n) if n < ccx.obj_size_bound() => {}
- _ => { ccx.report_overbig_object(scapegoat) }
- }
-}
// A "sizing type" is an LLVM type, the size and alignment of which are
// guaranteed to be equivalent to what you would get out of `type_of()`. It's
ty::TyArray(ty, size) => {
let llty = sizing_type_of(cx, ty);
let size = size as u64;
- ensure_array_fits_in_address_space(cx, llty, size, t);
Type::array(&llty, size)
}
}
let llet = type_of(cx, e);
let n = t.simd_size(cx.tcx()) as u64;
- ensure_array_fits_in_address_space(cx, llet, n, t);
Type::vector(&llet, n)
}
ty::TyTuple(..) | ty::TyAdt(..) | ty::TyClosure(..) => {
- let repr = adt::represent_type(cx, t);
- adt::sizing_type_of(cx, &repr, false)
+ adt::sizing_type_of(cx, t, false)
}
ty::TyProjection(..) | ty::TyInfer(..) | ty::TyParam(..) |
ty::TyClosure(..) => {
// Only create the named struct, but don't fill it in. We
// fill it in *after* placing it into the type cache.
- let repr = adt::represent_type(cx, t);
- adt::incomplete_type_of(cx, &repr, "closure")
+ adt::incomplete_type_of(cx, t, "closure")
}
ty::TyBox(ty) |
ty::TyArray(ty, size) => {
let size = size as u64;
- // we must use `sizing_type_of` here as the type may
- // not be fully initialized.
- let szty = sizing_type_of(cx, ty);
- ensure_array_fits_in_address_space(cx, szty, size, t);
-
let llty = in_memory_type_of(cx, ty);
Type::array(&llty, size)
}
}
ty::TyTuple(ref tys) if tys.is_empty() => Type::nil(cx),
ty::TyTuple(..) => {
- let repr = adt::represent_type(cx, t);
- adt::type_of(cx, &repr)
+ adt::type_of(cx, t)
}
ty::TyAdt(..) if t.is_simd() => {
let e = t.simd_type(cx.tcx());
}
let llet = in_memory_type_of(cx, e);
let n = t.simd_size(cx.tcx()) as u64;
- ensure_array_fits_in_address_space(cx, llet, n, t);
Type::vector(&llet, n)
}
ty::TyAdt(def, substs) => {
// fill it in *after* placing it into the type cache. This
// avoids creating more than one copy of the enum when one
// of the enum's variants refers to the enum itself.
- let repr = adt::represent_type(cx, t);
let name = llvm_type_name(cx, def.did, substs);
- adt::incomplete_type_of(cx, &repr, &name[..])
+ adt::incomplete_type_of(cx, t, &name[..])
}
ty::TyInfer(..) |
// If this was an enum or struct, fill in the type now.
match t.sty {
ty::TyAdt(..) | ty::TyClosure(..) if !t.is_simd() => {
- let repr = adt::represent_type(cx, t);
- adt::finish_type_of(cx, &repr, &mut llty);
+ adt::finish_type_of(cx, t, &mut llty);
}
_ => ()
}
pub fn align_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>)
-> machine::llalign {
- let llty = sizing_type_of(cx, t);
- machine::llalign_of_min(cx, llty)
+ let layout = cx.layout_of(t);
+ layout.align(&cx.tcx().data_layout).abi() as machine::llalign
}
fn llvm_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
format!("{}<{}>", base, strings.join(", "))
};
- if did.krate == 0 {
+ if did.is_local() {
tstr
} else {
format!("{}.{}", did.krate, tstr)
// For now, require that parenthetical notation be used
// only with `Fn()` etc.
if !self.tcx().sess.features.borrow().unboxed_closures && trait_def.paren_sugar {
- emit_feature_err(&self.tcx().sess.parse_sess.span_diagnostic,
+ emit_feature_err(&self.tcx().sess.parse_sess,
"unboxed_closures", span, GateIssue::Language,
"\
the precise format of `Fn`-family traits' \
// For now, require that parenthetical notation be used
// only with `Fn()` etc.
if !self.tcx().sess.features.borrow().unboxed_closures && !trait_def.paren_sugar {
- emit_feature_err(&self.tcx().sess.parse_sess.span_diagnostic,
+ emit_feature_err(&self.tcx().sess.parse_sess,
"unboxed_closures", span, GateIssue::Language,
"\
parenthetical notation is only stable when used with `Fn`-family traits");
return tcx.types.err;
}
- let mut associated_types: FnvHashSet<(DefId, ast::Name)> =
- traits::supertraits(tcx, principal)
- .flat_map(|tr| {
- let trait_def = tcx.lookup_trait_def(tr.def_id());
- trait_def.associated_type_names
- .clone()
- .into_iter()
- .map(move |associated_type_name| (tr.def_id(), associated_type_name))
- })
- .collect();
+ let mut associated_types = FnvHashSet::default();
+ for tr in traits::supertraits(tcx, principal) {
+ if let Some(trait_id) = tcx.map.as_local_node_id(tr.def_id()) {
+ use collect::trait_associated_type_names;
+
+ associated_types.extend(trait_associated_type_names(tcx, trait_id)
+ .map(|name| (tr.def_id(), name)))
+ } else {
+ let trait_items = tcx.impl_or_trait_items(tr.def_id());
+ associated_types.extend(trait_items.iter().filter_map(|&def_id| {
+ match tcx.impl_or_trait_item(def_id) {
+ ty::TypeTraitItem(ref item) => Some(item.name),
+ _ => None
+ }
+ }).map(|name| (tr.def_id(), name)));
+ }
+ }
for projection_bound in &projection_bounds {
let pair = (projection_bound.0.projection_ty.trait_ref.def_id,
// Find the type of the associated item, and the trait where the associated
// item is declared.
let bound = match (&ty.sty, ty_path_def) {
- (_, Def::SelfTy(Some(trait_did), Some(impl_id))) => {
- // For Def::SelfTy() values inlined from another crate, the
- // impl_id will be DUMMY_NODE_ID, which would cause problems
- // here. But we should never run into an impl from another crate
- // in this pass.
- assert!(impl_id != ast::DUMMY_NODE_ID);
-
+ (_, Def::SelfTy(Some(_), Some(impl_def_id))) => {
// `Self` in an impl of a trait - we have a concrete self type and a
// trait reference.
- let trait_ref = tcx.impl_trait_ref(tcx.map.local_def_id(impl_id)).unwrap();
+ let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap();
let trait_ref = if let Some(free_substs) = self.get_free_substs() {
trait_ref.subst(tcx, free_substs)
} else {
trait_ref
};
- if self.ensure_super_predicates(span, trait_did).is_err() {
+ if self.ensure_super_predicates(span, trait_ref.def_id).is_err() {
return (tcx.types.err, Def::Err);
}
item.expect("missing associated type").def_id()
};
- (ty, Def::AssociatedTy(trait_did, item_did))
+ (ty, Def::AssociatedTy(item_did))
}
fn qpath_to_ty(&self,
tcx.types.err
}
}
- Def::SelfTy(_, Some(impl_id)) => {
+ Def::SelfTy(_, Some(def_id)) => {
// Self in impl (we know the concrete type).
- // For Def::SelfTy() values inlined from another crate, the
- // impl_id will be DUMMY_NODE_ID, which would cause problems
- // here. But we should never run into an impl from another crate
- // in this pass.
- assert!(impl_id != ast::DUMMY_NODE_ID);
-
tcx.prohibit_type_params(base_segments);
+ let impl_id = tcx.map.as_local_node_id(def_id).unwrap();
let ty = tcx.node_id_to_type(impl_id);
if let Some(free_substs) = self.get_free_substs() {
ty.subst(tcx, free_substs)
tcx.prohibit_type_params(base_segments);
tcx.mk_self_type()
}
- Def::AssociatedTy(trait_did, _) => {
+ Def::AssociatedTy(def_id) => {
tcx.prohibit_type_params(&base_segments[..base_segments.len()-2]);
+ let trait_did = tcx.parent_def_id(def_id).unwrap();
self.qpath_to_ty(rscope,
span,
param_mode,
// what the type of the binding `x` ought to be
match tcx.expect_def(pat.id) {
Def::Err => {}
- Def::Local(_, var_id) => {
+ Def::Local(def_id) => {
+ let var_id = tcx.map.as_local_node_id(def_id).unwrap();
if var_id != pat.id {
let vt = self.local_ty(pat.span, var_id);
self.demand_eqtype(pat.span, vt, typ);
for &Spanned { node: ref field, span } in fields {
let field_ty = match used_fields.entry(field.name) {
Occupied(occupied) => {
- let mut err = struct_span_err!(tcx.sess, span, E0025,
- "field `{}` bound multiple times \
- in the pattern",
- field.name);
- span_note!(&mut err, *occupied.get(),
- "field `{}` previously bound here",
- field.name);
- err.emit();
+ struct_span_err!(tcx.sess, span, E0025,
+ "field `{}` bound multiple times \
+ in the pattern",
+ field.name)
+ .span_label(span,
+ &format!("multiple uses of `{}` in pattern", field.name))
+ .span_label(*occupied.get(), &format!("first use of `{}`", field.name))
+ .emit();
tcx.types.err
}
Vacant(vacant) => {
TupleArgumentsFlag};
use CrateCtxt;
-use middle::cstore::LOCAL_CRATE;
use hir::def::Def;
-use hir::def_id::DefId;
+use hir::def_id::{DefId, LOCAL_CRATE};
use rustc::{infer, traits};
use rustc::ty::{self, LvaluePreference, Ty};
use syntax::parse::token;
_ => bug!("{:?} is not a method", impl_m)
};
- struct_span_err!(tcx.sess, span, E0049,
+ let mut err = struct_span_err!(tcx.sess, span, E0049,
"method `{}` has {} type parameter{} \
but its trait declaration has {} type parameter{}",
trait_m.name,
num_impl_m_type_params,
if num_impl_m_type_params == 1 {""} else {"s"},
num_trait_m_type_params,
- if num_trait_m_type_params == 1 {""} else {"s"})
- .span_label(trait_item_span.unwrap(),
- &format!("expected {}",
- &if num_trait_m_type_params != 1 {
- format!("{} type parameters",
- num_trait_m_type_params)
- } else {
- format!("{} type parameter",
- num_trait_m_type_params)
- }))
- .span_label(span, &format!("found {}",
- &if num_impl_m_type_params != 1 {
- format!("{} type parameters", num_impl_m_type_params)
- } else {
- format!("1 type parameter")
- }))
- .emit();
+ if num_trait_m_type_params == 1 {""} else {"s"});
+
+ let mut suffix = None;
+
+ if let Some(span) = trait_item_span {
+ err.span_label(span,
+ &format!("expected {}",
+ &if num_trait_m_type_params != 1 {
+ format!("{} type parameters", num_trait_m_type_params)
+ } else {
+ format!("{} type parameter", num_trait_m_type_params)
+ }));
+ } else {
+ suffix = Some(format!(", expected {}", num_trait_m_type_params));
+ }
+
+ err.span_label(span,
+ &format!("found {}{}",
+ &if num_impl_m_type_params != 1 {
+ format!("{} type parameters", num_impl_m_type_params)
+ } else {
+ format!("1 type parameter")
+ },
+ suffix.as_ref().map(|s| &s[..]).unwrap_or("")));
+
+ err.emit();
+
return;
}
if impl_m.fty.sig.0.inputs.len() != trait_m.fty.sig.0.inputs.len() {
- span_err!(tcx.sess, impl_m_span, E0050,
+ let trait_number_args = trait_m.fty.sig.0.inputs.len();
+ let impl_number_args = impl_m.fty.sig.0.inputs.len();
+ let trait_m_node_id = tcx.map.as_local_node_id(trait_m.def_id);
+ let trait_span = if let Some(trait_id) = trait_m_node_id {
+ match tcx.map.expect_trait_item(trait_id).node {
+ TraitItem_::MethodTraitItem(ref trait_m_sig, _) => {
+ if let Some(arg) = trait_m_sig.decl.inputs.get(
+ if trait_number_args > 0 {
+ trait_number_args - 1
+ } else {
+ 0
+ }) {
+ Some(arg.pat.span)
+ } else {
+ trait_item_span
+ }
+ }
+ _ => bug!("{:?} is not a method", impl_m)
+ }
+ } else {
+ trait_item_span
+ };
+ let impl_m_node_id = tcx.map.as_local_node_id(impl_m.def_id).unwrap();
+ let impl_span = match tcx.map.expect_impl_item(impl_m_node_id).node {
+ ImplItemKind::Method(ref impl_m_sig, _) => {
+ if let Some(arg) = impl_m_sig.decl.inputs.get(
+ if impl_number_args > 0 {
+ impl_number_args - 1
+ } else {
+ 0
+ }) {
+ arg.pat.span
+ } else {
+ impl_m_span
+ }
+ }
+ _ => bug!("{:?} is not a method", impl_m)
+ };
+ let mut err = struct_span_err!(tcx.sess, impl_span, E0050,
"method `{}` has {} parameter{} \
but the declaration in trait `{}` has {}",
trait_m.name,
- impl_m.fty.sig.0.inputs.len(),
- if impl_m.fty.sig.0.inputs.len() == 1 {""} else {"s"},
+ impl_number_args,
+ if impl_number_args == 1 {""} else {"s"},
tcx.item_path_str(trait_m.def_id),
- trait_m.fty.sig.0.inputs.len());
+ trait_number_args);
+ if let Some(trait_span) = trait_span {
+ err.span_label(trait_span,
+ &format!("trait requires {}",
+ &if trait_number_args != 1 {
+ format!("{} parameters", trait_number_args)
+ } else {
+ format!("{} parameter", trait_number_args)
+ }));
+ }
+ err.span_label(impl_span,
+ &format!("expected {}, found {}",
+ &if trait_number_args != 1 {
+ format!("{} parameters", trait_number_args)
+ } else {
+ format!("{} parameter", trait_number_args)
+ },
+ impl_number_args));
+ err.emit();
return;
}
assoc::normalize_associated_types_in(&infcx,
&mut fulfillment_cx,
impl_c_span,
- 0,
+ ast::CRATE_NODE_ID,
&impl_ty);
debug!("compare_const_impl: impl_ty={:?}",
assoc::normalize_associated_types_in(&infcx,
&mut fulfillment_cx,
impl_c_span,
- 0,
+ ast::CRATE_NODE_ID,
&trait_ty);
debug!("compare_const_impl: trait_ty={:?}",
span_err!(tcx.sess, it.span, E0444,
"platform-specific intrinsic has invalid number of \
arguments: found {}, expected {}",
- intr.inputs.len(), sig.inputs.len());
+ sig.inputs.len(), intr.inputs.len());
return
}
let input_pairs = intr.inputs.iter().zip(&sig.inputs);
// Trait must have a method named `m_name` and it should not have
// type parameters or early-bound regions.
let tcx = self.tcx;
- let method_item = self.trait_item(trait_def_id, m_name).unwrap();
+ let method_item = self.impl_or_trait_item(trait_def_id, m_name).unwrap();
let method_ty = method_item.as_opt_method().unwrap();
assert_eq!(method_ty.generics.types.len(), 0);
assert_eq!(method_ty.generics.regions.len(), 0);
Ok(def)
}
- /// Find item with name `item_name` defined in `trait_def_id`
- /// and return it, or `None`, if no such item.
- pub fn trait_item(&self,
- trait_def_id: DefId,
- item_name: ast::Name)
- -> Option<ty::ImplOrTraitItem<'tcx>>
+ /// Find item with name `item_name` defined in impl/trait `def_id`
+ /// and return it, or `None`, if no such item was defined there.
+ pub fn impl_or_trait_item(&self,
+ def_id: DefId,
+ item_name: ast::Name)
+ -> Option<ty::ImplOrTraitItem<'tcx>>
{
- let trait_items = self.tcx.trait_items(trait_def_id);
- trait_items.iter()
- .find(|item| item.name() == item_name)
- .cloned()
- }
-
- pub fn impl_item(&self,
- impl_def_id: DefId,
- item_name: ast::Name)
- -> Option<ty::ImplOrTraitItem<'tcx>>
- {
- let impl_items = self.tcx.impl_items.borrow();
- let impl_items = impl_items.get(&impl_def_id).unwrap();
- impl_items
+ self.tcx.impl_or_trait_items(def_id)
.iter()
- .map(|&did| self.tcx.impl_or_trait_item(did.def_id()))
+ .map(|&did| self.tcx.impl_or_trait_item(did))
.find(|m| m.name() == item_name)
}
}
debug!("assemble_inherent_impl_probe {:?}", impl_def_id);
- let item = match self.impl_item(impl_def_id) {
+ let item = match self.impl_or_trait_item(impl_def_id) {
Some(m) => m,
None => { return; } // No method with correct name on this impl
};
let tcx = self.tcx;
for bound_trait_ref in traits::transitive_bounds(tcx, bounds) {
- let item = match self.trait_item(bound_trait_ref.def_id()) {
+ let item = match self.impl_or_trait_item(bound_trait_ref.def_id()) {
Some(v) => v,
None => { continue; }
};
self.tcx.erase_late_bound_regions(value)
}
- fn impl_item(&self, impl_def_id: DefId)
- -> Option<ty::ImplOrTraitItem<'tcx>>
+ /// Find item with name `item_name` defined in impl/trait `def_id`
+ /// and return it, or `None`, if no such item was defined there.
+ fn impl_or_trait_item(&self, def_id: DefId)
+ -> Option<ty::ImplOrTraitItem<'tcx>>
{
- self.fcx.impl_item(impl_def_id, self.item_name)
- }
-
- /// Find item with name `item_name` defined in `trait_def_id`
- /// and return it, or `None`, if no such item.
- fn trait_item(&self, trait_def_id: DefId)
- -> Option<ty::ImplOrTraitItem<'tcx>>
- {
- self.fcx.trait_item(trait_def_id, self.item_name)
+ self.fcx.impl_or_trait_item(def_id, self.item_name)
}
}
use check::{FnCtxt};
use rustc::hir::map as hir_map;
use rustc::ty::{self, Ty, ToPolyTraitRef, ToPredicate, TypeFoldable};
-use middle::cstore;
use hir::def::Def;
-use hir::def_id::DefId;
+use hir::def_id::{CRATE_DEF_INDEX, DefId};
use middle::lang_items::FnOnceTraitLangItem;
use rustc::ty::subst::Substs;
use rustc::traits::{Obligation, SelectionContext};
CandidateSource::ImplSource(impl_did) => {
// Provide the best span we can. Use the item, if local to crate, else
// the impl, if local to crate (item may be defaulted), else nothing.
- let item = self.impl_item(impl_did, item_name)
+ let item = self.impl_or_trait_item(impl_did, item_name)
.or_else(|| {
- self.trait_item(
+ self.impl_or_trait_item(
self.tcx.impl_trait_ref(impl_did).unwrap().def_id,
item_name
}
}
CandidateSource::TraitSource(trait_did) => {
- let item = self.trait_item(trait_did, item_name).unwrap();
+ let item = self.impl_or_trait_item(trait_did, item_name).unwrap();
let item_span = self.tcx.map.def_id_span(item.def_id(), span);
span_note!(err, item_span,
"candidate #{} is defined in the trait `{}`",
// implementing a trait would be legal but is rejected
// here).
(type_is_local || info.def_id.is_local())
- && self.trait_item(info.def_id, item_name).is_some()
+ && self.impl_or_trait_item(info.def_id, item_name).is_some()
})
.collect::<Vec<_>>();
// Cross-crate:
let mut external_mods = FnvHashSet();
- fn handle_external_def(traits: &mut AllTraitsVec,
+ fn handle_external_def(ccx: &CrateCtxt,
+ traits: &mut AllTraitsVec,
external_mods: &mut FnvHashSet<DefId>,
- ccx: &CrateCtxt,
- cstore: &for<'a> cstore::CrateStore<'a>,
- dl: cstore::DefLike) {
- match dl {
- cstore::DlDef(Def::Trait(did)) => {
- traits.push(TraitInfo::new(did));
+ def_id: DefId) {
+ match ccx.tcx.sess.cstore.describe_def(def_id) {
+ Some(Def::Trait(_)) => {
+ traits.push(TraitInfo::new(def_id));
}
- cstore::DlDef(Def::Mod(did)) => {
- if !external_mods.insert(did) {
+ Some(Def::Mod(_)) => {
+ if !external_mods.insert(def_id) {
return;
}
- for child in cstore.item_children(did) {
- handle_external_def(traits, external_mods,
- ccx, cstore, child.def)
+ for child in ccx.tcx.sess.cstore.item_children(def_id) {
+ handle_external_def(ccx, traits, external_mods, child.def_id)
}
}
_ => {}
}
}
- let cstore = &*ccx.tcx.sess.cstore;
-
for cnum in ccx.tcx.sess.cstore.crates() {
- for child in cstore.crate_top_level_items(cnum) {
- handle_external_def(&mut traits, &mut external_mods,
- ccx, cstore, child.def)
- }
+ handle_external_def(ccx, &mut traits, &mut external_mods, DefId {
+ krate: cnum,
+ index: CRATE_DEF_INDEX
+ });
}
*ccx.all_traits.borrow_mut() = Some(traits);
use astconv::{AstConv, ast_region_to_region, PathParamMode};
use dep_graph::DepNode;
use fmt_macros::{Parser, Piece, Position};
-use middle::cstore::LOCAL_CRATE;
use hir::def::{Def, PathResolution};
-use hir::def_id::DefId;
+use hir::def_id::{DefId, LOCAL_CRATE};
use hir::pat_util;
use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin, TypeTrace, type_variable};
use rustc::ty::subst::{Subst, Substs};
let mut err = struct_span_err!(
tcx.sess, impl_item.span, E0520,
"`{}` specializes an item from a parent `impl`, but \
- neither that item nor the `impl` are marked `default`",
+ that item is not marked `default`",
impl_item.name);
err.span_label(impl_item.span, &format!("cannot specialize default item `{}`",
impl_item.name));
match tcx.span_of_impl(parent_impl) {
Ok(span) => {
err.span_label(span, &"parent `impl` is here");
- err.note(&format!("to specialize, either the parent `impl` or `{}` \
- in the parent `impl` must be marked `default`",
+ err.note(&format!("to specialize, `{}` in the parent `impl` must be marked `default`",
impl_item.name));
}
Err(cname) => {
assoc_name: ast::Name)
-> bool
{
- let trait_def = self.tcx().lookup_trait_def(trait_def_id);
- trait_def.associated_type_names.contains(&assoc_name)
+ self.tcx().impl_or_trait_items(trait_def_id).iter().any(|&def_id| {
+ match self.tcx().impl_or_trait_item(def_id) {
+ ty::TypeTraitItem(ref item) => item.name == assoc_name,
+ _ => false
+ }
+ })
}
fn ty_infer(&self, _span: Span) -> Ty<'tcx> {
writeback_errors: Cell::new(false),
err_count_on_creation: inh.tcx.sess.err_count(),
ret_ty: rty,
- ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)),
+ ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal,
+ ast::CRATE_NODE_ID)),
inh: inh,
}
}
match self.locals.borrow().get(&nid) {
Some(&t) => t,
None => {
- span_err!(self.tcx.sess, span, E0513,
- "no type for local variable {}",
- nid);
+ struct_span_err!(self.tcx.sess, span, E0513,
+ "no type for local variable {}",
+ self.tcx.map.node_to_string(nid))
+ .span_label(span, &"no type for variable")
+ .emit();
self.tcx.types.err
}
}
.unwrap_or(type_variable::Default {
ty: self.next_ty_var(),
origin_span: syntax_pos::DUMMY_SP,
- def_id: self.tcx.map.local_def_id(0) // what do I put here?
+ // what do I put here?
+ def_id: self.tcx.map.local_def_id(ast::CRATE_NODE_ID)
});
// This is to ensure that we elimnate any non-determinism from the error
if let Some(field_name) = Self::suggest_field_name(variant,
&field.name,
skip_fields.collect()) {
- err.span_label(field.name.span,&format!("did you mean `{}`?",field_name));
+ err.span_label(field.name.span,
+ &format!("field does not exist - did you mean `{}`?", field_name));
+ } else {
+ match ty.sty {
+ ty::TyAdt(adt, ..) if adt.is_enum() => {
+ err.span_label(field.name.span, &format!("`{}::{}` does not have this field",
+ ty, variant.name.as_str()));
+ }
+ _ => {
+ err.span_label(field.name.span, &format!("`{}` does not have this field", ty));
+ }
+ }
};
err.emit();
}
self.set_tainted_by_errors();
return None;
}
- Def::Variant(type_did, _) | Def::Struct(type_did) | Def::Union(type_did) => {
+ Def::Variant(did) => {
+ let type_did = self.tcx.parent_def_id(did).unwrap();
+ Some((type_did, self.tcx.expect_variant_def(def)))
+ }
+ Def::Struct(type_did) | Def::Union(type_did) => {
Some((type_did, self.tcx.expect_variant_def(def)))
}
Def::TyAlias(did) => {
if let Some((def_id, variant)) = variant {
if variant.kind == ty::VariantKind::Tuple &&
!self.tcx.sess.features.borrow().relaxed_adts {
- emit_feature_err(&self.tcx.sess.parse_sess.span_diagnostic,
+ emit_feature_err(&self.tcx.sess.parse_sess,
"relaxed_adts", span, GateIssue::Language,
"tuple structs and variants in struct patterns are unstable");
}
// Case 1 and 1b. Reference to a *type* or *enum variant*.
Def::Struct(def_id) |
Def::Union(def_id) |
- Def::Variant(_, def_id) |
+ Def::Variant(def_id) |
Def::Enum(def_id) |
Def::TyAlias(def_id) |
- Def::AssociatedTy(_, def_id) |
+ Def::AssociatedTy(def_id) |
Def::Trait(def_id) => {
// Everything but the final segment should have no
// parameters at all.
// here. If they do, an error will have been reported
// elsewhere. (I hope)
Def::Mod(..) |
- Def::ForeignMod(..) |
Def::PrimTy(..) |
Def::SelfTy(..) |
Def::TyParam(..) |
self.tcx.prohibit_type_params(&segments[..segments.len() - poly_segments]);
match def {
- Def::Local(_, nid) | Def::Upvar(_, nid, ..) => {
+ Def::Local(def_id) | Def::Upvar(def_id, ..) => {
+ let nid = self.tcx.map.as_local_node_id(def_id).unwrap();
let ty = self.local_ty(span, nid);
let ty = self.normalize_associated_types_in(span, &ty);
self.write_ty(node_id, ty);
self.fcx.tcx.with_freevars(expr.id, |freevars| {
for freevar in freevars {
- let var_node_id = freevar.def.var_id();
+ let def_id = freevar.def.def_id();
+ let var_node_id = self.fcx.tcx.map.as_local_node_id(def_id).unwrap();
let upvar_id = ty::UpvarId { var_id: var_node_id,
closure_expr_id: expr.id };
debug!("seed upvar_id {:?}", upvar_id);
// implemented.
let tcx = self.fcx.tcx;
tcx.with_freevars(closure_id, |freevars| {
- freevars.iter()
- .map(|freevar| {
- let freevar_node_id = freevar.def.var_id();
- let freevar_ty = self.fcx.node_ty(freevar_node_id);
- let upvar_id = ty::UpvarId {
- var_id: freevar_node_id,
- closure_expr_id: closure_id
- };
- let capture = self.fcx.upvar_capture(upvar_id).unwrap();
-
- debug!("freevar_node_id={:?} freevar_ty={:?} capture={:?}",
- freevar_node_id, freevar_ty, capture);
-
- match capture {
- ty::UpvarCapture::ByValue => freevar_ty,
- ty::UpvarCapture::ByRef(borrow) =>
- tcx.mk_ref(borrow.region,
- ty::TypeAndMut {
- ty: freevar_ty,
- mutbl: borrow.kind.to_mutbl_lossy(),
- }),
- }
- })
- .collect()
- })
+ freevars.iter().map(|freevar| {
+ let def_id = freevar.def.def_id();
+ let var_id = tcx.map.as_local_node_id(def_id).unwrap();
+ let freevar_ty = self.fcx.node_ty(var_id);
+ let upvar_id = ty::UpvarId {
+ var_id: var_id,
+ closure_expr_id: closure_id
+ };
+ let capture = self.fcx.upvar_capture(upvar_id).unwrap();
+
+ debug!("var_id={:?} freevar_ty={:?} capture={:?}",
+ var_id, freevar_ty, capture);
+
+ match capture {
+ ty::UpvarCapture::ByValue => freevar_ty,
+ ty::UpvarCapture::ByRef(borrow) =>
+ tcx.mk_ref(borrow.region,
+ ty::TypeAndMut {
+ ty: freevar_ty,
+ mutbl: borrow.kind.to_mutbl_lossy(),
+ }),
+ }
+ }).collect()
+ })
}
fn adjust_upvar_borrow_kind_for_consume(&mut self,
use rustc::ty::subst::Subst;
use rustc::ty::{self, TyCtxt, TypeFoldable};
use rustc::traits::{self, Reveal};
-use rustc::ty::{ImplOrTraitItemId, ConstTraitItemId};
-use rustc::ty::{MethodTraitItemId, TypeTraitItemId, ParameterEnvironment};
+use rustc::ty::ParameterEnvironment;
use rustc::ty::{Ty, TyBool, TyChar, TyError};
use rustc::ty::{TyParam, TyRawPtr};
use rustc::ty::{TyRef, TyAdt, TyTrait, TyNever, TyTuple};
use rustc::hir::{Item, ItemImpl};
use rustc::hir;
+use std::rc::Rc;
+
mod orphan;
mod overlap;
mod unsafety;
-struct CoherenceChecker<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+struct CoherenceChecker<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
crate_context: &'a CrateCtxt<'a, 'gcx>,
inference_context: InferCtxt<'a, 'gcx, 'tcx>,
}
-struct CoherenceCheckVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- cc: &'a CoherenceChecker<'a, 'gcx, 'tcx>
+struct CoherenceCheckVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
+ cc: &'a CoherenceChecker<'a, 'gcx, 'tcx>,
}
impl<'a, 'gcx, 'tcx, 'v> intravisit::Visitor<'v> for CoherenceCheckVisitor<'a, 'gcx, 'tcx> {
}
impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> {
-
// Returns the def ID of the base type, if there is one.
fn get_base_type_def_id(&self, span: Span, ty: Ty<'tcx>) -> Option<DefId> {
match ty.sty {
- TyAdt(def, _) => {
- Some(def.did)
- }
+ TyAdt(def, _) => Some(def.did),
- TyTrait(ref t) => {
- Some(t.principal.def_id())
- }
+ TyTrait(ref t) => Some(t.principal.def_id()),
- TyBox(_) => {
- self.inference_context.tcx.lang_items.owned_box()
- }
+ TyBox(_) => self.inference_context.tcx.lang_items.owned_box(),
- TyBool | TyChar | TyInt(..) | TyUint(..) | TyFloat(..) |
- TyStr | TyArray(..) | TySlice(..) | TyFnDef(..) | TyFnPtr(_) |
- TyTuple(..) | TyParam(..) | TyError | TyNever |
- TyRawPtr(_) | TyRef(..) | TyProjection(..) => {
- None
- }
+ TyBool | TyChar | TyInt(..) | TyUint(..) | TyFloat(..) | TyStr | TyArray(..) |
+ TySlice(..) | TyFnDef(..) | TyFnPtr(_) | TyTuple(..) | TyParam(..) | TyError |
+ TyNever | TyRawPtr(_) | TyRef(..) | TyProjection(..) => None,
TyInfer(..) | TyClosure(..) | TyAnon(..) => {
// `ty` comes from a user declaration so we should only expect types
// that the user can type
- span_bug!(
- span,
- "coherence encountered unexpected type searching for base type: {}",
- ty);
+ span_bug!(span,
+ "coherence encountered unexpected type searching for base type: {}",
+ ty);
}
}
}
// Check implementations and traits. This populates the tables
// containing the inherent methods and extension methods. It also
// builds up the trait inheritance table.
- self.crate_context.tcx.visit_all_items_in_krate(
- DepNode::CoherenceCheckImpl,
- &mut CoherenceCheckVisitor { cc: self });
+ self.crate_context.tcx.visit_all_items_in_krate(DepNode::CoherenceCheckImpl,
+ &mut CoherenceCheckVisitor { cc: self });
// Populate the table of destructors. It might seem a bit strange to
// do this here, but it's actually the most convenient place, since
}
}
- tcx.impl_items.borrow_mut().insert(impl_did, impl_items);
+ tcx.impl_or_trait_item_def_ids.borrow_mut().insert(impl_did, Rc::new(impl_items));
}
fn add_inherent_impl(&self, base_def_id: DefId, impl_def_id: DefId) {
fn add_trait_impl(&self, impl_trait_ref: ty::TraitRef<'gcx>, impl_def_id: DefId) {
debug!("add_trait_impl: impl_trait_ref={:?} impl_def_id={:?}",
- impl_trait_ref, impl_def_id);
+ impl_trait_ref,
+ impl_def_id);
let trait_def = self.crate_context.tcx.lookup_trait_def(impl_trait_ref.def_id);
trait_def.record_local_impl(self.crate_context.tcx, impl_def_id, impl_trait_ref);
}
// Converts an implementation in the AST to a vector of items.
- fn create_impl_from_item(&self, item: &Item) -> Vec<ImplOrTraitItemId> {
+ fn create_impl_from_item(&self, item: &Item) -> Vec<DefId> {
match item.node {
ItemImpl(.., ref impl_items) => {
- impl_items.iter().map(|impl_item| {
- let impl_def_id = self.crate_context.tcx.map.local_def_id(impl_item.id);
- match impl_item.node {
- hir::ImplItemKind::Const(..) => {
- ConstTraitItemId(impl_def_id)
- }
- hir::ImplItemKind::Method(..) => {
- MethodTraitItemId(impl_def_id)
- }
- hir::ImplItemKind::Type(_) => {
- TypeTraitItemId(impl_def_id)
- }
- }
- }).collect()
+ impl_items.iter()
+ .map(|impl_item| self.crate_context.tcx.map.local_def_id(impl_item.id))
+ .collect()
}
_ => {
span_bug!(item.span, "can't convert a non-impl to an impl");
}
}
- //
// Destructors
//
fn populate_destructors(&self) {
let tcx = self.crate_context.tcx;
let drop_trait = match tcx.lang_items.drop_trait() {
- Some(id) => id, None => { return }
+ Some(id) => id,
+ None => return,
};
tcx.populate_implementations_for_trait_if_necessary(drop_trait);
let drop_trait = tcx.lookup_trait_def(drop_trait);
- let impl_items = tcx.impl_items.borrow();
+ let impl_items = tcx.impl_or_trait_item_def_ids.borrow();
drop_trait.for_each_impl(tcx, |impl_did| {
let items = impl_items.get(&impl_did).unwrap();
let self_type = tcx.lookup_item_type(impl_did);
match self_type.ty.sty {
ty::TyAdt(type_def, _) => {
- type_def.set_destructor(method_def_id.def_id());
+ type_def.set_destructor(method_def_id);
}
_ => {
// Destructors only work on nominal types.
match tcx.map.find(impl_node_id) {
Some(hir_map::NodeItem(item)) => {
let span = match item.node {
- ItemImpl(.., ref ty, _) => {
- ty.span
- },
- _ => item.span
+ ItemImpl(.., ref ty, _) => ty.span,
+ _ => item.span,
};
- struct_span_err!(tcx.sess, span, E0120,
- "the Drop trait may only be implemented on structures")
+ struct_span_err!(tcx.sess,
+ span,
+ E0120,
+ "the Drop trait may only be implemented on \
+ structures")
.span_label(span,
&format!("implementing Drop requires a struct"))
.emit();
let copy_trait = tcx.lookup_trait_def(copy_trait);
copy_trait.for_each_impl(tcx, |impl_did| {
- debug!("check_implementations_of_copy: impl_did={:?}",
- impl_did);
+ debug!("check_implementations_of_copy: impl_did={:?}", impl_did);
let impl_node_id = if let Some(n) = tcx.map.as_local_node_id(impl_did) {
n
} else {
debug!("check_implementations_of_copy(): impl not in this \
crate");
- return
+ return;
};
let self_type = tcx.lookup_item_type(impl_did);
match param_env.can_type_implement_copy(tcx, self_type, span) {
Ok(()) => {}
Err(CopyImplementationError::InfrigingField(name)) => {
- struct_span_err!(tcx.sess, span, E0204,
- "the trait `Copy` may not be implemented for \
- this type")
- .span_label(span, &format!(
- "field `{}` does not implement `Copy`", name)
- )
- .emit()
-
+ struct_span_err!(tcx.sess,
+ span,
+ E0204,
+ "the trait `Copy` may not be implemented for this type")
+ .span_label(span, &format!("field `{}` does not implement `Copy`", name))
+ .emit()
}
Err(CopyImplementationError::InfrigingVariant(name)) => {
let item = tcx.map.expect_item(impl_node_id);
span
};
- struct_span_err!(tcx.sess, span, E0205,
+ struct_span_err!(tcx.sess,
+ span,
+ E0205,
"the trait `Copy` may not be implemented for this type")
- .span_label(span, &format!("variant `{}` does not implement `Copy`",
- name))
+ .span_label(span,
+ &format!("variant `{}` does not implement `Copy`", name))
.emit()
}
Err(CopyImplementationError::NotAnAdt) => {
span
};
- struct_span_err!(tcx.sess, span, E0206,
+ struct_span_err!(tcx.sess,
+ span,
+ E0206,
"the trait `Copy` may not be implemented for this type")
.span_label(span, &format!("type is not a structure or enumeration"))
.emit();
}
Err(CopyImplementationError::HasDestructor) => {
- struct_span_err!(tcx.sess, span, E0184,
- "the trait `Copy` may not be implemented for this type; \
- the type has a destructor")
+ struct_span_err!(tcx.sess,
+ span,
+ E0184,
+ "the trait `Copy` may not be implemented for this type; the \
+ type has a destructor")
.span_label(span, &format!("Copy not allowed on types with destructors"))
.emit();
}
let trait_ref = self.crate_context.tcx.impl_trait_ref(impl_did).unwrap();
let target = trait_ref.substs.type_at(1);
debug!("check_implementations_of_coerce_unsized: {:?} -> {:?} (bound)",
- source, target);
+ source,
+ target);
let span = tcx.map.span(impl_node_id);
let param_env = ParameterEnvironment::for_item(tcx, impl_node_id);
assert!(!source.has_escaping_regions());
debug!("check_implementations_of_coerce_unsized: {:?} -> {:?} (free)",
- source, target);
+ source,
+ target);
tcx.infer_ctxt(None, Some(param_env), Reveal::ExactMatch).enter(|infcx| {
let origin = TypeOrigin::Misc(span);
- let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>, mt_b: ty::TypeAndMut<'gcx>,
+ let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>,
+ mt_b: ty::TypeAndMut<'gcx>,
mk_ptr: &Fn(Ty<'gcx>) -> Ty<'gcx>| {
if (mt_a.mutbl, mt_b.mutbl) == (hir::MutImmutable, hir::MutMutable) {
- infcx.report_mismatched_types(origin, mk_ptr(mt_b.ty),
- target, ty::error::TypeError::Mutability);
+ infcx.report_mismatched_types(origin,
+ mk_ptr(mt_b.ty),
+ target,
+ ty::error::TypeError::Mutability);
}
(mt_a.ty, mt_b.ty, unsize_trait, None)
};
}
(&ty::TyAdt(def_a, substs_a), &ty::TyAdt(def_b, substs_b))
- if def_a.is_struct() && def_b.is_struct() => {
+ if def_a.is_struct() && def_b.is_struct() => {
if def_a != def_b {
let source_path = tcx.item_path_str(def_a.did);
let target_path = tcx.item_path_str(def_b.did);
- span_err!(tcx.sess, span, E0377,
+ span_err!(tcx.sess,
+ span,
+ E0377,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures with the same \
definition; expected {}, found {}",
- source_path, target_path);
+ source_path,
+ target_path);
return;
}
let fields = &def_a.struct_variant().fields;
- let diff_fields = fields.iter().enumerate().filter_map(|(i, f)| {
- let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b));
-
- if f.unsubst_ty().is_phantom_data() {
- // Ignore PhantomData fields
- None
- } else if infcx.sub_types(false, origin, b, a).is_ok() {
- // Ignore fields that aren't significantly changed
- None
- } else {
- // Collect up all fields that were significantly changed
- // i.e. those that contain T in coerce_unsized T -> U
- Some((i, a, b))
- }
- }).collect::<Vec<_>>();
+ let diff_fields = fields.iter()
+ .enumerate()
+ .filter_map(|(i, f)| {
+ let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b));
+
+ if f.unsubst_ty().is_phantom_data() {
+ // Ignore PhantomData fields
+ None
+ } else if infcx.sub_types(false, origin, b, a).is_ok() {
+ // Ignore fields that aren't significantly changed
+ None
+ } else {
+ // Collect up all fields that were significantly changed
+ // i.e. those that contain T in coerce_unsized T -> U
+ Some((i, a, b))
+ }
+ })
+ .collect::<Vec<_>>();
if diff_fields.is_empty() {
- span_err!(tcx.sess, span, E0374,
+ span_err!(tcx.sess,
+ span,
+ E0374,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures with one field \
being coerced, none found");
tcx.map.span(impl_node_id)
};
- let mut err = struct_span_err!(tcx.sess, span, E0375,
- "implementing the trait `CoerceUnsized` \
- requires multiple coercions");
+ let mut err = struct_span_err!(tcx.sess,
+ span,
+ E0375,
+ "implementing the trait \
+ `CoerceUnsized` requires multiple \
+ coercions");
err.note("`CoerceUnsized` may only be implemented for \
a coercion between structures with one field being coerced");
err.note(&format!("currently, {} fields need coercions: {}",
- diff_fields.len(),
- diff_fields.iter().map(|&(i, a, b)| {
- format!("{} ({} to {})", fields[i].name, a, b)
- }).collect::<Vec<_>>().join(", ") ));
+ diff_fields.len(),
+ diff_fields.iter()
+ .map(|&(i, a, b)| {
+ format!("{} ({} to {})", fields[i].name, a, b)
+ })
+ .collect::<Vec<_>>()
+ .join(", ")));
err.span_label(span, &format!("requires multiple coercions"));
err.emit();
return;
}
_ => {
- span_err!(tcx.sess, span, E0376,
+ span_err!(tcx.sess,
+ span,
+ E0376,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures");
return;
// Register an obligation for `A: Trait<B>`.
let cause = traits::ObligationCause::misc(span, impl_node_id);
- let predicate = tcx.predicate_for_trait_def(cause, trait_def_id, 0,
- source, &[target]);
+ let predicate =
+ tcx.predicate_for_trait_def(cause, trait_def_id, 0, source, &[target]);
fulfill_cx.register_predicate_obligation(&infcx, predicate);
// Check that all transitive obligations are satisfied.
// Finally, resolve all regions.
let mut free_regions = FreeRegionMap::new();
- free_regions.relate_free_regions_from_predicates(
- &infcx.parameter_environment.caller_bounds);
+ free_regions.relate_free_regions_from_predicates(&infcx.parameter_environment
+ .caller_bounds);
infcx.resolve_regions_and_report_errors(&free_regions, impl_node_id);
if let Some(kind) = kind {
fn enforce_trait_manually_implementable(tcx: TyCtxt, sp: Span, trait_def_id: DefId) {
if tcx.sess.features.borrow().unboxed_closures {
// the feature gate allows all of them
- return
+ return;
}
let did = Some(trait_def_id);
let li = &tcx.lang_items;
} else if did == li.fn_once_trait() {
"FnOnce"
} else {
- return // everything OK
+ return; // everything OK
};
let mut err = struct_span_err!(tcx.sess,
sp,
E0183,
"manual implementations of `{}` are experimental",
trait_name);
- help!(&mut err, "add `#![feature(unboxed_closures)]` to the crate attributes to enable");
+ help!(&mut err,
+ "add `#![feature(unboxed_closures)]` to the crate attributes to enable");
err.emit();
}
let _task = ccx.tcx.dep_graph.in_task(DepNode::Coherence);
ccx.tcx.infer_ctxt(None, None, Reveal::ExactMatch).enter(|infcx| {
CoherenceChecker {
- crate_context: ccx,
- inference_context: infcx,
- }.check();
+ crate_context: ccx,
+ inference_context: infcx,
+ }
+ .check();
});
unsafety::check(ccx.tcx);
orphan::check(ccx.tcx);
//! Orphan checker: every impl either implements a trait defined in this
//! crate or pertains to a type defined in this crate.
-use middle::cstore::LOCAL_CRATE;
-use hir::def_id::DefId;
+use hir::def_id::{DefId, LOCAL_CRATE};
use rustc::traits;
use rustc::ty::{self, TyCtxt};
use syntax::ast;
tcx.visit_all_items_in_krate(DepNode::CoherenceOrphanCheck, &mut orphan);
}
-struct OrphanChecker<'cx, 'tcx:'cx> {
- tcx: TyCtxt<'cx, 'tcx, 'tcx>
+struct OrphanChecker<'cx, 'tcx: 'cx> {
+ tcx: TyCtxt<'cx, 'tcx, 'tcx>,
}
impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> {
fn check_def_id(&self, item: &hir::Item, def_id: DefId) {
if def_id.krate != LOCAL_CRATE {
- struct_span_err!(self.tcx.sess, item.span, E0116,
- "cannot define inherent `impl` for a type outside of the \
- crate where the type is defined")
- .span_label(item.span, &format!("impl for type defined outside of crate."))
+ struct_span_err!(self.tcx.sess,
+ item.span,
+ E0116,
+ "cannot define inherent `impl` for a type outside of the crate \
+ where the type is defined")
+ .span_label(item.span,
+ &format!("impl for type defined outside of crate."))
.note("define and implement a trait or new type instead")
.emit();
}
ty: &str,
span: Span) {
match lang_def_id {
- Some(lang_def_id) if lang_def_id == impl_def_id => { /* OK */ },
+ Some(lang_def_id) if lang_def_id == impl_def_id => {
+ // OK
+ }
_ => {
- struct_span_err!(self.tcx.sess, span, E0390,
- "only a single inherent implementation marked with `#[lang = \"{}\"]` \
- is allowed for the `{}` primitive", lang, ty)
+ struct_span_err!(self.tcx.sess,
+ span,
+ E0390,
+ "only a single inherent implementation marked with `#[lang = \
+ \"{}\"]` is allowed for the `{}` primitive",
+ lang,
+ ty)
.span_help(span, "consider using a trait to implement these methods")
.emit();
}
return;
}
_ => {
- struct_span_err!(self.tcx.sess, ty.span, E0118,
+ struct_span_err!(self.tcx.sess,
+ ty.span,
+ E0118,
"no base type found for inherent implementation")
- .span_label(ty.span, &format!("impl requires a base type"))
- .note(&format!("either implement a trait on it or create a newtype \
- to wrap it instead"))
- .emit();
+ .span_label(ty.span, &format!("impl requires a base type"))
+ .note(&format!("either implement a trait on it or create a newtype \
+ to wrap it instead"))
+ .emit();
return;
}
}
let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap();
let trait_def_id = trait_ref.def_id;
match traits::orphan_check(self.tcx, def_id) {
- Ok(()) => { }
+ Ok(()) => {}
Err(traits::OrphanCheckErr::NoLocalInputType) => {
- struct_span_err!(
- self.tcx.sess, item.span, E0117,
- "only traits defined in the current crate can be \
- implemented for arbitrary types")
- .span_label(item.span, &format!("impl doesn't use types inside crate"))
- .note(&format!("the impl does not reference any \
- types defined in this crate"))
- .emit();
+ struct_span_err!(self.tcx.sess,
+ item.span,
+ E0117,
+ "only traits defined in the current crate can be \
+ implemented for arbitrary types")
+ .span_label(item.span, &format!("impl doesn't use types inside crate"))
+ .note(&format!("the impl does not reference any types defined in \
+ this crate"))
+ .emit();
return;
}
Err(traits::OrphanCheckErr::UncoveredTy(param_ty)) => {
- span_err!(self.tcx.sess, item.span, E0210,
+ span_err!(self.tcx.sess,
+ item.span,
+ E0210,
"type parameter `{}` must be used as the type parameter for \
some local type (e.g. `MyStruct<T>`); only traits defined in \
the current crate can be implemented for a type parameter",
trait_ref,
trait_def_id,
self.tcx.trait_has_default_impl(trait_def_id));
- if
- self.tcx.trait_has_default_impl(trait_def_id) &&
- trait_def_id.krate != LOCAL_CRATE
- {
+ if self.tcx.trait_has_default_impl(trait_def_id) &&
+ trait_def_id.krate != LOCAL_CRATE {
let self_ty = trait_ref.self_ty();
let opt_self_def_id = match self_ty.sty {
ty::TyAdt(self_def, _) => Some(self_def.did),
if self_def_id.is_local() {
None
} else {
- Some(format!(
- "cross-crate traits with a default impl, like `{}`, \
- can only be implemented for a struct/enum type \
- defined in the current crate",
- self.tcx.item_path_str(trait_def_id)))
+ Some(format!("cross-crate traits with a default impl, like `{}`, \
+ can only be implemented for a struct/enum type \
+ defined in the current crate",
+ self.tcx.item_path_str(trait_def_id)))
}
}
_ => {
- Some(format!(
- "cross-crate traits with a default impl, like `{}`, \
- can only be implemented for a struct/enum type, \
- not `{}`",
- self.tcx.item_path_str(trait_def_id),
- self_ty))
+ Some(format!("cross-crate traits with a default impl, like `{}`, can \
+ only be implemented for a struct/enum type, not `{}`",
+ self.tcx.item_path_str(trait_def_id),
+ self_ty))
}
};
// Disallow *all* explicit impls of `Sized` and `Unsize` for now.
if Some(trait_def_id) == self.tcx.lang_items.sized_trait() {
- struct_span_err!(self.tcx.sess, item.span, E0322,
- "explicit impls for the `Sized` trait are not permitted")
+ struct_span_err!(self.tcx.sess,
+ item.span,
+ E0322,
+ "explicit impls for the `Sized` trait are not permitted")
.span_label(item.span, &format!("impl of 'Sized' not allowed"))
.emit();
return;
}
if Some(trait_def_id) == self.tcx.lang_items.unsize_trait() {
- span_err!(self.tcx.sess, item.span, E0328,
+ span_err!(self.tcx.sess,
+ item.span,
+ E0328,
"explicit impls for the `Unsize` trait are not permitted");
return;
}
self.tcx.map.node_to_string(item.id));
let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap();
if trait_ref.def_id.krate != LOCAL_CRATE {
- struct_span_err!(self.tcx.sess, item_trait_ref.path.span, E0318,
- "cannot create default implementations for traits outside the \
- crate they're defined in; define a new trait instead")
+ struct_span_err!(self.tcx.sess,
+ item_trait_ref.path.span,
+ E0318,
+ "cannot create default implementations for traits outside \
+ the crate they're defined in; define a new trait instead")
.span_label(item_trait_ref.path.span,
&format!("`{}` trait not defined in this crate",
item_trait_ref.path))
}
}
-impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OrphanChecker<'cx, 'tcx> {
+impl<'cx, 'tcx, 'v> intravisit::Visitor<'v> for OrphanChecker<'cx, 'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
self.check_item(item);
}
use lint;
pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- let mut overlap = OverlapChecker { tcx: tcx,
- default_impls: DefIdMap() };
+ let mut overlap = OverlapChecker {
+ tcx: tcx,
+ default_impls: DefIdMap(),
+ };
// this secondary walk specifically checks for some other cases,
// like defaulted traits, for which additional overlap rules exist
tcx.visit_all_items_in_krate(DepNode::CoherenceOverlapCheckSpecial, &mut overlap);
}
-struct OverlapChecker<'cx, 'tcx:'cx> {
+struct OverlapChecker<'cx, 'tcx: 'cx> {
tcx: TyCtxt<'cx, 'tcx, 'tcx>,
// maps from a trait def-id to an impl id
impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> {
fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) {
#[derive(Copy, Clone, PartialEq)]
- enum Namespace { Type, Value }
+ enum Namespace {
+ Type,
+ Value,
+ }
fn name_and_namespace<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- item: &ty::ImplOrTraitItemId)
- -> (ast::Name, Namespace)
- {
- let name = tcx.impl_or_trait_item(item.def_id()).name();
- (name, match *item {
- ty::TypeTraitItemId(..) => Namespace::Type,
- ty::ConstTraitItemId(..) => Namespace::Value,
- ty::MethodTraitItemId(..) => Namespace::Value,
- })
+ def_id: DefId)
+ -> (ast::Name, Namespace) {
+ let item = tcx.impl_or_trait_item(def_id);
+ (item.name(),
+ match item {
+ ty::TypeTraitItem(..) => Namespace::Type,
+ ty::ConstTraitItem(..) => Namespace::Value,
+ ty::MethodTraitItem(..) => Namespace::Value,
+ })
}
- let impl_items = self.tcx.impl_items.borrow();
+ let impl_items = self.tcx.impl_or_trait_item_def_ids.borrow();
- for item1 in &impl_items[&impl1] {
+ for &item1 in &impl_items[&impl1][..] {
let (name, namespace) = name_and_namespace(self.tcx, item1);
- for item2 in &impl_items[&impl2] {
+ for &item2 in &impl_items[&impl2][..] {
if (name, namespace) == name_and_namespace(self.tcx, item2) {
let msg = format!("duplicate definitions with name `{}`", name);
- let node_id = self.tcx.map.as_local_node_id(item1.def_id()).unwrap();
+ let node_id = self.tcx.map.as_local_node_id(item1).unwrap();
self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS,
node_id,
- self.tcx.span_of_impl(item1.def_id()).unwrap(),
+ self.tcx.span_of_impl(item1).unwrap(),
msg);
}
}
let inherent_impls = self.tcx.inherent_impls.borrow();
let impls = match inherent_impls.get(&ty_def_id) {
Some(impls) => impls,
- None => return
+ None => return,
};
for (i, &impl1_def_id) in impls.iter().enumerate() {
- for &impl2_def_id in &impls[(i+1)..] {
+ for &impl2_def_id in &impls[(i + 1)..] {
self.tcx.infer_ctxt(None, None, Reveal::ExactMatch).enter(|infcx| {
if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() {
self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id)
}
}
-impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> {
+impl<'cx, 'tcx, 'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> {
fn visit_item(&mut self, item: &'v hir::Item) {
match item.node {
- hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemUnion(..) => {
+ hir::ItemEnum(..) |
+ hir::ItemStruct(..) |
+ hir::ItemUnion(..) => {
let type_def_id = self.tcx.map.local_def_id(item.id);
self.check_for_overlapping_inherent_impls(type_def_id);
}
let prev_default_impl = self.default_impls.insert(trait_ref.def_id, item.id);
if let Some(prev_id) = prev_default_impl {
- let mut err = struct_span_err!(
- self.tcx.sess,
- self.tcx.span_of_impl(impl_def_id).unwrap(), E0521,
- "redundant default implementations of trait `{}`:",
- trait_ref);
- err.span_note(self.tcx.span_of_impl(self.tcx.map.local_def_id(prev_id))
+ let mut err = struct_span_err!(self.tcx.sess,
+ self.tcx.span_of_impl(impl_def_id).unwrap(),
+ E0521,
+ "redundant default implementations of trait \
+ `{}`:",
+ trait_ref);
+ err.span_note(self.tcx
+ .span_of_impl(self.tcx.map.local_def_id(prev_id))
.unwrap(),
"redundant implementation is here:");
err.emit();
let trait_ref = self.tcx.impl_trait_ref(impl_def_id).unwrap();
let trait_def_id = trait_ref.def_id;
- let _task = self.tcx.dep_graph.in_task(
- DepNode::CoherenceOverlapCheck(trait_def_id));
+ let _task =
+ self.tcx.dep_graph.in_task(DepNode::CoherenceOverlapCheck(trait_def_id));
let def = self.tcx.lookup_trait_def(trait_def_id);
// insertion failed due to overlap
if let Err(overlap) = insert_result {
- let mut err = struct_span_err!(
- self.tcx.sess, self.tcx.span_of_impl(impl_def_id).unwrap(), E0119,
- "conflicting implementations of trait `{}`{}:",
- overlap.trait_desc,
- overlap.self_desc.clone().map_or(String::new(),
- |ty| format!(" for type `{}`", ty)));
+ let mut err = struct_span_err!(self.tcx.sess,
+ self.tcx.span_of_impl(impl_def_id).unwrap(),
+ E0119,
+ "conflicting implementations of trait `{}`{}:",
+ overlap.trait_desc,
+ overlap.self_desc.clone().map_or(String::new(),
+ |ty| {
+ format!(" for type `{}`", ty)
+ }));
match self.tcx.span_of_impl(overlap.with_impl) {
Ok(span) => {
- err.span_label(span,
- &format!("first implementation here"));
+ err.span_label(span, &format!("first implementation here"));
err.span_label(self.tcx.span_of_impl(impl_def_id).unwrap(),
&format!("conflicting implementation{}",
overlap.self_desc
|ty| format!(" for `{}`", ty))));
}
Err(cname) => {
- err.note(&format!("conflicting implementation in crate `{}`",
- cname));
+ err.note(&format!("conflicting implementation in crate `{}`", cname));
}
}
let mut supertrait_def_ids =
traits::supertrait_def_ids(self.tcx, data.principal.def_id());
if supertrait_def_ids.any(|d| d == trait_def_id) {
- span_err!(self.tcx.sess, item.span, E0371,
+ span_err!(self.tcx.sess,
+ item.span,
+ E0371,
"the object type `{}` automatically \
implements the trait `{}`",
trait_ref.self_ty(),
tcx.map.krate().visit_all_items(&mut orphan);
}
-struct UnsafetyChecker<'cx, 'tcx:'cx> {
- tcx: TyCtxt<'cx, 'tcx, 'tcx>
+struct UnsafetyChecker<'cx, 'tcx: 'cx> {
+ tcx: TyCtxt<'cx, 'tcx, 'tcx>,
}
impl<'cx, 'tcx, 'v> UnsafetyChecker<'cx, 'tcx> {
- fn check_unsafety_coherence(&mut self, item: &'v hir::Item,
+ fn check_unsafety_coherence(&mut self,
+ item: &'v hir::Item,
unsafety: hir::Unsafety,
polarity: hir::ImplPolarity) {
match self.tcx.impl_trait_ref(self.tcx.map.local_def_id(item.id)) {
None => {
// Inherent impl.
match unsafety {
- hir::Unsafety::Normal => { /* OK */ }
+ hir::Unsafety::Normal => {
+ // OK
+ }
hir::Unsafety::Unsafe => {
- span_err!(self.tcx.sess, item.span, E0197,
+ span_err!(self.tcx.sess,
+ item.span,
+ E0197,
"inherent impls cannot be declared as unsafe");
}
}
Some(trait_ref) => {
let trait_def = self.tcx.lookup_trait_def(trait_ref.def_id);
match (trait_def.unsafety, unsafety, polarity) {
- (hir::Unsafety::Unsafe,
- hir::Unsafety::Unsafe, hir::ImplPolarity::Negative) => {
- span_err!(self.tcx.sess, item.span, E0198,
+ (hir::Unsafety::Unsafe, hir::Unsafety::Unsafe, hir::ImplPolarity::Negative) => {
+ span_err!(self.tcx.sess,
+ item.span,
+ E0198,
"negative implementations are not unsafe");
}
(hir::Unsafety::Normal, hir::Unsafety::Unsafe, _) => {
- span_err!(self.tcx.sess, item.span, E0199,
+ span_err!(self.tcx.sess,
+ item.span,
+ E0199,
"implementing the trait `{}` is not unsafe",
trait_ref);
}
- (hir::Unsafety::Unsafe,
- hir::Unsafety::Normal, hir::ImplPolarity::Positive) => {
- span_err!(self.tcx.sess, item.span, E0200,
+ (hir::Unsafety::Unsafe, hir::Unsafety::Normal, hir::ImplPolarity::Positive) => {
+ span_err!(self.tcx.sess,
+ item.span,
+ E0200,
"the trait `{}` requires an `unsafe impl` declaration",
trait_ref);
}
- (hir::Unsafety::Unsafe,
- hir::Unsafety::Normal, hir::ImplPolarity::Negative) |
- (hir::Unsafety::Unsafe,
- hir::Unsafety::Unsafe, hir::ImplPolarity::Positive) |
+ (hir::Unsafety::Unsafe, hir::Unsafety::Normal, hir::ImplPolarity::Negative) |
+ (hir::Unsafety::Unsafe, hir::Unsafety::Unsafe, hir::ImplPolarity::Positive) |
(hir::Unsafety::Normal, hir::Unsafety::Normal, _) => {
- /* OK */
+ // OK
}
}
}
}
}
-impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for UnsafetyChecker<'cx, 'tcx> {
+impl<'cx, 'tcx, 'v> intravisit::Visitor<'v> for UnsafetyChecker<'cx, 'tcx> {
fn visit_item(&mut self, item: &'v hir::Item) {
match item.node {
hir::ItemDefaultImpl(unsafety, _) => {
hir::ItemImpl(unsafety, polarity, ..) => {
self.check_unsafety_coherence(item, unsafety, polarity);
}
- _ => { }
+ _ => {}
}
}
}
-> bool
{
if let Some(trait_id) = self.tcx().map.as_local_node_id(trait_def_id) {
- trait_defines_associated_type_named(self.ccx, trait_id, assoc_name)
+ trait_associated_type_names(self.tcx(), trait_id)
+ .any(|name| name == assoc_name)
} else {
- let trait_def = self.tcx().lookup_trait_def(trait_def_id);
- trait_def.associated_type_names.contains(&assoc_name)
+ self.tcx().impl_or_trait_items(trait_def_id).iter().any(|&def_id| {
+ match self.tcx().impl_or_trait_item(def_id) {
+ ty::TypeTraitItem(ref item) => item.name == assoc_name,
+ _ => false
+ }
+ })
}
}
vis: &hir::Visibility,
sig: &hir::MethodSig,
defaultness: hir::Defaultness,
+ has_body: bool,
untransformed_rcvr_ty: Ty<'tcx>,
rcvr_ty_predicates: &ty::GenericPredicates<'tcx>) {
let def_id = ccx.tcx.map.local_def_id(id);
sig, untransformed_rcvr_ty, anon_scope)
};
- let ty_method = ty::Method::new(name,
- ty_generics,
- ty_generic_predicates,
- fty,
- explicit_self_category,
- ty::Visibility::from_hir(vis, id, ccx.tcx),
- defaultness,
- def_id,
- container);
+ let ty_method = ty::Method {
+ name: name,
+ generics: ty_generics,
+ predicates: ty_generic_predicates,
+ fty: fty,
+ explicit_self: explicit_self_category,
+ vis: ty::Visibility::from_hir(vis, id, ccx.tcx),
+ defaultness: defaultness,
+ has_body: has_body,
+ def_id: def_id,
+ container: container,
+ };
let substs = mk_item_substs(&ccx.icx(&(rcvr_ty_predicates, &sig.generics)),
ccx.tcx.map.span(id), def_id);
defaultness: hir::Defaultness,
ty: Option<Ty<'tcx>>)
{
+ let predicates = ty::GenericPredicates {
+ parent: Some(container.id()),
+ predicates: vec![]
+ };
+ ccx.tcx.predicates.borrow_mut().insert(ccx.tcx.map.local_def_id(id),
+ predicates);
+
let associated_type = Rc::new(ty::AssociatedType {
name: name,
vis: ty::Visibility::from_hir(vis, id, ccx.tcx),
// Convert all the associated types.
for impl_item in impl_items {
if let hir::ImplItemKind::Type(ref ty) = impl_item.node {
+ let type_def_id = ccx.tcx.map.local_def_id(impl_item.id);
+ generics_of_def_id(ccx, type_def_id);
+
if opt_trait_ref.is_none() {
span_err!(tcx.sess, impl_item.span, E0202,
"associated types are not allowed in inherent impls");
convert_method(ccx, ImplContainer(def_id),
impl_item.name, impl_item.id, method_vis,
- sig, impl_item.defaultness, selfty,
+ sig, impl_item.defaultness, true, selfty,
&ty_predicates);
}
}
// Convert all the associated types.
for trait_item in trait_items {
if let hir::TypeTraitItem(_, ref opt_ty) = trait_item.node {
+ let type_def_id = ccx.tcx.map.local_def_id(trait_item.id);
+ generics_of_def_id(ccx, type_def_id);
+
let typ = opt_ty.as_ref().map({
|ty| ccx.icx(&trait_predicates).to_ty(&ExplicitRscope, &ty)
});
// Convert all the methods
for trait_item in trait_items {
- if let hir::MethodTraitItem(ref sig, _) = trait_item.node {
+ if let hir::MethodTraitItem(ref sig, ref body) = trait_item.node {
convert_method(ccx,
container,
trait_item.name,
&hir::Inherited,
sig,
hir::Defaultness::Default,
+ body.is_some(),
tcx.mk_self_type(),
&trait_predicates);
// Add an entry mapping
let trait_item_def_ids = Rc::new(trait_items.iter().map(|trait_item| {
- let def_id = ccx.tcx.map.local_def_id(trait_item.id);
- match trait_item.node {
- hir::ConstTraitItem(..) => ty::ConstTraitItemId(def_id),
- hir::MethodTraitItem(..) => ty::MethodTraitItemId(def_id),
- hir::TypeTraitItem(..) => ty::TypeTraitItemId(def_id)
- }
+ ccx.tcx.map.local_def_id(trait_item.id)
}).collect());
- tcx.trait_item_def_ids.borrow_mut().insert(ccx.tcx.map.local_def_id(it.id),
- trait_item_def_ids);
+ tcx.impl_or_trait_item_def_ids.borrow_mut().insert(ccx.tcx.map.local_def_id(it.id),
+ trait_item_def_ids);
},
hir::ItemStruct(ref struct_def, _) |
hir::ItemUnion(ref struct_def, _) => {
return def.clone();
}
- let (unsafety, generics, items) = match it.node {
- hir::ItemTrait(unsafety, ref generics, _, ref items) => {
- (unsafety, generics, items)
+ let (unsafety, generics) = match it.node {
+ hir::ItemTrait(unsafety, ref generics, _, _) => {
+ (unsafety, generics)
}
_ => span_bug!(it.span, "trait_def_of_item invoked on non-trait"),
};
let ty_generics = generics_of_def_id(ccx, def_id);
let substs = mk_item_substs(&ccx.icx(generics), it.span, def_id);
- let associated_type_names: Vec<_> = items.iter().filter_map(|trait_item| {
- match trait_item.node {
- hir::TypeTraitItem(..) => Some(trait_item.name),
- _ => None,
- }
- }).collect();
-
let def_path_hash = tcx.def_path(def_id).deterministic_hash(tcx);
let trait_ref = ty::TraitRef::new(def_id, substs);
- let trait_def = ty::TraitDef::new(unsafety,
- paren_sugar,
- ty_generics,
- trait_ref,
- associated_type_names,
+ let trait_def = ty::TraitDef::new(unsafety, paren_sugar, ty_generics, trait_ref,
def_path_hash);
tcx.intern_trait_def(trait_def)
}
-fn trait_defines_associated_type_named(ccx: &CrateCtxt,
- trait_node_id: ast::NodeId,
- assoc_name: ast::Name)
- -> bool
+pub fn trait_associated_type_names<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ trait_node_id: ast::NodeId)
+ -> impl Iterator<Item=ast::Name> + 'a
{
- let item = match ccx.tcx.map.get(trait_node_id) {
+ let item = match tcx.map.get(trait_node_id) {
hir_map::NodeItem(item) => item,
_ => bug!("trait_node_id {} is not an item", trait_node_id)
};
_ => bug!("trait_node_id {} is not a trait", trait_node_id)
};
- trait_items.iter().any(|trait_item| {
+ trait_items.iter().filter_map(|trait_item| {
match trait_item.node {
- hir::TypeTraitItem(..) => trait_item.name == assoc_name,
- _ => false,
+ hir::TypeTraitItem(..) => Some(trait_item.name),
+ _ => None,
}
})
}
```
"##,
+E0513: r##"
+The type of the variable couldn't be determined.
+
+Erroneous code example:
+
+```compile_fail,E0513
+use std::mem;
+
+unsafe {
+ let size = mem::size_of::<u32>();
+ mem::transmute_copy::<u32, [u8; size]>(&8_8);
+ // error: no type for local variable
+}
+```
+
+To fix this error, please use a constant size instead of `size`. To make
+this error more obvious, you could run:
+
+```compile_fail,E0080
+use std::mem;
+
+unsafe {
+ mem::transmute_copy::<u32, [u8; mem::size_of::<u32>()]>(&8_8);
+ // error: constant evaluation error
+}
+```
+
+So now, you can fix your code by setting the size directly:
+
+```
+use std::mem;
+
+unsafe {
+ mem::transmute_copy::<u32, [u8; 4]>(&8_8);
+ // `u32` is 4 bytes so we replace the `mem::size_of` call with its size
+}
+```
+"##,
+
E0516: r##"
The `typeof` keyword is currently reserved but unimplemented.
Erroneous code example:
E0399, // trait items need to be implemented because the associated
// type `{}` was overridden
E0436, // functional record update requires a struct
- E0513, // no type for local variable ..
E0521, // redundant default implementations of trait
E0533, // `{}` does not name a unit variant, unit struct or a constant
E0562, // `impl Trait` not allowed outside of function
#![feature(box_patterns)]
#![feature(box_syntax)]
+#![feature(conservative_impl_trait)]
#![feature(dotdot_in_tuple_patterns)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
invariant: VarianceTermPtr<'a>,
bivariant: VarianceTermPtr<'a>,
- pub constraints: Vec<Constraint<'a>> ,
+ pub constraints: Vec<Constraint<'a>>,
}
/// Declares that the variable `decl_id` appears in a location with
}
pub fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>)
- -> ConstraintContext<'a, 'tcx>
-{
+ -> ConstraintContext<'a, 'tcx> {
let tcx = terms_cx.tcx;
let covariant = terms_cx.arena.alloc(ConstantTerm(ty::Covariant));
let contravariant = terms_cx.arena.alloc(ConstantTerm(ty::Contravariant));
debug!("visit_item item={}", tcx.map.node_to_string(item.id));
match item.node {
- hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemUnion(..) => {
+ hir::ItemEnum(..) |
+ hir::ItemStruct(..) |
+ hir::ItemUnion(..) => {
let scheme = tcx.lookup_item_type(did);
// Not entirely obvious: constraints on structs/enums do not
hir::ItemForeignMod(..) |
hir::ItemTy(..) |
hir::ItemImpl(..) |
- hir::ItemDefaultImpl(..) => {
- }
+ hir::ItemDefaultImpl(..) => {}
}
}
}
/// Is `param_id` a lifetime according to `map`?
fn is_lifetime(map: &hir_map::Map, param_id: ast::NodeId) -> bool {
match map.find(param_id) {
- Some(hir_map::NodeLifetime(..)) => true, _ => false
+ Some(hir_map::NodeLifetime(..)) => true,
+ _ => false,
}
}
let tcx = self.terms_cx.tcx;
assert!(is_lifetime(&tcx.map, param_id));
match tcx.named_region_map.defs.get(¶m_id) {
- Some(&rl::DefEarlyBoundRegion(_, lifetime_decl_id))
- => lifetime_decl_id,
+ Some(&rl::DefEarlyBoundRegion(_, lifetime_decl_id)) => lifetime_decl_id,
Some(_) => bug!("should not encounter non early-bound cases"),
// The lookup should only fail when `param_id` is
// itself a lifetime binding: use it as the decl_id.
- None => param_id,
+ None => param_id,
}
}
// parameter (by inspecting parent of its binding declaration
// to see if it is introduced by a type or by a fn/impl).
- let check_result = |this:&ConstraintContext| -> bool {
+ let check_result = |this: &ConstraintContext| -> bool {
let tcx = this.terms_cx.tcx;
let decl_id = this.find_binding_for_lifetime(param_id);
// Currently only called on lifetimes; double-checking that.
assert!(is_lifetime(&tcx.map, param_id));
let parent_id = tcx.map.get_parent(decl_id);
- let parent = tcx.map.find(parent_id).unwrap_or_else(
- || bug!("tcx.map missing entry for id: {}", parent_id));
+ let parent = tcx.map
+ .find(parent_id)
+ .unwrap_or_else(|| bug!("tcx.map missing entry for id: {}", parent_id));
let is_inferred;
macro_rules! cannot_happen { () => { {
hir::ItemEnum(..) |
hir::ItemStruct(..) |
hir::ItemUnion(..) |
- hir::ItemTrait(..) => is_inferred = true,
- hir::ItemFn(..) => is_inferred = false,
- _ => cannot_happen!(),
+ hir::ItemTrait(..) => is_inferred = true,
+ hir::ItemFn(..) => is_inferred = false,
+ _ => cannot_happen!(),
}
}
- hir_map::NodeTraitItem(..) => is_inferred = false,
- hir_map::NodeImplItem(..) => is_inferred = false,
- _ => cannot_happen!(),
+ hir_map::NodeTraitItem(..) => is_inferred = false,
+ hir_map::NodeImplItem(..) => is_inferred = false,
+ _ => cannot_happen!(),
}
return is_inferred;
fn add_constraint(&mut self,
InferredIndex(index): InferredIndex,
variance: VarianceTermPtr<'a>) {
- debug!("add_constraint(index={}, variance={:?})",
- index, variance);
- self.constraints.push(Constraint { inferred: InferredIndex(index),
- variance: variance });
+ debug!("add_constraint(index={}, variance={:?})", index, variance);
+ self.constraints.push(Constraint {
+ inferred: InferredIndex(index),
+ variance: variance,
+ });
}
- fn contravariant(&mut self,
- variance: VarianceTermPtr<'a>)
- -> VarianceTermPtr<'a> {
+ fn contravariant(&mut self, variance: VarianceTermPtr<'a>) -> VarianceTermPtr<'a> {
self.xform(variance, self.contravariant)
}
- fn invariant(&mut self,
- variance: VarianceTermPtr<'a>)
- -> VarianceTermPtr<'a> {
+ fn invariant(&mut self, variance: VarianceTermPtr<'a>) -> VarianceTermPtr<'a> {
self.xform(variance, self.invariant)
}
}
}
- fn xform(&mut self,
- v1: VarianceTermPtr<'a>,
- v2: VarianceTermPtr<'a>)
- -> VarianceTermPtr<'a> {
+ fn xform(&mut self, v1: VarianceTermPtr<'a>, v2: VarianceTermPtr<'a>) -> VarianceTermPtr<'a> {
match (*v1, *v2) {
(_, ConstantTerm(ty::Covariant)) => {
// Applying a "covariant" transform is always a no-op
v1
}
- (ConstantTerm(c1), ConstantTerm(c2)) => {
- self.constant_term(c1.xform(c2))
- }
+ (ConstantTerm(c1), ConstantTerm(c2)) => self.constant_term(c1.xform(c2)),
- _ => {
- &*self.terms_cx.arena.alloc(TransformTerm(v1, v2))
- }
+ _ => &*self.terms_cx.arena.alloc(TransformTerm(v1, v2)),
}
}
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(ItemVariances::to_dep_node(&trait_ref.def_id));
- self.add_constraints_from_substs(
- generics,
- trait_ref.def_id,
- &trait_def.generics.types,
- &trait_def.generics.regions,
- trait_ref.substs,
- variance);
+ self.add_constraints_from_substs(generics,
+ trait_ref.def_id,
+ &trait_def.generics.types,
+ &trait_def.generics.regions,
+ trait_ref.substs,
+ variance);
}
/// Adds constraints appropriate for an instance of `ty` appearing
variance);
match ty.sty {
- ty::TyBool |
- ty::TyChar | ty::TyInt(_) | ty::TyUint(_) |
- ty::TyFloat(_) | ty::TyStr | ty::TyNever => {
- /* leaf type -- noop */
+ ty::TyBool | ty::TyChar | ty::TyInt(_) | ty::TyUint(_) | ty::TyFloat(_) |
+ ty::TyStr | ty::TyNever => {
+ // leaf type -- noop
}
- ty::TyClosure(..) | ty::TyAnon(..) => {
+ ty::TyClosure(..) |
+ ty::TyAnon(..) => {
bug!("Unexpected closure type in variance computation");
}
self.add_constraints_from_mt(generics, mt, variance);
}
- ty::TyBox(typ) | ty::TyArray(typ, _) | ty::TySlice(typ) => {
+ ty::TyBox(typ) |
+ ty::TyArray(typ, _) |
+ ty::TySlice(typ) => {
self.add_constraints_from_ty(generics, typ, variance);
}
-
ty::TyRawPtr(ref mt) => {
self.add_constraints_from_mt(generics, mt, variance);
}
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(ItemVariances::to_dep_node(&def.did));
- self.add_constraints_from_substs(
- generics,
- def.did,
- &item_type.generics.types,
- &item_type.generics.regions,
- substs,
- variance);
+ self.add_constraints_from_substs(generics,
+ def.did,
+ &item_type.generics.types,
+ &item_type.generics.regions,
+ substs,
+ variance);
}
ty::TyProjection(ref data) => {
// README.md for a discussion on dep-graph management.
self.tcx().dep_graph.read(ItemVariances::to_dep_node(&trait_ref.def_id));
- self.add_constraints_from_substs(
- generics,
- trait_ref.def_id,
- &trait_def.generics.types,
- &trait_def.generics.regions,
- trait_ref.substs,
- variance);
+ self.add_constraints_from_substs(generics,
+ trait_ref.def_id,
+ &trait_def.generics.types,
+ &trait_def.generics.regions,
+ trait_ref.substs,
+ variance);
}
ty::TyTrait(ref data) => {
let contra = self.contravariant(variance);
self.add_constraints_from_region(generics, data.region_bound, contra);
- let poly_trait_ref =
- data.principal.with_self_ty(self.tcx(), self.tcx().types.err);
+ let poly_trait_ref = data.principal.with_self_ty(self.tcx(), self.tcx().types.err);
self.add_constraints_from_trait_ref(generics, poly_trait_ref.0, variance);
for projection in &data.projection_bounds {
ty::TyInfer(..) => {
bug!("unexpected type encountered in \
- variance inference: {}", ty);
+ variance inference: {}",
+ ty);
}
}
}
variance);
for p in type_param_defs {
- let variance_decl =
- self.declared_variance(p.def_id, def_id, p.index as usize);
+ let variance_decl = self.declared_variance(p.def_id, def_id, p.index as usize);
let variance_i = self.xform(variance, variance_decl);
let substs_ty = substs.type_for_def(p);
debug!("add_constraints_from_substs: variance_decl={:?} variance_i={:?}",
- variance_decl, variance_i);
+ variance_decl,
+ variance_i);
self.add_constraints_from_ty(generics, substs_ty, variance_i);
}
for p in region_param_defs {
- let variance_decl =
- self.declared_variance(p.def_id, def_id, p.index as usize);
+ let variance_decl = self.declared_variance(p.def_id, def_id, p.index as usize);
let variance_i = self.xform(variance, variance_decl);
let substs_r = substs.region_for_def(p);
self.add_constraints_from_region(generics, substs_r, variance_i);
}
}
- ty::ReStatic => { }
+ ty::ReStatic => {}
ty::ReLateBound(..) => {
// We do not infer variance for region parameters on
// methods or in fn types.
}
- ty::ReFree(..) | ty::ReScope(..) | ty::ReVar(..) |
- ty::ReSkolemized(..) | ty::ReEmpty | ty::ReErased => {
+ ty::ReFree(..) |
+ ty::ReScope(..) |
+ ty::ReVar(..) |
+ ty::ReSkolemized(..) |
+ ty::ReEmpty |
+ ty::ReErased => {
// We don't expect to see anything but 'static or bound
// regions when visiting member types or method types.
bug!("unexpected region encountered in variance \
solve::solve_constraints(constraints_cx);
tcx.variance_computed.set(true);
}
-
struct SolveContext<'a, 'tcx: 'a> {
terms_cx: TermsContext<'a, 'tcx>,
- constraints: Vec<Constraint<'a>> ,
+ constraints: Vec<Constraint<'a>>,
// Maps from an InferredIndex to the inferred value for that variable.
- solutions: Vec<ty::Variance>
+ solutions: Vec<ty::Variance>,
}
pub fn solve_constraints(constraints_cx: ConstraintContext) {
let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
- let solutions =
- terms_cx.inferred_infos.iter()
- .map(|ii| ii.initial_variance)
- .collect();
+ let solutions = terms_cx.inferred_infos
+ .iter()
+ .map(|ii| ii.initial_variance)
+ .collect();
let mut solutions_cx = SolveContext {
terms_cx: terms_cx,
constraints: constraints,
- solutions: solutions
+ solutions: solutions,
};
solutions_cx.solve();
solutions_cx.write();
if old_value != new_value {
debug!("Updating inferred {} (node {}) \
from {:?} to {:?} due to {:?}",
- inferred,
- self.terms_cx
- .inferred_infos[inferred]
- .param_id,
- old_value,
- new_value,
- term);
+ inferred,
+ self.terms_cx
+ .inferred_infos[inferred]
+ .param_id,
+ old_value,
+ new_value,
+ term);
self.solutions[inferred] = new_value;
changed = true;
let info = &inferred_infos[index];
let variance = solutions[index];
debug!("Index {} Info {} Variance {:?}",
- index, info.index, variance);
+ index,
+ info.index,
+ variance);
assert_eq!(item_variances.len(), info.index);
item_variances.push(variance);
index += 1;
}
- debug!("item_id={} item_variances={:?}",
- item_id,
- item_variances);
+ debug!("item_id={} item_variances={:?}", item_id, item_variances);
let item_def_id = tcx.map.local_def_id(item_id);
// For unit testing: check for a special "rustc_variance"
// attribute and report an error with various results if found.
if tcx.has_attr(item_def_id, "rustc_variance") {
- span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{:?}", item_variances);
+ span_err!(tcx.sess,
+ tcx.map.span(item_id),
+ E0208,
+ "{:?}",
+ item_variances);
}
- let newly_added = tcx.item_variance_map.borrow_mut()
- .insert(item_def_id, Rc::new(item_variances)).is_none();
+ let newly_added = tcx.item_variance_map
+ .borrow_mut()
+ .insert(item_def_id, Rc::new(item_variances))
+ .is_none();
assert!(newly_added);
}
}
fn evaluate(&self, term: VarianceTermPtr<'a>) -> ty::Variance {
match *term {
- ConstantTerm(v) => {
- v
- }
+ ConstantTerm(v) => v,
TransformTerm(t1, t2) => {
let v1 = self.evaluate(t1);
v1.xform(v2)
}
- InferredTerm(InferredIndex(index)) => {
- self.solutions[index]
- }
+ InferredTerm(InferredIndex(index)) => self.solutions[index],
}
}
}
match *self {
ConstantTerm(c1) => write!(f, "{:?}", c1),
TransformTerm(v1, v2) => write!(f, "({:?} \u{00D7} {:?})", v1, v2),
- InferredTerm(id) => write!(f, "[{}]", { let InferredIndex(i) = id; i })
+ InferredTerm(id) => {
+ write!(f, "[{}]", {
+ let InferredIndex(i) = id;
+ i
+ })
+ }
}
}
}
pub inferred_map: NodeMap<InferredIndex>,
// Maps from an InferredIndex to the info for that variable.
- pub inferred_infos: Vec<InferredInfo<'a>> ,
+ pub inferred_infos: Vec<InferredInfo<'a>>,
}
pub struct InferredInfo<'a> {
pub initial_variance: ty::Variance,
}
-pub fn determine_parameters_to_be_inferred<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- arena: &'a mut TypedArena<VarianceTerm<'a>>)
- -> TermsContext<'a, 'tcx>
-{
+pub fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ arena: &'a mut TypedArena<VarianceTerm<'a>>)
+ -> TermsContext<'a, 'tcx> {
let mut terms_cx = TermsContext {
tcx: tcx,
arena: arena,
// cache and share the variance struct used for items with
// no type/region parameters
- empty_variances: Rc::new(vec![])
+ empty_variances: Rc::new(vec![]),
};
// See README.md for a discussion on dep-graph management.
- tcx.visit_all_items_in_krate(|def_id| ItemVariances::to_dep_node(&def_id),
- &mut terms_cx);
+ tcx.visit_all_items_in_krate(|def_id| ItemVariances::to_dep_node(&def_id), &mut terms_cx);
terms_cx
}
-fn lang_items(tcx: TyCtxt) -> Vec<(ast::NodeId,Vec<ty::Variance>)> {
+fn lang_items(tcx: TyCtxt) -> Vec<(ast::NodeId, Vec<ty::Variance>)> {
let all = vec![
(tcx.lang_items.phantom_data(), vec![ty::Covariant]),
(tcx.lang_items.unsafe_cell_type(), vec![ty::Invariant]),
fn add_inferreds_for_item(&mut self,
item_id: ast::NodeId,
has_self: bool,
- generics: &hir::Generics)
- {
- /*!
- * Add "inferreds" for the generic parameters declared on this
- * item. This has a lot of annoying parameters because we are
- * trying to drive this from the AST, rather than the
- * ty::Generics, so that we can get span info -- but this
- * means we must accommodate syntactic distinctions.
- */
+ generics: &hir::Generics) {
+ //! Add "inferreds" for the generic parameters declared on this
+ //! item. This has a lot of annoying parameters because we are
+ //! trying to drive this from the AST, rather than the
+ //! ty::Generics, so that we can get span info -- but this
+ //! means we must accommodate syntactic distinctions.
+ //!
// NB: In the code below for writing the results back into the
// tcx, we rely on the fact that all inferreds for a particular
// parameters".
if self.num_inferred() == inferreds_on_entry {
let item_def_id = self.tcx.map.local_def_id(item_id);
- let newly_added =
- self.tcx.item_variance_map.borrow_mut().insert(
- item_def_id,
- self.empty_variances.clone()).is_none();
+ let newly_added = self.tcx
+ .item_variance_map
+ .borrow_mut()
+ .insert(item_def_id, self.empty_variances.clone())
+ .is_none();
assert!(newly_added);
}
}
- fn add_inferred(&mut self,
- item_id: ast::NodeId,
- index: usize,
- param_id: ast::NodeId) {
+ fn add_inferred(&mut self, item_id: ast::NodeId, index: usize, param_id: ast::NodeId) {
let inf_index = InferredIndex(self.inferred_infos.len());
let term = self.arena.alloc(InferredTerm(inf_index));
let initial_variance = self.pick_initial_variance(item_id, index);
- self.inferred_infos.push(InferredInfo { item_id: item_id,
- index: index,
- param_id: param_id,
- term: term,
- initial_variance: initial_variance });
+ self.inferred_infos.push(InferredInfo {
+ item_id: item_id,
+ index: index,
+ param_id: param_id,
+ term: term,
+ initial_variance: initial_variance,
+ });
let newly_added = self.inferred_map.insert(param_id, inf_index).is_none();
assert!(newly_added);
inf_index={:?}, \
initial_variance={:?})",
self.tcx.item_path_str(self.tcx.map.local_def_id(item_id)),
- item_id, index, param_id, inf_index,
+ item_id,
+ index,
+ param_id,
+ inf_index,
initial_variance);
}
- fn pick_initial_variance(&self,
- item_id: ast::NodeId,
- index: usize)
- -> ty::Variance
- {
+ fn pick_initial_variance(&self, item_id: ast::NodeId, index: usize) -> ty::Variance {
match self.lang_items.iter().find(|&&(n, _)| n == item_id) {
Some(&(_, ref variances)) => variances[index],
- None => ty::Bivariant
+ None => ty::Bivariant,
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for TermsContext<'a, 'tcx> {
fn visit_item(&mut self, item: &hir::Item) {
- debug!("add_inferreds for item {}", self.tcx.map.node_to_string(item.id));
+ debug!("add_inferreds for item {}",
+ self.tcx.map.node_to_string(item.id));
match item.node {
hir::ItemEnum(_, ref generics) |
hir::ItemFn(..) |
hir::ItemMod(..) |
hir::ItemForeignMod(..) |
- hir::ItemTy(..) => {
- }
+ hir::ItemTy(..) => {}
}
}
}
-
// - +
// o
match (v1, v2) {
- (ty::Invariant, _) | (_, ty::Invariant) => ty::Invariant,
+ (ty::Invariant, _) |
+ (_, ty::Invariant) => ty::Invariant,
(ty::Covariant, ty::Contravariant) => ty::Invariant,
(ty::Contravariant, ty::Covariant) => ty::Invariant,
(ty::Contravariant, ty::Contravariant) => ty::Contravariant,
- (x, ty::Bivariant) | (ty::Bivariant, x) => x,
+ (x, ty::Bivariant) |
+ (ty::Bivariant, x) => x,
}
}
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::char::{MAX, from_digit, from_u32, from_u32_unchecked};
#[stable(feature = "rust1", since = "1.0.0")]
-pub use core::char::{EncodeUtf16, EncodeUtf8, EscapeDebug, EscapeDefault, EscapeUnicode};
+pub use core::char::{EscapeDebug, EscapeDefault, EscapeUnicode};
// unstable reexports
#[unstable(feature = "try_from", issue = "33417")]
C::len_utf16(self)
}
- /// Returns an iterator over the bytes of this character as UTF-8.
+ /// Encodes this character as UTF-8 into the provided byte buffer,
+ /// and then returns the subslice of the buffer that contains the encoded character.
///
- /// The returned iterator also has an `as_slice()` method to view the
- /// encoded bytes as a byte slice.
+ /// # Panics
+ ///
+ /// Panics if the buffer is not large enough.
+ /// A buffer of length four is large enough to encode any `char`.
///
/// # Examples
///
+ /// In both of these examples, 'ß' takes two bytes to encode.
+ ///
/// ```
/// #![feature(unicode)]
///
- /// let iterator = 'ß'.encode_utf8();
- /// assert_eq!(iterator.as_slice(), [0xc3, 0x9f]);
+ /// let mut b = [0; 2];
///
- /// for (i, byte) in iterator.enumerate() {
- /// println!("byte {}: {:x}", i, byte);
- /// }
+ /// let result = 'ß'.encode_utf8(&mut b);
+ ///
+ /// assert_eq!(result, "ß");
+ ///
+ /// assert_eq!(result.len(), 2);
+ /// ```
+ ///
+ /// A buffer that's too small:
+ ///
+ /// ```
+ /// #![feature(unicode)]
+ /// use std::thread;
+ ///
+ /// let result = thread::spawn(|| {
+ /// let mut b = [0; 1];
+ ///
+ /// // this panics
+ /// 'ß'.encode_utf8(&mut b);
+ /// }).join();
+ ///
+ /// assert!(result.is_err());
/// ```
- #[unstable(feature = "unicode", issue = "27784")]
+ #[unstable(feature = "unicode",
+ reason = "pending decision about Iterator/Writer/Reader",
+ issue = "27784")]
#[inline]
- pub fn encode_utf8(self) -> EncodeUtf8 {
- C::encode_utf8(self)
+ pub fn encode_utf8(self, dst: &mut [u8]) -> &mut str {
+ C::encode_utf8(self, dst)
}
- /// Returns an iterator over the `u16` entries of this character as UTF-16.
+ /// Encodes this character as UTF-16 into the provided `u16` buffer,
+ /// and then returns the subslice of the buffer that contains the encoded character.
///
- /// The returned iterator also has an `as_slice()` method to view the
- /// encoded form as a slice.
+ /// # Panics
+ ///
+ /// Panics if the buffer is not large enough.
+ /// A buffer of length 2 is large enough to encode any `char`.
///
/// # Examples
///
+ /// In both of these examples, '𝕊' takes two `u16`s to encode.
+ ///
/// ```
/// #![feature(unicode)]
///
- /// let iterator = '𝕊'.encode_utf16();
- /// assert_eq!(iterator.as_slice(), [0xd835, 0xdd4a]);
+ /// let mut b = [0; 2];
///
- /// for (i, val) in iterator.enumerate() {
- /// println!("entry {}: {:x}", i, val);
- /// }
+ /// let result = '𝕊'.encode_utf16(&mut b);
+ ///
+ /// assert_eq!(result.len(), 2);
/// ```
- #[unstable(feature = "unicode", issue = "27784")]
+ ///
+ /// A buffer that's too small:
+ ///
+ /// ```
+ /// #![feature(unicode)]
+ /// use std::thread;
+ ///
+ /// let result = thread::spawn(|| {
+ /// let mut b = [0; 1];
+ ///
+ /// // this panics
+ /// '𝕊'.encode_utf16(&mut b);
+ /// }).join();
+ ///
+ /// assert!(result.is_err());
+ /// ```
+ #[unstable(feature = "unicode",
+ reason = "pending decision about Iterator/Writer/Reader",
+ issue = "27784")]
#[inline]
- pub fn encode_utf16(self) -> EncodeUtf16 {
- C::encode_utf16(self)
+ pub fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16] {
+ C::encode_utf16(self, dst)
}
/// Returns true if this `char` is an alphabetic code point, and false if not.
return Some(tmp);
}
+ let mut buf = [0; 2];
self.chars.next().map(|ch| {
- let n = CharExt::encode_utf16(ch);
- let n = n.as_slice();
- if n.len() == 2 {
- self.extra = n[1];
+ let n = CharExt::encode_utf16(ch, &mut buf).len();
+ if n == 2 {
+ self.extra = buf[1];
}
- n[0]
+ buf[0]
})
}
use syntax::ast;
use rustc::hir;
-use rustc::middle::cstore;
use rustc::hir::def::Def;
use rustc::hir::def_id::DefId;
+use rustc::hir::map::DefPathData;
use rustc::hir::print as pprust;
use rustc::ty::{self, TyCtxt, VariantKind};
use rustc::util::nodemap::FnvHashSet;
}
Def::Struct(did)
// If this is a struct constructor, we skip it
- if tcx.sess.cstore.tuple_struct_definition_if_ctor(did).is_none() => {
+ if tcx.def_key(did).disambiguated_data.data != DefPathData::StructCtor => {
record_extern_fqn(cx, did, clean::TypeStruct);
ret.extend(build_impls(cx, tcx, did));
clean::StructItem(build_struct(cx, tcx, did))
Def::TyAlias(did) => {
record_extern_fqn(cx, did, clean::TypeTypedef);
ret.extend(build_impls(cx, tcx, did));
- build_type(cx, tcx, did)
+ clean::TypedefItem(build_type_alias(cx, tcx, did), false)
}
Def::Enum(did) => {
record_extern_fqn(cx, did, clean::TypeEnum);
ret.extend(build_impls(cx, tcx, did));
- build_type(cx, tcx, did)
+ clean::EnumItem(build_enum(cx, tcx, did))
}
// Assume that the enum type is reexported next to the variant, and
// variants don't show up in documentation specially.
}
}
+fn build_enum<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ did: DefId) -> clean::Enum {
+ let t = tcx.lookup_item_type(did);
+ let predicates = tcx.lookup_predicates(did);
+
+ clean::Enum {
+ generics: (t.generics, &predicates).clean(cx),
+ variants_stripped: false,
+ variants: tcx.lookup_adt_def(did).variants.clean(cx),
+ }
+}
+
fn build_struct<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>,
did: DefId) -> clean::Struct {
let t = tcx.lookup_item_type(did);
}
}
-fn build_type<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- did: DefId) -> clean::ItemEnum {
+fn build_type_alias<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ did: DefId) -> clean::Typedef {
let t = tcx.lookup_item_type(did);
let predicates = tcx.lookup_predicates(did);
- match t.ty.sty {
- ty::TyAdt(edef, _) if edef.is_enum() && !tcx.sess.cstore.is_typedef(did) => {
- return clean::EnumItem(clean::Enum {
- generics: (t.generics, &predicates).clean(cx),
- variants_stripped: false,
- variants: edef.variants.clean(cx),
- })
- }
- _ => {}
- }
- clean::TypedefItem(clean::Typedef {
+ clean::Typedef {
type_: t.ty.clean(cx),
generics: (t.generics, &predicates).clean(cx),
- }, false)
+ }
}
pub fn build_impls<'a, 'tcx>(cx: &DocContext,
build_impl(cx, tcx, did, &mut impls);
}
}
-
- // If this is the first time we've inlined something from this crate, then
- // we inline *all* impls from the crate into this crate. Note that there's
+ // If this is the first time we've inlined something from another crate, then
+ // we inline *all* impls from all the crates into this crate. Note that there's
// currently no way for us to filter this based on type, and we likely need
// many impls for a variety of reasons.
//
// Primarily, the impls will be used to populate the documentation for this
// type being inlined, but impls can also be used when generating
// documentation for primitives (no way to find those specifically).
- if cx.populated_crate_impls.borrow_mut().insert(did.krate) {
- for item in tcx.sess.cstore.crate_top_level_items(did.krate) {
- populate_impls(cx, tcx, item.def, &mut impls);
- }
+ if cx.populated_all_crate_impls.get() {
+ return impls;
+ }
- fn populate_impls<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def: cstore::DefLike,
- impls: &mut Vec<clean::Item>) {
- match def {
- cstore::DlImpl(did) => build_impl(cx, tcx, did, impls),
- cstore::DlDef(Def::Mod(did)) => {
- for item in tcx.sess.cstore.item_children(did) {
- populate_impls(cx, tcx, item.def, impls)
- }
- }
- _ => {}
- }
+ cx.populated_all_crate_impls.set(true);
+
+ for did in tcx.sess.cstore.implementations_of_trait(None) {
+ build_impl(cx, tcx, did, &mut impls);
+ }
+
+ // Also try to inline primitive impls from other crates.
+ let primitive_impls = [
+ tcx.lang_items.isize_impl(),
+ tcx.lang_items.i8_impl(),
+ tcx.lang_items.i16_impl(),
+ tcx.lang_items.i32_impl(),
+ tcx.lang_items.i64_impl(),
+ tcx.lang_items.usize_impl(),
+ tcx.lang_items.u8_impl(),
+ tcx.lang_items.u16_impl(),
+ tcx.lang_items.u32_impl(),
+ tcx.lang_items.u64_impl(),
+ tcx.lang_items.f32_impl(),
+ tcx.lang_items.f64_impl(),
+ tcx.lang_items.char_impl(),
+ tcx.lang_items.str_impl(),
+ tcx.lang_items.slice_impl(),
+ tcx.lang_items.slice_impl(),
+ tcx.lang_items.const_ptr_impl()
+ ];
+
+ for def_id in primitive_impls.iter().filter_map(|&def_id| def_id) {
+ if !def_id.is_local() {
+ tcx.populate_implementations_for_primitive_if_necessary(def_id);
+ build_impl(cx, tcx, def_id, &mut impls);
}
}
}
let predicates = tcx.lookup_predicates(did);
- let trait_items = tcx.sess.cstore.impl_items(did)
+ let trait_items = tcx.sess.cstore.impl_or_trait_items(did)
.iter()
- .filter_map(|did| {
- let did = did.def_id();
- let impl_item = tcx.impl_or_trait_item(did);
- match impl_item {
+ .filter_map(|&did| {
+ match tcx.impl_or_trait_item(did) {
ty::ConstTraitItem(ref assoc_const) => {
let did = assoc_const.def_id;
let type_scheme = tcx.lookup_item_type(did);
for_: for_,
generics: (ty.generics, &predicates).clean(cx),
items: trait_items,
- polarity: polarity.map(|p| { p.clean(cx) }),
+ polarity: Some(polarity.clean(cx)),
}),
source: clean::Span::empty(),
name: None,
// visit each node at most once.
let mut visited = FnvHashSet();
for item in tcx.sess.cstore.item_children(did) {
- match item.def {
- cstore::DlDef(Def::ForeignMod(did)) => {
- fill_in(cx, tcx, did, items);
- }
- cstore::DlDef(def) if item.vis == ty::Visibility::Public => {
- if !visited.insert(def) { continue }
+ if tcx.sess.cstore.visibility(item.def_id) == ty::Visibility::Public {
+ if !visited.insert(item.def_id) { continue }
+ if let Some(def) = tcx.sess.cstore.describe_def(item.def_id) {
if let Some(i) = try_inline_def(cx, tcx, def) {
items.extend(i)
}
}
- cstore::DlDef(..) => {}
- // All impls were inlined above
- cstore::DlImpl(..) => {}
- cstore::DlField => panic!("unimplemented field"),
}
}
}
use syntax_pos::{self, DUMMY_SP, Pos};
use rustc_trans::back::link;
-use rustc::middle::cstore;
use rustc::middle::privacy::AccessLevels;
use rustc::middle::resolve_lifetime::DefRegion::*;
use rustc::hir::def::Def;
-use rustc::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX};
-use rustc::hir::fold::Folder;
+use rustc::hir::def_id::{self, DefId, DefIndex, CRATE_DEF_INDEX};
use rustc::hir::print as pprust;
use rustc::ty::subst::Substs;
use rustc::ty::{self, AdtKind};
pub name: String,
pub src: PathBuf,
pub module: Option<Item>,
- pub externs: Vec<(ast::CrateNum, ExternalCrate)>,
+ pub externs: Vec<(def_id::CrateNum, ExternalCrate)>,
pub primitives: Vec<PrimitiveType>,
pub access_levels: Arc<AccessLevels<DefId>>,
// These are later on moved into `CACHEKEY`, leaving the map empty.
pub external_traits: FnvHashMap<DefId, Trait>,
}
-struct CrateNum(ast::CrateNum);
+struct CrateNum(def_id::CrateNum);
impl<'a, 'tcx> Clean<Crate> for visit_ast::RustdocVisitor<'a, 'tcx> {
fn clean(&self, cx: &DocContext) -> Crate {
impl Clean<ExternalCrate> for CrateNum {
fn clean(&self, cx: &DocContext) -> ExternalCrate {
let mut primitives = Vec::new();
+ let root = DefId { krate: self.0, index: CRATE_DEF_INDEX };
cx.tcx_opt().map(|tcx| {
- for item in tcx.sess.cstore.crate_top_level_items(self.0) {
- let did = match item.def {
- cstore::DlDef(Def::Mod(did)) => did,
- _ => continue
- };
- let attrs = inline::load_attrs(cx, tcx, did);
+ for item in tcx.sess.cstore.item_children(root) {
+ let attrs = inline::load_attrs(cx, tcx, item.def_id);
PrimitiveType::find(&attrs).map(|prim| primitives.push(prim));
}
});
ExternalCrate {
name: (&cx.sess().cstore.crate_name(self.0)[..]).to_owned(),
- attrs: cx.sess().cstore.crate_attrs(self.0).clean(cx),
+ attrs: cx.sess().cstore.item_attrs(root).clean(cx),
primitives: primitives,
}
}
}
impl Clean<Lifetime> for hir::Lifetime {
- fn clean(&self, _: &DocContext) -> Lifetime {
+ fn clean(&self, cx: &DocContext) -> Lifetime {
+ if let Some(tcx) = cx.tcx_opt() {
+ let def = tcx.named_region_map.defs.get(&self.id).cloned();
+ match def {
+ Some(DefEarlyBoundRegion(_, node_id)) |
+ Some(DefLateBoundRegion(_, node_id)) |
+ Some(DefFreeRegion(_, node_id)) => {
+ if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() {
+ return lt;
+ }
+ }
+ _ => {}
+ }
+ }
Lifetime(self.name.to_string())
}
}
let mut names = if cx.map.as_local_node_id(did).is_some() {
vec![].into_iter()
} else {
- cx.tcx().sess.cstore.method_arg_names(did).into_iter()
+ cx.tcx().sess.cstore.fn_arg_names(did).into_iter()
}.peekable();
FnDecl {
output: Return(sig.0.output.clean(cx)),
values: sig.0.inputs.iter().map(|t| {
Argument {
type_: t.clean(cx),
- id: 0,
- name: names.next().unwrap_or("".to_string()),
+ id: ast::CRATE_NODE_ID,
+ name: names.next().map_or("".to_string(), |name| name.to_string()),
}
}).collect(),
},
}
}
-// Poor man's type parameter substitution at HIR level.
-// Used to replace private type aliases in public signatures with their aliased types.
-struct SubstAlias<'a, 'tcx: 'a> {
- tcx: &'a ty::TyCtxt<'a, 'tcx, 'tcx>,
- // Table type parameter definition -> substituted type
- ty_substs: FnvHashMap<Def, hir::Ty>,
- // Table node id of lifetime parameter definition -> substituted lifetime
- lt_substs: FnvHashMap<ast::NodeId, hir::Lifetime>,
-}
-
-impl<'a, 'tcx: 'a, 'b: 'tcx> Folder for SubstAlias<'a, 'tcx> {
- fn fold_ty(&mut self, ty: P<hir::Ty>) -> P<hir::Ty> {
- if let hir::TyPath(..) = ty.node {
- let def = self.tcx.expect_def(ty.id);
- if let Some(new_ty) = self.ty_substs.get(&def).cloned() {
- return P(new_ty);
- }
- }
- hir::fold::noop_fold_ty(ty, self)
- }
- fn fold_lifetime(&mut self, lt: hir::Lifetime) -> hir::Lifetime {
- let def = self.tcx.named_region_map.defs.get(<.id).cloned();
- match def {
- Some(DefEarlyBoundRegion(_, node_id)) |
- Some(DefLateBoundRegion(_, node_id)) |
- Some(DefFreeRegion(_, node_id)) => {
- if let Some(lt) = self.lt_substs.get(&node_id).cloned() {
- return lt;
- }
- }
- _ => {}
- }
- hir::fold::noop_fold_lifetime(lt, self)
- }
-}
-
impl Clean<Type> for hir::Ty {
fn clean(&self, cx: &DocContext) -> Type {
use rustc::hir::*;
},
TyTup(ref tys) => Tuple(tys.clean(cx)),
TyPath(None, ref path) => {
- if let Some(tcx) = cx.tcx_opt() {
- // Substitute private type aliases
- let def = tcx.expect_def(self.id);
+ let tcx_and_def = cx.tcx_opt().map(|tcx| (tcx, tcx.expect_def(self.id)));
+ if let Some((_, def)) = tcx_and_def {
+ if let Some(new_ty) = cx.ty_substs.borrow().get(&def).cloned() {
+ return new_ty;
+ }
+ }
+
+ let tcx_and_alias = tcx_and_def.and_then(|(tcx, def)| {
if let Def::TyAlias(def_id) = def {
- if let Some(node_id) = tcx.map.as_local_node_id(def_id) {
+ // Substitute private type aliases
+ tcx.map.as_local_node_id(def_id).and_then(|node_id| {
if !cx.access_levels.borrow().is_exported(def_id) {
- let item = tcx.map.expect_item(node_id);
- if let hir::ItemTy(ref ty, ref generics) = item.node {
- let provided_params = &path.segments.last().unwrap().parameters;
- let mut ty_substs = FnvHashMap();
- let mut lt_substs = FnvHashMap();
- for (i, ty_param) in generics.ty_params.iter().enumerate() {
- let ty_param_def = tcx.expect_def(ty_param.id);
- if let Some(ty) = provided_params.types().get(i).cloned()
- .cloned() {
- ty_substs.insert(ty_param_def, ty.unwrap());
- } else if let Some(default) = ty_param.default.clone() {
- ty_substs.insert(ty_param_def, default.unwrap());
- }
- }
- for (i, lt_param) in generics.lifetimes.iter().enumerate() {
- if let Some(lt) = provided_params.lifetimes().get(i)
- .cloned()
- .cloned() {
- lt_substs.insert(lt_param.lifetime.id, lt);
- }
- }
- let mut subst_alias = SubstAlias {
- tcx: &tcx,
- ty_substs: ty_substs,
- lt_substs: lt_substs
- };
- return subst_alias.fold_ty(ty.clone()).clean(cx);
- }
+ Some((tcx, &tcx.map.expect_item(node_id).node))
+ } else {
+ None
}
+ })
+ } else {
+ None
+ }
+ });
+ if let Some((tcx, &hir::ItemTy(ref ty, ref generics))) = tcx_and_alias {
+ let provided_params = &path.segments.last().unwrap().parameters;
+ let mut ty_substs = FnvHashMap();
+ let mut lt_substs = FnvHashMap();
+ for (i, ty_param) in generics.ty_params.iter().enumerate() {
+ let ty_param_def = tcx.expect_def(ty_param.id);
+ if let Some(ty) = provided_params.types().get(i).cloned()
+ .cloned() {
+ ty_substs.insert(ty_param_def, ty.unwrap().clean(cx));
+ } else if let Some(default) = ty_param.default.clone() {
+ ty_substs.insert(ty_param_def, default.unwrap().clean(cx));
+ }
+ }
+ for (i, lt_param) in generics.lifetimes.iter().enumerate() {
+ if let Some(lt) = provided_params.lifetimes().get(i).cloned()
+ .cloned() {
+ lt_substs.insert(lt_param.lifetime.id, lt.clean(cx));
}
}
+ return cx.enter_alias(ty_substs, lt_substs, || ty.clean(cx));
}
resolve_type(cx, path.clean(cx), self.id)
}
type_params: Vec::new(),
where_predicates: Vec::new()
},
- decl: (cx.map.local_def_id(0), &fty.sig).clean(cx),
+ decl: (cx.map.local_def_id(ast::CRATE_NODE_ID), &fty.sig).clean(cx),
abi: fty.abi,
}),
ty::TyAdt(def, substs) => {
impl<'tcx> Clean<Item> for ty::FieldDefData<'tcx, 'static> {
fn clean(&self, cx: &DocContext) -> Item {
- // FIXME: possible O(n^2)-ness! Not my fault.
- let attr_map = cx.tcx().sess.cstore.crate_struct_field_attrs(self.did.krate);
Item {
name: Some(self.name).clean(cx),
- attrs: attr_map.get(&self.did).unwrap_or(&Vec::new()).clean(cx),
+ attrs: cx.tcx().get_attrs(self.did).clean(cx),
source: Span::empty(),
visibility: self.vis.clean(cx),
stability: get_stability(cx, self.did),
name: None,
attrs: self.attrs.clean(cx),
source: self.whence.clean(cx),
- def_id: cx.map.local_def_id(0),
+ def_id: cx.map.local_def_id(ast::CRATE_NODE_ID),
visibility: self.vis.clean(cx),
stability: None,
deprecation: None,
fn register_def(cx: &DocContext, def: Def) -> DefId {
debug!("register_def({:?})", def);
+ let tcx = cx.tcx();
+
let (did, kind) = match def {
Def::Fn(i) => (i, TypeFunction),
Def::TyAlias(i) => (i, TypeTypedef),
Def::Union(i) => (i, TypeUnion),
Def::Mod(i) => (i, TypeModule),
Def::Static(i, _) => (i, TypeStatic),
- Def::Variant(i, _) => (i, TypeEnum),
+ Def::Variant(i) => (tcx.parent_def_id(i).unwrap(), TypeEnum),
Def::SelfTy(Some(def_id), _) => (def_id, TypeTrait),
- Def::SelfTy(_, Some(impl_id)) => {
- // For Def::SelfTy() values inlined from another crate, the
- // impl_id will be DUMMY_NODE_ID, which would cause problems.
- // But we should never run into an impl from another crate here.
- return cx.map.local_def_id(impl_id)
+ Def::SelfTy(_, Some(impl_def_id)) => {
+ return impl_def_id
}
_ => return def.def_id()
};
if did.is_local() { return did }
- let tcx = match cx.tcx_opt() {
- Some(tcx) => tcx,
- None => return did
- };
inline::record_extern_fqn(cx, did, kind);
if let TypeTrait = kind {
let t = inline::build_external_trait(cx, tcx, did);
use rustc::dep_graph::DepGraph;
use rustc::session::{self, config};
use rustc::hir::def_id::DefId;
+use rustc::hir::def::Def;
use rustc::middle::privacy::AccessLevels;
use rustc::ty::{self, TyCtxt};
use rustc::hir::map as hir_map;
use rustc::lint;
-use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
+use rustc::util::nodemap::FnvHashMap;
use rustc_trans::back::link;
use rustc_resolve as resolve;
use rustc_metadata::cstore::CStore;
use errors::emitter::ColorConfig;
use std::cell::{RefCell, Cell};
+use std::mem;
use std::rc::Rc;
+use std::path::PathBuf;
use visit_ast::RustdocVisitor;
use clean;
pub map: &'a hir_map::Map<'tcx>,
pub maybe_typed: MaybeTyped<'a, 'tcx>,
pub input: Input,
- pub populated_crate_impls: RefCell<FnvHashSet<ast::CrateNum>>,
+ pub populated_all_crate_impls: Cell<bool>,
pub deref_trait_did: Cell<Option<DefId>>,
pub deref_mut_trait_did: Cell<Option<DefId>>,
// Note that external items for which `doc(hidden)` applies to are shown as
pub renderinfo: RefCell<RenderInfo>,
/// Later on moved through `clean::Crate` into `html::render::CACHE_KEY`
pub external_traits: RefCell<FnvHashMap<DefId, clean::Trait>>,
+
+ // The current set of type and lifetime substitutions,
+ // for expanding type aliases at the HIR level:
+
+ /// Table type parameter definition -> substituted type
+ pub ty_substs: RefCell<FnvHashMap<Def, clean::Type>>,
+ /// Table node id of lifetime parameter definition -> substituted lifetime
+ pub lt_substs: RefCell<FnvHashMap<ast::NodeId, clean::Lifetime>>,
}
impl<'b, 'tcx> DocContext<'b, 'tcx> {
let tcx_opt = self.tcx_opt();
tcx_opt.expect("tcx not present")
}
+
+ /// Call the closure with the given parameters set as
+ /// the substitutions for a type alias' RHS.
+ pub fn enter_alias<F, R>(&self,
+ ty_substs: FnvHashMap<Def, clean::Type>,
+ lt_substs: FnvHashMap<ast::NodeId, clean::Lifetime>,
+ f: F) -> R
+ where F: FnOnce() -> R {
+ let (old_tys, old_lts) =
+ (mem::replace(&mut *self.ty_substs.borrow_mut(), ty_substs),
+ mem::replace(&mut *self.lt_substs.borrow_mut(), lt_substs));
+ let r = f();
+ *self.ty_substs.borrow_mut() = old_tys;
+ *self.lt_substs.borrow_mut() = old_lts;
+ r
+ }
}
pub trait DocAccessLevels {
cfgs: Vec<String>,
externs: config::Externs,
input: Input,
- triple: Option<String>) -> (clean::Crate, RenderInfo)
+ triple: Option<String>,
+ maybe_sysroot: Option<PathBuf>) -> (clean::Crate, RenderInfo)
{
// Parse, resolve, and typecheck the given crate.
let warning_lint = lint::builtin::WARNINGS.name_lower();
let sessopts = config::Options {
- maybe_sysroot: None,
+ maybe_sysroot: maybe_sysroot,
search_paths: search_paths,
crate_types: vec!(config::CrateTypeRlib),
lint_opts: vec!((warning_lint, lint::Allow)),
map: &tcx.map,
maybe_typed: Typed(tcx),
input: input,
- populated_crate_impls: RefCell::new(FnvHashSet()),
+ populated_all_crate_impls: Cell::new(false),
deref_trait_did: Cell::new(None),
deref_mut_trait_did: Cell::new(None),
access_levels: RefCell::new(access_levels),
- external_traits: RefCell::new(FnvHashMap()),
- renderinfo: RefCell::new(Default::default()),
+ external_traits: Default::default(),
+ renderinfo: Default::default(),
+ ty_substs: Default::default(),
+ lt_substs: Default::default(),
};
debug!("crate: {:?}", ctxt.map.krate());
use syntax_pos::{self, Span};
use rustc::hir;
+use rustc::hir::def_id::CrateNum;
pub struct Module {
pub name: Option<Name>,
pub fn new(name: Option<Name>) -> Module {
Module {
name : name,
- id: 0,
+ id: ast::CRATE_NODE_ID,
vis: hir::Inherited,
stab: None,
depr: None,
pub struct ExternCrate {
pub name: Name,
- pub cnum: ast::CrateNum,
+ pub cnum: CrateNum,
pub path: Option<String>,
pub vis: hir::Visibility,
pub attrs: hir::HirVec<ast::Attribute>,
use std::fmt;
use std::iter::repeat;
-use rustc::middle::cstore::LOCAL_CRATE;
-use rustc::hir::def_id::DefId;
+use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use syntax::abi::Abi;
use rustc::hir;
//! the `render_inner_with_highlighting` or `render_with_highlighting`
//! functions. For more advanced use cases (if you want to supply your own css
//! classes or control how the HTML is generated, or even generate something
-//! other then HTML), then you should implement the the `Writer` trait and use a
+//! other than HTML), then you should implement the `Writer` trait and use a
//! `Classifier`.
use html::escape::Escape;
#![allow(non_camel_case_types)]
use libc;
-use rustc::session::config::get_unstable_features_setting;
use std::ascii::AsciiExt;
use std::cell::RefCell;
use std::default::Default;
let mut data = LangString::all_false();
let mut allow_compile_fail = false;
let mut allow_error_code_check = false;
- match get_unstable_features_setting() {
- UnstableFeatures::Allow | UnstableFeatures::Cheat => {
- allow_compile_fail = true;
- allow_error_code_check = true;
- }
- _ => {},
- };
+ if UnstableFeatures::from_environment().is_nightly_build() {
+ allow_compile_fail = true;
+ allow_error_code_check = true;
+ }
let tokens = string.split(|c: char|
!(c == '_' || c == '-' || c.is_alphanumeric())
use externalfiles::ExternalHtml;
use serialize::json::{ToJson, Json, as_json};
-use syntax::{abi, ast};
+use syntax::abi;
use syntax::feature_gate::UnstableFeatures;
-use rustc::middle::cstore::LOCAL_CRATE;
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE};
use rustc::middle::privacy::AccessLevels;
use rustc::middle::stability;
-use rustc::session::config::get_unstable_features_setting;
use rustc::hir;
use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
use rustc_data_structures::flock;
pub implementors: FnvHashMap<DefId, Vec<Implementor>>,
/// Cache of where external crate documentation can be found.
- pub extern_locations: FnvHashMap<ast::CrateNum, (String, ExternalLocation)>,
+ pub extern_locations: FnvHashMap<CrateNum, (String, ExternalLocation)>,
/// Cache of where documentation for primitives can be found.
- pub primitive_locations: FnvHashMap<clean::PrimitiveType, ast::CrateNum>,
+ pub primitive_locations: FnvHashMap<clean::PrimitiveType, CrateNum>,
// Note that external items for which `doc(hidden)` applies to are shown as
// non-reachable while local items aren't. This is because we're reusing
fn item_function(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
f: &clean::Function) -> fmt::Result {
// FIXME(#24111): remove when `const_fn` is stabilized
- let vis_constness = match get_unstable_features_setting() {
+ let vis_constness = match UnstableFeatures::from_environment() {
UnstableFeatures::Allow => f.constness,
_ => hir::Constness::NotConst
};
}
};
// FIXME(#24111): remove when `const_fn` is stabilized
- let vis_constness = match get_unstable_features_setting() {
+ let vis_constness = match UnstableFeatures::from_environment() {
UnstableFeatures::Allow => constness,
_ => hir::Constness::NotConst
};
+@import "normalize.css";
+
/**
* Copyright 2013 The Rust Project Developers. See the COPYRIGHT
* file at the top-level directory of this distribution and at
src: local('Source Code Pro Semibold'), url("SourceCodePro-Semibold.woff") format('woff');
}
-@import "normalize.css";
-
* {
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
font-size: 21px;
}
-h4 > code, h3 > code, invisible > code {
+h4 > code, h3 > code, .invisible > code {
position: inherit;
}
font-size: 90%;
}
+/* Shift the `where` clause in trait listings down a line */
+pre.trait .where::before {
+ content: '\a ';
+}
+
nav {
border-bottom: 1px solid;
padding-bottom: 10px;
.content .search-results td:first-child { padding-right: 0; }
.content .search-results td:first-child a { padding-right: 10px; }
-tr.result span.primitive::after { content: ' (primitive type)'; font-style: italic; color: black}
+tr.result span.primitive::after { content: ' (primitive type)'; font-style: italic; color: black;
+}
body.blur > :not(#help) {
filter: blur(8px);
border-bottom-color: #000;
}
nav.main .separator {
- border-color: 1px solid #000;
+ border: 1px solid #000;
}
a {
color: #000;
use clean::Attributes;
-type Pass = (&'static str, // name
- fn(clean::Crate) -> plugins::PluginResult, // fn
- &'static str); // description
-
-const PASSES: &'static [Pass] = &[
- ("strip-hidden", passes::strip_hidden,
- "strips all doc(hidden) items from the output"),
- ("unindent-comments", passes::unindent_comments,
- "removes excess indentation on comments in order for markdown to like it"),
- ("collapse-docs", passes::collapse_docs,
- "concatenates all document attributes into one document attribute"),
- ("strip-private", passes::strip_private,
- "strips all private items from a crate which cannot be seen externally, \
- implies strip-priv-imports"),
- ("strip-priv-imports", passes::strip_priv_imports,
- "strips all private import statements (`use`, `extern crate`) from a crate"),
-];
-
-const DEFAULT_PASSES: &'static [&'static str] = &[
- "strip-hidden",
- "strip-private",
- "collapse-docs",
- "unindent-comments",
-];
-
struct Output {
krate: clean::Crate,
renderinfo: html::render::RenderInfo,
}
pub fn main() {
- const STACK_SIZE: usize = 32000000; // 32MB
+ const STACK_SIZE: usize = 32_000_000; // 32MB
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
own theme", "PATH")),
unstable(optmulti("Z", "",
"internal and debugging options (only on nightly build)", "FLAG")),
+ stable(optopt("", "sysroot", "Override the system root", "PATH")),
)
}
if matches.opt_strs("passes") == ["list"] {
println!("Available passes for running rustdoc:");
- for &(name, _, description) in PASSES {
+ for &(name, _, description) in passes::PASSES {
println!("{:>20} - {}", name, description);
}
println!("\nDefault passes for rustdoc:");
- for &name in DEFAULT_PASSES {
+ for &name in passes::DEFAULT_PASSES {
println!("{:>20}", name);
}
return 0;
if matches.free.is_empty() {
println!("expected an input file to act on");
return 1;
- } if matches.free.len() > 1 {
+ }
+ if matches.free.len() > 1 {
println!("only one input file may be specified");
return 1;
}
}
let cfgs = matches.opt_strs("cfg");
let triple = matches.opt_str("target");
+ let maybe_sysroot = matches.opt_str("sysroot").map(PathBuf::from);
let cr = PathBuf::from(cratefile);
info!("starting to run rustc");
use rustc::session::config::Input;
tx.send(core::run_core(paths, cfgs, externs, Input::File(cr),
- triple)).unwrap();
+ triple, maybe_sysroot)).unwrap();
});
let (mut krate, renderinfo) = rx.recv().unwrap();
info!("finished with rustc");
}
if default_passes {
- for name in DEFAULT_PASSES.iter().rev() {
+ for name in passes::DEFAULT_PASSES.iter().rev() {
passes.insert(0, name.to_string());
}
}
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(PathBuf::from(path));
for pass in &passes {
- let plugin = match PASSES.iter()
- .position(|&(p, ..)| {
- p == *pass
- }) {
- Some(i) => PASSES[i].1,
+ let plugin = match passes::PASSES.iter()
+ .position(|&(p, ..)| {
+ p == *pass
+ }) {
+ Some(i) => passes::PASSES[i].1,
None => {
error!("unknown pass {}, skipping", *pass);
continue
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::hir::def_id::DefId;
-use rustc::middle::privacy::AccessLevels;
-use rustc::util::nodemap::DefIdSet;
-use std::cmp;
-use std::mem;
-use std::string::String;
-use std::usize;
-
-use clean::{self, Attributes, GetDefId};
-use clean::Item;
-use plugins;
-use fold;
-use fold::DocFolder;
-use fold::FoldItem::Strip;
-
-/// Strip items marked `#[doc(hidden)]`
-pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult {
- let mut retained = DefIdSet();
-
- // strip all #[doc(hidden)] items
- let krate = {
- struct Stripper<'a> {
- retained: &'a mut DefIdSet,
- update_retained: bool,
- }
- impl<'a> fold::DocFolder for Stripper<'a> {
- fn fold_item(&mut self, i: Item) -> Option<Item> {
- if i.attrs.list("doc").has_word("hidden") {
- debug!("found one in strip_hidden; removing");
- // use a dedicated hidden item for given item type if any
- match i.inner {
- clean::StructFieldItem(..) | clean::ModuleItem(..) => {
- // We need to recurse into stripped modules to
- // strip things like impl methods but when doing so
- // we must not add any items to the `retained` set.
- let old = mem::replace(&mut self.update_retained, false);
- let ret = Strip(self.fold_item_recur(i).unwrap()).fold();
- self.update_retained = old;
- return ret;
- }
- _ => return None,
- }
- } else {
- if self.update_retained {
- self.retained.insert(i.def_id);
- }
- }
- self.fold_item_recur(i)
- }
- }
- let mut stripper = Stripper{ retained: &mut retained, update_retained: true };
- stripper.fold_crate(krate)
- };
-
- // strip all impls referencing stripped items
- let mut stripper = ImplStripper { retained: &retained };
- stripper.fold_crate(krate)
-}
-
-/// Strip private items from the point of view of a crate or externally from a
-/// crate, specified by the `xcrate` flag.
-pub fn strip_private(mut krate: clean::Crate) -> plugins::PluginResult {
- // This stripper collects all *retained* nodes.
- let mut retained = DefIdSet();
- let access_levels = krate.access_levels.clone();
-
- // strip all private items
- {
- let mut stripper = Stripper {
- retained: &mut retained,
- access_levels: &access_levels,
- update_retained: true,
- };
- krate = ImportStripper.fold_crate(stripper.fold_crate(krate));
- }
-
- // strip all impls referencing private items
- let mut stripper = ImplStripper { retained: &retained };
- stripper.fold_crate(krate)
-}
-
-struct Stripper<'a> {
- retained: &'a mut DefIdSet,
- access_levels: &'a AccessLevels<DefId>,
- update_retained: bool,
-}
-
-impl<'a> fold::DocFolder for Stripper<'a> {
- fn fold_item(&mut self, i: Item) -> Option<Item> {
- match i.inner {
- clean::StrippedItem(..) => {
- // We need to recurse into stripped modules to strip things
- // like impl methods but when doing so we must not add any
- // items to the `retained` set.
- let old = mem::replace(&mut self.update_retained, false);
- let ret = self.fold_item_recur(i);
- self.update_retained = old;
- return ret;
- }
- // These items can all get re-exported
- clean::TypedefItem(..) | clean::StaticItem(..) |
- clean::StructItem(..) | clean::EnumItem(..) |
- clean::TraitItem(..) | clean::FunctionItem(..) |
- clean::VariantItem(..) | clean::MethodItem(..) |
- clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) |
- clean::ConstantItem(..) | clean::UnionItem(..) => {
- if i.def_id.is_local() {
- if !self.access_levels.is_exported(i.def_id) {
- return None;
- }
- }
- }
-
- clean::StructFieldItem(..) => {
- if i.visibility != Some(clean::Public) {
- return Strip(i).fold();
- }
- }
-
- clean::ModuleItem(..) => {
- if i.def_id.is_local() && i.visibility != Some(clean::Public) {
- let old = mem::replace(&mut self.update_retained, false);
- let ret = Strip(self.fold_item_recur(i).unwrap()).fold();
- self.update_retained = old;
- return ret;
- }
- }
-
- // handled in the `strip-priv-imports` pass
- clean::ExternCrateItem(..) | clean::ImportItem(..) => {}
-
- clean::DefaultImplItem(..) | clean::ImplItem(..) => {}
-
- // tymethods/macros have no control over privacy
- clean::MacroItem(..) | clean::TyMethodItem(..) => {}
-
- // Primitives are never stripped
- clean::PrimitiveItem(..) => {}
-
- // Associated consts and types are never stripped
- clean::AssociatedConstItem(..) |
- clean::AssociatedTypeItem(..) => {}
- }
-
- let fastreturn = match i.inner {
- // nothing left to do for traits (don't want to filter their
- // methods out, visibility controlled by the trait)
- clean::TraitItem(..) => true,
-
- // implementations of traits are always public.
- clean::ImplItem(ref imp) if imp.trait_.is_some() => true,
- // Struct variant fields have inherited visibility
- clean::VariantItem(clean::Variant {
- kind: clean::StructVariant(..)
- }) => true,
- _ => false,
- };
-
- let i = if fastreturn {
- if self.update_retained {
- self.retained.insert(i.def_id);
- }
- return Some(i);
- } else {
- self.fold_item_recur(i)
- };
-
- i.and_then(|i| {
- match i.inner {
- // emptied modules have no need to exist
- clean::ModuleItem(ref m)
- if m.items.is_empty() &&
- i.doc_value().is_none() => None,
- _ => {
- if self.update_retained {
- self.retained.insert(i.def_id);
- }
- Some(i)
- }
- }
- })
- }
-}
-
-// This stripper discards all impls which reference stripped items
-struct ImplStripper<'a> {
- retained: &'a DefIdSet
-}
-
-impl<'a> fold::DocFolder for ImplStripper<'a> {
- fn fold_item(&mut self, i: Item) -> Option<Item> {
- if let clean::ImplItem(ref imp) = i.inner {
- // emptied none trait impls can be stripped
- if imp.trait_.is_none() && imp.items.is_empty() {
- return None;
- }
- if let Some(did) = imp.for_.def_id() {
- if did.is_local() && !imp.for_.is_generic() &&
- !self.retained.contains(&did)
- {
- return None;
- }
- }
- if let Some(did) = imp.trait_.def_id() {
- if did.is_local() && !self.retained.contains(&did) {
- return None;
- }
- }
- }
- self.fold_item_recur(i)
- }
-}
-
-// This stripper discards all private import statements (`use`, `extern crate`)
-struct ImportStripper;
-impl fold::DocFolder for ImportStripper {
- fn fold_item(&mut self, i: Item) -> Option<Item> {
- match i.inner {
- clean::ExternCrateItem(..) |
- clean::ImportItem(..) if i.visibility != Some(clean::Public) => None,
- _ => self.fold_item_recur(i)
- }
- }
-}
-
-pub fn strip_priv_imports(krate: clean::Crate) -> plugins::PluginResult {
- ImportStripper.fold_crate(krate)
-}
-
-pub fn unindent_comments(krate: clean::Crate) -> plugins::PluginResult {
- struct CommentCleaner;
- impl fold::DocFolder for CommentCleaner {
- fn fold_item(&mut self, mut i: Item) -> Option<Item> {
- let mut avec: Vec<clean::Attribute> = Vec::new();
- for attr in &i.attrs {
- match attr {
- &clean::NameValue(ref x, ref s)
- if "doc" == *x => {
- avec.push(clean::NameValue("doc".to_string(),
- unindent(s)))
- }
- x => avec.push(x.clone())
- }
- }
- i.attrs = avec;
- self.fold_item_recur(i)
- }
- }
- let mut cleaner = CommentCleaner;
- let krate = cleaner.fold_crate(krate);
- krate
-}
-
-pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult {
- struct Collapser;
- impl fold::DocFolder for Collapser {
- fn fold_item(&mut self, mut i: Item) -> Option<Item> {
- let mut docstr = String::new();
- for attr in &i.attrs {
- if let clean::NameValue(ref x, ref s) = *attr {
- if "doc" == *x {
- docstr.push_str(s);
- docstr.push('\n');
- }
- }
- }
- let mut a: Vec<clean::Attribute> = i.attrs.iter().filter(|&a| match a {
- &clean::NameValue(ref x, _) if "doc" == *x => false,
- _ => true
- }).cloned().collect();
- if !docstr.is_empty() {
- a.push(clean::NameValue("doc".to_string(), docstr));
- }
- i.attrs = a;
- self.fold_item_recur(i)
- }
- }
- let mut collapser = Collapser;
- let krate = collapser.fold_crate(krate);
- krate
-}
-
-pub fn unindent(s: &str) -> String {
- let lines = s.lines().collect::<Vec<&str> >();
- let mut saw_first_line = false;
- let mut saw_second_line = false;
- let min_indent = lines.iter().fold(usize::MAX, |min_indent, line| {
-
- // After we see the first non-whitespace line, look at
- // the line we have. If it is not whitespace, and therefore
- // part of the first paragraph, then ignore the indentation
- // level of the first line
- let ignore_previous_indents =
- saw_first_line &&
- !saw_second_line &&
- !line.chars().all(|c| c.is_whitespace());
-
- let min_indent = if ignore_previous_indents {
- usize::MAX
- } else {
- min_indent
- };
-
- if saw_first_line {
- saw_second_line = true;
- }
-
- if line.chars().all(|c| c.is_whitespace()) {
- min_indent
- } else {
- saw_first_line = true;
- let mut whitespace = 0;
- line.chars().all(|char| {
- // Compare against either space or tab, ignoring whether they
- // are mixed or not
- if char == ' ' || char == '\t' {
- whitespace += 1;
- true
- } else {
- false
- }
- });
- cmp::min(min_indent, whitespace)
- }
- });
-
- if !lines.is_empty() {
- let mut unindented = vec![ lines[0].trim().to_string() ];
- unindented.extend_from_slice(&lines[1..].iter().map(|&line| {
- if line.chars().all(|c| c.is_whitespace()) {
- line.to_string()
- } else {
- assert!(line.len() >= min_indent);
- line[min_indent..].to_string()
- }
- }).collect::<Vec<_>>());
- unindented.join("\n")
- } else {
- s.to_string()
- }
-}
-
-#[cfg(test)]
-mod unindent_tests {
- use super::unindent;
-
- #[test]
- fn should_unindent() {
- let s = " line1\n line2".to_string();
- let r = unindent(&s);
- assert_eq!(r, "line1\nline2");
- }
-
- #[test]
- fn should_unindent_multiple_paragraphs() {
- let s = " line1\n\n line2".to_string();
- let r = unindent(&s);
- assert_eq!(r, "line1\n\nline2");
- }
-
- #[test]
- fn should_leave_multiple_indent_levels() {
- // Line 2 is indented another level beyond the
- // base indentation and should be preserved
- let s = " line1\n\n line2".to_string();
- let r = unindent(&s);
- assert_eq!(r, "line1\n\n line2");
- }
-
- #[test]
- fn should_ignore_first_line_indent() {
- // The first line of the first paragraph may not be indented as
- // far due to the way the doc string was written:
- //
- // #[doc = "Start way over here
- // and continue here"]
- let s = "line1\n line2".to_string();
- let r = unindent(&s);
- assert_eq!(r, "line1\nline2");
- }
-
- #[test]
- fn should_not_ignore_first_line_indent_in_a_single_line_para() {
- let s = "line1\n\n line2".to_string();
- let r = unindent(&s);
- assert_eq!(r, "line1\n\n line2");
- }
-
- #[test]
- fn should_unindent_tabs() {
- let s = "\tline1\n\tline2".to_string();
- let r = unindent(&s);
- assert_eq!(r, "line1\nline2");
- }
-
- #[test]
- fn should_trim_mixed_indentation() {
- let s = "\t line1\n\t line2".to_string();
- let r = unindent(&s);
- assert_eq!(r, "line1\nline2");
-
- let s = " \tline1\n \tline2".to_string();
- let r = unindent(&s);
- assert_eq!(r, "line1\nline2");
- }
-}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::string::String;
+
+use clean::{self, Item};
+use plugins;
+use fold;
+use fold::DocFolder;
+
+pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult {
+ let mut collapser = Collapser;
+ let krate = collapser.fold_crate(krate);
+ krate
+}
+
+struct Collapser;
+
+impl fold::DocFolder for Collapser {
+ fn fold_item(&mut self, mut i: Item) -> Option<Item> {
+ let mut docstr = String::new();
+ for attr in &i.attrs {
+ if let clean::NameValue(ref x, ref s) = *attr {
+ if "doc" == *x {
+ docstr.push_str(s);
+ docstr.push('\n');
+ }
+ }
+ }
+ let mut a: Vec<clean::Attribute> = i.attrs.iter().filter(|&a| match a {
+ &clean::NameValue(ref x, _) if "doc" == *x => false,
+ _ => true
+ }).cloned().collect();
+ if !docstr.is_empty() {
+ a.push(clean::NameValue("doc".to_string(), docstr));
+ }
+ i.attrs = a;
+ self.fold_item_recur(i)
+ }
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir::def_id::DefId;
+use rustc::middle::privacy::AccessLevels;
+use rustc::util::nodemap::DefIdSet;
+use std::mem;
+
+use clean::{self, GetDefId, Item};
+use fold;
+use fold::FoldItem::Strip;
+use plugins;
+
+mod collapse_docs;
+pub use self::collapse_docs::collapse_docs;
+
+mod strip_hidden;
+pub use self::strip_hidden::strip_hidden;
+
+mod strip_private;
+pub use self::strip_private::strip_private;
+
+mod strip_priv_imports;
+pub use self::strip_priv_imports::strip_priv_imports;
+
+mod unindent_comments;
+pub use self::unindent_comments::unindent_comments;
+
+type Pass = (&'static str, // name
+ fn(clean::Crate) -> plugins::PluginResult, // fn
+ &'static str); // description
+
+pub const PASSES: &'static [Pass] = &[
+ ("strip-hidden", strip_hidden,
+ "strips all doc(hidden) items from the output"),
+ ("unindent-comments", unindent_comments,
+ "removes excess indentation on comments in order for markdown to like it"),
+ ("collapse-docs", collapse_docs,
+ "concatenates all document attributes into one document attribute"),
+ ("strip-private", strip_private,
+ "strips all private items from a crate which cannot be seen externally, \
+ implies strip-priv-imports"),
+ ("strip-priv-imports", strip_priv_imports,
+ "strips all private import statements (`use`, `extern crate`) from a crate"),
+];
+
+pub const DEFAULT_PASSES: &'static [&'static str] = &[
+ "strip-hidden",
+ "strip-private",
+ "collapse-docs",
+ "unindent-comments",
+];
+
+
+struct Stripper<'a> {
+ retained: &'a mut DefIdSet,
+ access_levels: &'a AccessLevels<DefId>,
+ update_retained: bool,
+}
+
+impl<'a> fold::DocFolder for Stripper<'a> {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ match i.inner {
+ clean::StrippedItem(..) => {
+ // We need to recurse into stripped modules to strip things
+ // like impl methods but when doing so we must not add any
+ // items to the `retained` set.
+ let old = mem::replace(&mut self.update_retained, false);
+ let ret = self.fold_item_recur(i);
+ self.update_retained = old;
+ return ret;
+ }
+ // These items can all get re-exported
+ clean::TypedefItem(..) | clean::StaticItem(..) |
+ clean::StructItem(..) | clean::EnumItem(..) |
+ clean::TraitItem(..) | clean::FunctionItem(..) |
+ clean::VariantItem(..) | clean::MethodItem(..) |
+ clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) |
+ clean::ConstantItem(..) | clean::UnionItem(..) => {
+ if i.def_id.is_local() {
+ if !self.access_levels.is_exported(i.def_id) {
+ return None;
+ }
+ }
+ }
+
+ clean::StructFieldItem(..) => {
+ if i.visibility != Some(clean::Public) {
+ return Strip(i).fold();
+ }
+ }
+
+ clean::ModuleItem(..) => {
+ if i.def_id.is_local() && i.visibility != Some(clean::Public) {
+ let old = mem::replace(&mut self.update_retained, false);
+ let ret = Strip(self.fold_item_recur(i).unwrap()).fold();
+ self.update_retained = old;
+ return ret;
+ }
+ }
+
+ // handled in the `strip-priv-imports` pass
+ clean::ExternCrateItem(..) | clean::ImportItem(..) => {}
+
+ clean::DefaultImplItem(..) | clean::ImplItem(..) => {}
+
+ // tymethods/macros have no control over privacy
+ clean::MacroItem(..) | clean::TyMethodItem(..) => {}
+
+ // Primitives are never stripped
+ clean::PrimitiveItem(..) => {}
+
+ // Associated consts and types are never stripped
+ clean::AssociatedConstItem(..) |
+ clean::AssociatedTypeItem(..) => {}
+ }
+
+ let fastreturn = match i.inner {
+ // nothing left to do for traits (don't want to filter their
+ // methods out, visibility controlled by the trait)
+ clean::TraitItem(..) => true,
+
+ // implementations of traits are always public.
+ clean::ImplItem(ref imp) if imp.trait_.is_some() => true,
+ // Struct variant fields have inherited visibility
+ clean::VariantItem(clean::Variant {
+ kind: clean::StructVariant(..)
+ }) => true,
+ _ => false,
+ };
+
+ let i = if fastreturn {
+ if self.update_retained {
+ self.retained.insert(i.def_id);
+ }
+ return Some(i);
+ } else {
+ self.fold_item_recur(i)
+ };
+
+ i.and_then(|i| {
+ match i.inner {
+ // emptied modules have no need to exist
+ clean::ModuleItem(ref m)
+ if m.items.is_empty() &&
+ i.doc_value().is_none() => None,
+ _ => {
+ if self.update_retained {
+ self.retained.insert(i.def_id);
+ }
+ Some(i)
+ }
+ }
+ })
+ }
+}
+
+// This stripper discards all impls which reference stripped items
+struct ImplStripper<'a> {
+ retained: &'a DefIdSet
+}
+
+impl<'a> fold::DocFolder for ImplStripper<'a> {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ if let clean::ImplItem(ref imp) = i.inner {
+            // empty non-trait impls can be stripped
+ if imp.trait_.is_none() && imp.items.is_empty() {
+ return None;
+ }
+ if let Some(did) = imp.for_.def_id() {
+ if did.is_local() && !imp.for_.is_generic() &&
+ !self.retained.contains(&did)
+ {
+ return None;
+ }
+ }
+ if let Some(did) = imp.trait_.def_id() {
+ if did.is_local() && !self.retained.contains(&did) {
+ return None;
+ }
+ }
+ }
+ self.fold_item_recur(i)
+ }
+}
+
+// This stripper discards all private import statements (`use`, `extern crate`)
+struct ImportStripper;
+impl fold::DocFolder for ImportStripper {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ match i.inner {
+ clean::ExternCrateItem(..) |
+ clean::ImportItem(..) if i.visibility != Some(clean::Public) => None,
+ _ => self.fold_item_recur(i)
+ }
+ }
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::util::nodemap::DefIdSet;
+use std::mem;
+
+use clean::{self, Attributes};
+use clean::Item;
+use plugins;
+use fold;
+use fold::DocFolder;
+use fold::FoldItem::Strip;
+use passes::ImplStripper;
+
+/// Strip items marked `#[doc(hidden)]`
+pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult {
+ let mut retained = DefIdSet();
+
+ // strip all #[doc(hidden)] items
+ let krate = {
+ let mut stripper = Stripper{ retained: &mut retained, update_retained: true };
+ stripper.fold_crate(krate)
+ };
+
+ // strip all impls referencing stripped items
+ let mut stripper = ImplStripper { retained: &retained };
+ stripper.fold_crate(krate)
+}
+
+struct Stripper<'a> {
+ retained: &'a mut DefIdSet,
+ update_retained: bool,
+}
+
+impl<'a> fold::DocFolder for Stripper<'a> {
+ fn fold_item(&mut self, i: Item) -> Option<Item> {
+ if i.attrs.list("doc").has_word("hidden") {
+ debug!("found one in strip_hidden; removing");
+ // use a dedicated hidden item for given item type if any
+ match i.inner {
+ clean::StructFieldItem(..) | clean::ModuleItem(..) => {
+ // We need to recurse into stripped modules to
+ // strip things like impl methods but when doing so
+ // we must not add any items to the `retained` set.
+ let old = mem::replace(&mut self.update_retained, false);
+ let ret = Strip(self.fold_item_recur(i).unwrap()).fold();
+ self.update_retained = old;
+ return ret;
+ }
+ _ => return None,
+ }
+ } else {
+ if self.update_retained {
+ self.retained.insert(i.def_id);
+ }
+ }
+ self.fold_item_recur(i)
+ }
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use clean;
+use fold::DocFolder;
+use plugins;
+use passes::ImportStripper;
+
+pub fn strip_priv_imports(krate: clean::Crate) -> plugins::PluginResult {
+ ImportStripper.fold_crate(krate)
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::util::nodemap::DefIdSet;
+
+use clean;
+use plugins;
+use fold::DocFolder;
+use passes::{ImplStripper, ImportStripper, Stripper};
+
+/// Strip private items from the point of view of a crate or externally from a
+/// crate, specified by the `xcrate` flag.
+pub fn strip_private(mut krate: clean::Crate) -> plugins::PluginResult {
+ // This stripper collects all *retained* nodes.
+ let mut retained = DefIdSet();
+ let access_levels = krate.access_levels.clone();
+
+ // strip all private items
+ {
+ let mut stripper = Stripper {
+ retained: &mut retained,
+ access_levels: &access_levels,
+ update_retained: true,
+ };
+ krate = ImportStripper.fold_crate(stripper.fold_crate(krate));
+ }
+
+ // strip all impls referencing private items
+ let mut stripper = ImplStripper { retained: &retained };
+ stripper.fold_crate(krate)
+}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::cmp;
+use std::string::String;
+use std::usize;
+
+use clean::{self, Item};
+use plugins;
+use fold::{self, DocFolder};
+
+pub fn unindent_comments(krate: clean::Crate) -> plugins::PluginResult {
+ let mut cleaner = CommentCleaner;
+ let krate = cleaner.fold_crate(krate);
+ krate
+}
+
+struct CommentCleaner;
+
+impl fold::DocFolder for CommentCleaner {
+ fn fold_item(&mut self, mut i: Item) -> Option<Item> {
+ let mut avec: Vec<clean::Attribute> = Vec::new();
+ for attr in &i.attrs {
+ match attr {
+ &clean::NameValue(ref x, ref s)
+ if "doc" == *x => {
+ avec.push(clean::NameValue("doc".to_string(),
+ unindent(s)))
+ }
+ x => avec.push(x.clone())
+ }
+ }
+ i.attrs = avec;
+ self.fold_item_recur(i)
+ }
+}
+
+fn unindent(s: &str) -> String {
+ let lines = s.lines().collect::<Vec<&str> >();
+ let mut saw_first_line = false;
+ let mut saw_second_line = false;
+ let min_indent = lines.iter().fold(usize::MAX, |min_indent, line| {
+
+ // After we see the first non-whitespace line, look at
+ // the line we have. If it is not whitespace, and therefore
+ // part of the first paragraph, then ignore the indentation
+ // level of the first line
+ let ignore_previous_indents =
+ saw_first_line &&
+ !saw_second_line &&
+ !line.chars().all(|c| c.is_whitespace());
+
+ let min_indent = if ignore_previous_indents {
+ usize::MAX
+ } else {
+ min_indent
+ };
+
+ if saw_first_line {
+ saw_second_line = true;
+ }
+
+ if line.chars().all(|c| c.is_whitespace()) {
+ min_indent
+ } else {
+ saw_first_line = true;
+ let mut whitespace = 0;
+ line.chars().all(|char| {
+ // Compare against either space or tab, ignoring whether they
+ // are mixed or not
+ if char == ' ' || char == '\t' {
+ whitespace += 1;
+ true
+ } else {
+ false
+ }
+ });
+ cmp::min(min_indent, whitespace)
+ }
+ });
+
+ if !lines.is_empty() {
+ let mut unindented = vec![ lines[0].trim().to_string() ];
+ unindented.extend_from_slice(&lines[1..].iter().map(|&line| {
+ if line.chars().all(|c| c.is_whitespace()) {
+ line.to_string()
+ } else {
+ assert!(line.len() >= min_indent);
+ line[min_indent..].to_string()
+ }
+ }).collect::<Vec<_>>());
+ unindented.join("\n")
+ } else {
+ s.to_string()
+ }
+}
+
+#[cfg(test)]
+mod unindent_tests {
+ use super::unindent;
+
+ #[test]
+ fn should_unindent() {
+ let s = " line1\n line2".to_string();
+ let r = unindent(&s);
+ assert_eq!(r, "line1\nline2");
+ }
+
+ #[test]
+ fn should_unindent_multiple_paragraphs() {
+ let s = " line1\n\n line2".to_string();
+ let r = unindent(&s);
+ assert_eq!(r, "line1\n\nline2");
+ }
+
+ #[test]
+ fn should_leave_multiple_indent_levels() {
+ // Line 2 is indented another level beyond the
+ // base indentation and should be preserved
+ let s = " line1\n\n line2".to_string();
+ let r = unindent(&s);
+ assert_eq!(r, "line1\n\n line2");
+ }
+
+ #[test]
+ fn should_ignore_first_line_indent() {
+ // The first line of the first paragraph may not be indented as
+ // far due to the way the doc string was written:
+ //
+ // #[doc = "Start way over here
+ // and continue here"]
+ let s = "line1\n line2".to_string();
+ let r = unindent(&s);
+ assert_eq!(r, "line1\nline2");
+ }
+
+ #[test]
+ fn should_not_ignore_first_line_indent_in_a_single_line_para() {
+ let s = "line1\n\n line2".to_string();
+ let r = unindent(&s);
+ assert_eq!(r, "line1\n\n line2");
+ }
+
+ #[test]
+ fn should_unindent_tabs() {
+ let s = "\tline1\n\tline2".to_string();
+ let r = unindent(&s);
+ assert_eq!(r, "line1\nline2");
+ }
+
+ #[test]
+ fn should_trim_mixed_indentation() {
+ let s = "\t line1\n\t line2".to_string();
+ let r = unindent(&s);
+ assert_eq!(r, "line1\nline2");
+
+ let s = " \tline1\n \tline2".to_string();
+ let r = unindent(&s);
+ assert_eq!(r, "line1\nline2");
+ }
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::cell::{RefCell, Cell};
+use std::cell::Cell;
use std::env;
use std::ffi::OsString;
use std::io::prelude::*;
use rustc::dep_graph::DepGraph;
use rustc::hir::map as hir_map;
use rustc::session::{self, config};
-use rustc::session::config::{get_unstable_features_setting, OutputType,
- OutputTypes, Externs};
+use rustc::session::config::{OutputType, OutputTypes, Externs};
use rustc::session::search_paths::{SearchPaths, PathKind};
-use rustc::util::nodemap::{FnvHashMap, FnvHashSet};
use rustc_back::dynamic_lib::DynamicLibrary;
use rustc_back::tempdir::TempDir;
use rustc_driver::{driver, Compilation};
use rustc_metadata::cstore::CStore;
use rustc_resolve::MakeGlobMap;
use syntax::codemap::CodeMap;
+use syntax::feature_gate::UnstableFeatures;
use errors;
use errors::emitter::ColorConfig;
search_paths: libs.clone(),
crate_types: vec!(config::CrateTypeDylib),
externs: externs.clone(),
- unstable_features: get_unstable_features_setting(),
+ unstable_features: UnstableFeatures::from_environment(),
..config::basic_options().clone()
};
map: &map,
maybe_typed: core::NotTyped(&sess),
input: input,
- external_traits: RefCell::new(FnvHashMap()),
- populated_crate_impls: RefCell::new(FnvHashSet()),
+ populated_all_crate_impls: Cell::new(false),
+ external_traits: Default::default(),
deref_trait_did: Cell::new(None),
deref_mut_trait_did: Cell::new(None),
access_levels: Default::default(),
renderinfo: Default::default(),
+ ty_substs: Default::default(),
+ lt_substs: Default::default(),
};
let mut v = RustdocVisitor::new(&ctx);
.. config::basic_codegen_options()
},
test: as_test_harness,
- unstable_features: get_unstable_features_setting(),
+ unstable_features: UnstableFeatures::from_environment(),
..config::basic_options().clone()
};
use rustc::hir::map as hir_map;
use rustc::hir::def::Def;
+use rustc::hir::def_id::LOCAL_CRATE;
use rustc::middle::privacy::AccessLevel;
use rustc::util::nodemap::FnvHashSet;
let cstore = &self.cx.sess().cstore;
om.extern_crates.push(ExternCrate {
cnum: cstore.extern_mod_stmt_cnum(item.id)
- .unwrap_or(ast::CrateNum::max_value()),
+ .unwrap_or(LOCAL_CRATE),
name: name,
path: p.map(|x|x.to_string()),
vis: item.vis.clone(),
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::middle::cstore::{CrateStore, ChildItem, DefLike};
+use rustc::middle::cstore::CrateStore;
use rustc::middle::privacy::{AccessLevels, AccessLevel};
use rustc::hir::def::Def;
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId};
use rustc::ty::Visibility;
-use syntax::ast;
use std::cell::RefMut;
}
}
- pub fn visit_lib(&mut self, cnum: ast::CrateNum) {
+ pub fn visit_lib(&mut self, cnum: CrateNum) {
let did = DefId { krate: cnum, index: CRATE_DEF_INDEX };
self.update(did, Some(AccessLevel::Public));
self.visit_mod(did);
}
}
- pub fn visit_mod(&mut self, did: DefId) {
- for item in self.cstore.item_children(did) {
- if let DefLike::DlDef(def) = item.def {
- match def {
- Def::Mod(did) |
- Def::ForeignMod(did) |
- Def::Trait(did) |
- Def::Struct(did) |
- Def::Union(did) |
- Def::Enum(did) |
- Def::TyAlias(did) |
- Def::Fn(did) |
- Def::Method(did) |
- Def::Static(did, _) |
- Def::Const(did) => self.visit_item(did, item),
- _ => {}
- }
- }
+ pub fn visit_mod(&mut self, def_id: DefId) {
+ for item in self.cstore.item_children(def_id) {
+ self.visit_item(item.def_id);
}
}
- fn visit_item(&mut self, did: DefId, item: ChildItem) {
- let inherited_item_level = match item.def {
- DefLike::DlImpl(..) | DefLike::DlField => unreachable!(),
- DefLike::DlDef(def) => {
- match def {
- Def::ForeignMod(..) => self.prev_level,
- _ => if item.vis == Visibility::Public { self.prev_level } else { None }
- }
- }
+ fn visit_item(&mut self, def_id: DefId) {
+ let vis = self.cstore.visibility(def_id);
+ let inherited_item_level = if vis == Visibility::Public {
+ self.prev_level
+ } else {
+ None
};
- let item_level = self.update(did, inherited_item_level);
+ let item_level = self.update(def_id, inherited_item_level);
- if let DefLike::DlDef(Def::Mod(did)) = item.def {
+ if let Some(Def::Mod(_)) = self.cstore.describe_def(def_id) {
let orig_level = self.prev_level;
self.prev_level = item_level;
- self.visit_mod(did);
+ self.visit_mod(def_id);
self.prev_level = orig_level;
}
}
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let mut bits = 0;
for item in self {
- bits |= item.to_usize();
+ bits |= 1 << item.to_usize();
}
s.emit_usize(bits)
}
let mut set = EnumSet::new();
for bit in 0..(mem::size_of::<usize>()*8) {
if bits & (1 << bit) != 0 {
- set.insert(CLike::from_usize(1 << bit));
+ set.insert(CLike::from_usize(bit));
}
}
Ok(set)
}
fn escape_char(writer: &mut fmt::Write, v: char) -> EncodeResult {
- escape_str(writer, unsafe {
- str::from_utf8_unchecked(v.encode_utf8().as_slice())
- })
+ escape_str(writer, v.encode_utf8(&mut [0; 4]))
}
fn spaces(wr: &mut fmt::Write, mut n: usize) -> EncodeResult {
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[inline]
+pub fn write_to_vec(vec: &mut Vec<u8>, position: &mut usize, byte: u8) {
+ if *position == vec.len() {
+ vec.push(byte);
+ } else {
+ vec[*position] = byte;
+ }
+
+ *position += 1;
+}
+
+pub fn write_unsigned_leb128(out: &mut Vec<u8>, start_position: usize, mut value: u64) -> usize {
+ let mut position = start_position;
+ loop {
+ let mut byte = (value & 0x7F) as u8;
+ value >>= 7;
+ if value != 0 {
+ byte |= 0x80;
+ }
+
+ write_to_vec(out, &mut position, byte);
+
+ if value == 0 {
+ break;
+ }
+ }
+
+ return position - start_position;
+}
+
+pub fn read_unsigned_leb128(data: &[u8], start_position: usize) -> (u64, usize) {
+ let mut result = 0;
+ let mut shift = 0;
+ let mut position = start_position;
+ loop {
+ let byte = data[position];
+ position += 1;
+ result |= ((byte & 0x7F) as u64) << shift;
+ if (byte & 0x80) == 0 {
+ break;
+ }
+ shift += 7;
+ }
+
+ (result, position - start_position)
+}
+
+
+pub fn write_signed_leb128(out: &mut Vec<u8>, start_position: usize, mut value: i64) -> usize {
+ let mut position = start_position;
+
+ loop {
+ let mut byte = (value as u8) & 0x7f;
+ value >>= 7;
+ let more = !((((value == 0) && ((byte & 0x40) == 0)) ||
+ ((value == -1) && ((byte & 0x40) != 0))));
+ if more {
+ byte |= 0x80; // Mark this byte to show that more bytes will follow.
+ }
+
+ write_to_vec(out, &mut position, byte);
+
+ if !more {
+ break;
+ }
+ }
+
+ return position - start_position;
+}
+
+pub fn read_signed_leb128(data: &[u8], start_position: usize) -> (i64, usize) {
+ let mut result = 0;
+ let mut shift = 0;
+ let mut position = start_position;
+ let mut byte;
+
+ loop {
+ byte = data[position];
+ position += 1;
+ result |= ((byte & 0x7F) as i64) << shift;
+ shift += 7;
+
+ if (byte & 0x80) == 0 {
+ break;
+ }
+ }
+
+ if (shift < 64) && ((byte & 0x40) != 0) {
+ // sign extend
+ result |= -(1i64 << shift);
+ }
+
+ (result, position - start_position)
+}
+
+#[test]
+fn test_unsigned_leb128() {
+ let mut stream = Vec::with_capacity(10000);
+
+ for x in 0..62 {
+ let pos = stream.len();
+ let bytes_written = write_unsigned_leb128(&mut stream, pos, 3 << x);
+ assert_eq!(stream.len(), pos + bytes_written);
+ }
+
+ let mut position = 0;
+ for x in 0..62 {
+ let expected = 3 << x;
+ let (actual, bytes_read) = read_unsigned_leb128(&stream, position);
+ assert_eq!(expected, actual);
+ position += bytes_read;
+ }
+ assert_eq!(stream.len(), position);
+}
+
+#[test]
+fn test_signed_leb128() {
+ let mut values = Vec::new();
+
+ let mut i = -500;
+ while i < 500 {
+ values.push(i * 123457i64);
+ i += 1;
+ }
+
+ let mut stream = Vec::new();
+
+ for &x in &values {
+ let pos = stream.len();
+ let bytes_written = write_signed_leb128(&mut stream, pos, x);
+ assert_eq!(stream.len(), pos + bytes_written);
+ }
+
+ let mut pos = 0;
+ for &x in &values {
+ let (value, bytes_read) = read_signed_leb128(&mut stream, pos);
+ pos += bytes_read;
+ assert_eq!(x, value);
+ }
+ assert_eq!(pos, stream.len());
+}
#![feature(box_syntax)]
#![feature(collections)]
+#![feature(core_intrinsics)]
#![feature(enumset)]
#![feature(rustc_private)]
+#![feature(specialization)]
#![feature(staged_api)]
#![feature(unicode)]
#![feature(question_mark)]
extern crate rustc_unicode;
extern crate collections;
-pub use self::serialize::{Decoder, Encoder, Decodable, Encodable,
- DecoderHelpers, EncoderHelpers};
+pub use self::serialize::{Decoder, Encoder, Decodable, Encodable};
+
+pub use self::serialize::{SpecializationError, SpecializedEncoder, SpecializedDecoder};
+pub use self::serialize::{UseSpecializedEncodable, UseSpecializedDecodable};
mod serialize;
mod collection_impls;
pub mod hex;
pub mod json;
+pub mod opaque;
+pub mod leb128;
+
mod rustc_serialize {
pub use serialize::*;
}
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use leb128::{read_signed_leb128, read_unsigned_leb128, write_signed_leb128, write_unsigned_leb128};
+use std::io::{self, Write};
+use serialize;
+
+// -----------------------------------------------------------------------------
+// Encoder
+// -----------------------------------------------------------------------------
+
+pub type EncodeResult = io::Result<()>;
+
+pub struct Encoder<'a> {
+ pub cursor: &'a mut io::Cursor<Vec<u8>>,
+}
+
+impl<'a> Encoder<'a> {
+ pub fn new(cursor: &'a mut io::Cursor<Vec<u8>>) -> Encoder<'a> {
+ Encoder { cursor: cursor }
+ }
+}
+
+
+macro_rules! write_uleb128 {
+ ($enc:expr, $value:expr) => {{
+ let pos = $enc.cursor.position() as usize;
+ let bytes_written = write_unsigned_leb128($enc.cursor.get_mut(), pos, $value as u64);
+ $enc.cursor.set_position((pos + bytes_written) as u64);
+ Ok(())
+ }}
+}
+
+macro_rules! write_sleb128 {
+ ($enc:expr, $value:expr) => {{
+ let pos = $enc.cursor.position() as usize;
+ let bytes_written = write_signed_leb128($enc.cursor.get_mut(), pos, $value as i64);
+ $enc.cursor.set_position((pos + bytes_written) as u64);
+ Ok(())
+ }}
+}
+
+impl<'a> serialize::Encoder for Encoder<'a> {
+ type Error = io::Error;
+
+ fn emit_nil(&mut self) -> EncodeResult {
+ Ok(())
+ }
+
+ fn emit_usize(&mut self, v: usize) -> EncodeResult {
+ write_uleb128!(self, v)
+ }
+
+ fn emit_u64(&mut self, v: u64) -> EncodeResult {
+ write_uleb128!(self, v)
+ }
+
+ fn emit_u32(&mut self, v: u32) -> EncodeResult {
+ write_uleb128!(self, v)
+ }
+
+ fn emit_u16(&mut self, v: u16) -> EncodeResult {
+ write_uleb128!(self, v)
+ }
+
+ fn emit_u8(&mut self, v: u8) -> EncodeResult {
+ let _ = self.cursor.write_all(&[v]);
+ Ok(())
+ }
+
+ fn emit_isize(&mut self, v: isize) -> EncodeResult {
+ write_sleb128!(self, v)
+ }
+
+ fn emit_i64(&mut self, v: i64) -> EncodeResult {
+ write_sleb128!(self, v)
+ }
+
+ fn emit_i32(&mut self, v: i32) -> EncodeResult {
+ write_sleb128!(self, v)
+ }
+
+ fn emit_i16(&mut self, v: i16) -> EncodeResult {
+ write_sleb128!(self, v)
+ }
+
+ fn emit_i8(&mut self, v: i8) -> EncodeResult {
+ let as_u8: u8 = unsafe { ::std::mem::transmute(v) };
+ let _ = self.cursor.write_all(&[as_u8]);
+ Ok(())
+ }
+
+ fn emit_bool(&mut self, v: bool) -> EncodeResult {
+ self.emit_u8(if v {
+ 1
+ } else {
+ 0
+ })
+ }
+
+ fn emit_f64(&mut self, v: f64) -> EncodeResult {
+ let as_u64: u64 = unsafe { ::std::mem::transmute(v) };
+ self.emit_u64(as_u64)
+ }
+
+ fn emit_f32(&mut self, v: f32) -> EncodeResult {
+ let as_u32: u32 = unsafe { ::std::mem::transmute(v) };
+ self.emit_u32(as_u32)
+ }
+
+ fn emit_char(&mut self, v: char) -> EncodeResult {
+ self.emit_u32(v as u32)
+ }
+
+ fn emit_str(&mut self, v: &str) -> EncodeResult {
+ self.emit_usize(v.len())?;
+ let _ = self.cursor.write_all(v.as_bytes());
+ Ok(())
+ }
+}
+
+impl<'a> Encoder<'a> {
+ pub fn position(&self) -> usize {
+ self.cursor.position() as usize
+ }
+}
+
+// -----------------------------------------------------------------------------
+// Decoder
+// -----------------------------------------------------------------------------
+
+pub struct Decoder<'a> {
+ pub data: &'a [u8],
+ position: usize,
+}
+
+impl<'a> Decoder<'a> {
+ pub fn new(data: &'a [u8], position: usize) -> Decoder<'a> {
+ Decoder {
+ data: data,
+ position: position,
+ }
+ }
+
+ pub fn position(&self) -> usize {
+ self.position
+ }
+
+ pub fn advance(&mut self, bytes: usize) {
+ self.position += bytes;
+ }
+}
+
+macro_rules! read_uleb128 {
+ ($dec:expr, $t:ty) => ({
+ let (value, bytes_read) = read_unsigned_leb128($dec.data, $dec.position);
+ $dec.position += bytes_read;
+ Ok(value as $t)
+ })
+}
+
+macro_rules! read_sleb128 {
+ ($dec:expr, $t:ty) => ({
+ let (value, bytes_read) = read_signed_leb128($dec.data, $dec.position);
+ $dec.position += bytes_read;
+ Ok(value as $t)
+ })
+}
+
+
+impl<'a> serialize::Decoder for Decoder<'a> {
+ type Error = String;
+
+ fn read_nil(&mut self) -> Result<(), Self::Error> {
+ Ok(())
+ }
+
+ fn read_u64(&mut self) -> Result<u64, Self::Error> {
+ read_uleb128!(self, u64)
+ }
+
+ fn read_u32(&mut self) -> Result<u32, Self::Error> {
+ read_uleb128!(self, u32)
+ }
+
+ fn read_u16(&mut self) -> Result<u16, Self::Error> {
+ read_uleb128!(self, u16)
+ }
+
+ fn read_u8(&mut self) -> Result<u8, Self::Error> {
+ let value = self.data[self.position];
+ self.position += 1;
+ Ok(value)
+ }
+
+ fn read_usize(&mut self) -> Result<usize, Self::Error> {
+ read_uleb128!(self, usize)
+ }
+
+ fn read_i64(&mut self) -> Result<i64, Self::Error> {
+ read_sleb128!(self, i64)
+ }
+
+ fn read_i32(&mut self) -> Result<i32, Self::Error> {
+ read_sleb128!(self, i32)
+ }
+
+ fn read_i16(&mut self) -> Result<i16, Self::Error> {
+ read_sleb128!(self, i16)
+ }
+
+ fn read_i8(&mut self) -> Result<i8, Self::Error> {
+ let as_u8 = self.data[self.position];
+ self.position += 1;
+ unsafe { Ok(::std::mem::transmute(as_u8)) }
+ }
+
+ fn read_isize(&mut self) -> Result<isize, Self::Error> {
+ read_sleb128!(self, isize)
+ }
+
+ fn read_bool(&mut self) -> Result<bool, Self::Error> {
+ let value = self.read_u8()?;
+ Ok(value != 0)
+ }
+
+ fn read_f64(&mut self) -> Result<f64, Self::Error> {
+ let bits = self.read_u64()?;
+ Ok(unsafe { ::std::mem::transmute(bits) })
+ }
+
+ fn read_f32(&mut self) -> Result<f32, Self::Error> {
+ let bits = self.read_u32()?;
+ Ok(unsafe { ::std::mem::transmute(bits) })
+ }
+
+ fn read_char(&mut self) -> Result<char, Self::Error> {
+ let bits = self.read_u32()?;
+ Ok(::std::char::from_u32(bits).unwrap())
+ }
+
+ fn read_str(&mut self) -> Result<String, Self::Error> {
+ let len = self.read_usize()?;
+ let s = ::std::str::from_utf8(&self.data[self.position..self.position + len]).unwrap();
+ self.position += len;
+ Ok(s.to_string())
+ }
+
+ fn error(&mut self, err: &str) -> Self::Error {
+ err.to_string()
+ }
+}
+
+
+#[cfg(test)]
+mod tests {
+ use serialize::{Encodable, Decodable};
+ use std::io::Cursor;
+ use std::fmt::Debug;
+ use super::{Encoder, Decoder};
+
+ #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
+ struct Struct {
+ a: (),
+ b: u8,
+ c: u16,
+ d: u32,
+ e: u64,
+ f: usize,
+
+ g: i8,
+ h: i16,
+ i: i32,
+ j: i64,
+ k: isize,
+
+ l: char,
+ m: String,
+ n: f32,
+ o: f64,
+ p: bool,
+ q: Option<u32>,
+ }
+
+
+ fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
+ let mut cursor = Cursor::new(Vec::new());
+
+ for value in &values {
+ let mut encoder = Encoder::new(&mut cursor);
+ Encodable::encode(&value, &mut encoder).unwrap();
+ }
+
+ let data = cursor.into_inner();
+ let mut decoder = Decoder::new(&data[..], 0);
+
+ for value in values {
+ let decoded = Decodable::decode(&mut decoder).unwrap();
+ assert_eq!(value, decoded);
+ }
+ }
+
+ #[test]
+ fn test_unit() {
+ check_round_trip(vec![(), (), (), ()]);
+ }
+
+ #[test]
+ fn test_u8() {
+ let mut vec = vec![];
+ for i in ::std::u8::MIN..::std::u8::MAX {
+ vec.push(i);
+ }
+ check_round_trip(vec);
+ }
+
+ #[test]
+ fn test_u16() {
+ for i in ::std::u16::MIN..::std::u16::MAX {
+ check_round_trip(vec![1, 2, 3, i, i, i]);
+ }
+ }
+
+ #[test]
+ fn test_u32() {
+ check_round_trip(vec![1, 2, 3, ::std::u32::MIN, 0, 1, ::std::u32::MAX, 2, 1]);
+ }
+
+ #[test]
+ fn test_u64() {
+ check_round_trip(vec![1, 2, 3, ::std::u64::MIN, 0, 1, ::std::u64::MAX, 2, 1]);
+ }
+
+ #[test]
+ fn test_usize() {
+ check_round_trip(vec![1, 2, 3, ::std::usize::MIN, 0, 1, ::std::usize::MAX, 2, 1]);
+ }
+
+ #[test]
+ fn test_i8() {
+ let mut vec = vec![];
+ for i in ::std::i8::MIN..::std::i8::MAX {
+ vec.push(i);
+ }
+ check_round_trip(vec);
+ }
+
+ #[test]
+ fn test_i16() {
+ for i in ::std::i16::MIN..::std::i16::MAX {
+ check_round_trip(vec![-1, 2, -3, i, i, i, 2]);
+ }
+ }
+
+ #[test]
+ fn test_i32() {
+ check_round_trip(vec![-1, 2, -3, ::std::i32::MIN, 0, 1, ::std::i32::MAX, 2, 1]);
+ }
+
+ #[test]
+ fn test_i64() {
+ check_round_trip(vec![-1, 2, -3, ::std::i64::MIN, 0, 1, ::std::i64::MAX, 2, 1]);
+ }
+
+ #[test]
+ fn test_isize() {
+ check_round_trip(vec![-1, 2, -3, ::std::isize::MIN, 0, 1, ::std::isize::MAX, 2, 1]);
+ }
+
+ #[test]
+ fn test_bool() {
+ check_round_trip(vec![false, true, true, false, false]);
+ }
+
+ #[test]
+ fn test_f32() {
+ let mut vec = vec![];
+ for i in -100..100 {
+ vec.push((i as f32) / 3.0);
+ }
+ check_round_trip(vec);
+ }
+
+ #[test]
+ fn test_f64() {
+ let mut vec = vec![];
+ for i in -100..100 {
+ vec.push((i as f64) / 3.0);
+ }
+ check_round_trip(vec);
+ }
+
+ #[test]
+ fn test_char() {
+ let vec = vec!['a', 'b', 'c', 'd', 'A', 'X', ' ', '#', 'Ö', 'Ä', 'µ', '€'];
+ check_round_trip(vec);
+ }
+
+ #[test]
+ fn test_string() {
+ let vec = vec!["abcbuÖeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmea€µsbpapmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmeabpnvapeapmaebn".to_string(),
+ "abcbuÖganeiÄnameÜavmpßvmea€µsbpnvapeapmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmea€µsbpmaebn".to_string(),
+ "abcbuÖganeiovÄnameÜavmpßvmea€µnvapeapmaebn".to_string()];
+
+ check_round_trip(vec);
+ }
+
+ #[test]
+ fn test_option() {
+ check_round_trip(vec![Some(-1i8)]);
+ check_round_trip(vec![Some(-2i16)]);
+ check_round_trip(vec![Some(-3i32)]);
+ check_round_trip(vec![Some(-4i64)]);
+ check_round_trip(vec![Some(-5isize)]);
+
+ let none_i8: Option<i8> = None;
+ check_round_trip(vec![none_i8]);
+
+ let none_i16: Option<i16> = None;
+ check_round_trip(vec![none_i16]);
+
+ let none_i32: Option<i32> = None;
+ check_round_trip(vec![none_i32]);
+
+ let none_i64: Option<i64> = None;
+ check_round_trip(vec![none_i64]);
+
+ let none_isize: Option<isize> = None;
+ check_round_trip(vec![none_isize]);
+ }
+
+ #[test]
+ fn test_struct() {
+ check_round_trip(vec![Struct {
+ a: (),
+ b: 10,
+ c: 11,
+ d: 12,
+ e: 13,
+ f: 14,
+
+ g: 15,
+ h: 16,
+ i: 17,
+ j: 18,
+ k: 19,
+
+ l: 'x',
+ m: "abc".to_string(),
+ n: 20.5,
+ o: 21.5,
+ p: false,
+ q: None,
+ }]);
+
+ check_round_trip(vec![Struct {
+ a: (),
+ b: 101,
+ c: 111,
+ d: 121,
+ e: 131,
+ f: 141,
+
+ g: -15,
+ h: -16,
+ i: -17,
+ j: -18,
+ k: -19,
+
+ l: 'y',
+ m: "def".to_string(),
+ n: -20.5,
+ o: -21.5,
+ p: true,
+ q: Some(1234567),
+ }]);
+ }
+
+ #[derive(PartialEq, Clone, Debug, RustcEncodable, RustcDecodable)]
+ enum Enum {
+ Variant1,
+ Variant2(usize, f32),
+ Variant3 {
+ a: i32,
+ b: char,
+ c: bool,
+ },
+ }
+
+ #[test]
+ fn test_enum() {
+ check_round_trip(vec![Enum::Variant1,
+ Enum::Variant2(1, 2.5),
+ Enum::Variant3 {
+ a: 3,
+ b: 'b',
+ c: false,
+ },
+ Enum::Variant3 {
+ a: -4,
+ b: 'f',
+ c: true,
+ }]);
+ }
+
+ #[test]
+ fn test_sequence() {
+ let mut vec = vec![];
+ for i in -100i64..100i64 {
+ vec.push(i * 100000);
+ }
+
+ check_round_trip(vec![vec]);
+ }
+
+ #[test]
+ fn test_hash_map() {
+ use std::collections::HashMap;
+ let mut map = HashMap::new();
+ for i in -100i64..100i64 {
+ map.insert(i * 100000, i * 10000);
+ }
+
+ check_round_trip(vec![map]);
+ }
+
+ #[test]
+ fn test_tuples() {
+ check_round_trip(vec![('x', (), false, 0.5f32)]);
+ check_round_trip(vec![(9i8, 10u16, 1.5f64)]);
+ check_round_trip(vec![(-12i16, 11u8, 12usize)]);
+ check_round_trip(vec![(1234567isize, 100000000000000u64, 99999999999999i64)]);
+ check_round_trip(vec![(String::new(), "some string".to_string())]);
+ }
+}
Core encoding and decoding interfaces.
*/
+use std::intrinsics;
use std::path;
use std::rc::Rc;
use std::cell::{Cell, RefCell};
fn emit_str(&mut self, v: &str) -> Result<(), Self::Error>;
// Compound types:
- fn emit_enum<F>(&mut self, name: &str, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ fn emit_enum<F>(&mut self, _name: &str, f: F) -> Result<(), Self::Error>
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
- fn emit_enum_variant<F>(&mut self, v_name: &str,
+ fn emit_enum_variant<F>(&mut self, _v_name: &str,
v_id: usize,
- len: usize,
+ _len: usize,
f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
- fn emit_enum_variant_arg<F>(&mut self, a_idx: usize, f: F)
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+ self.emit_usize(v_id)?;
+ f(self)
+ }
+ fn emit_enum_variant_arg<F>(&mut self, _a_idx: usize, f: F)
-> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
fn emit_enum_struct_variant<F>(&mut self, v_name: &str,
v_id: usize,
len: usize,
f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+ self.emit_enum_variant(v_name, v_id, len, f)
+ }
fn emit_enum_struct_variant_field<F>(&mut self,
- f_name: &str,
+ _f_name: &str,
f_idx: usize,
f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+ self.emit_enum_variant_arg(f_idx, f)
+ }
- fn emit_struct<F>(&mut self, name: &str, len: usize, f: F)
+ fn emit_struct<F>(&mut self, _name: &str, _len: usize, f: F)
-> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
- fn emit_struct_field<F>(&mut self, f_name: &str, f_idx: usize, f: F)
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
+ fn emit_struct_field<F>(&mut self, _f_name: &str, _f_idx: usize, f: F)
-> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
- fn emit_tuple<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
- fn emit_tuple_arg<F>(&mut self, idx: usize, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ fn emit_tuple<F>(&mut self, _len: usize, f: F) -> Result<(), Self::Error>
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
+ fn emit_tuple_arg<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
- fn emit_tuple_struct<F>(&mut self, name: &str, len: usize, f: F)
+ fn emit_tuple_struct<F>(&mut self, _name: &str, len: usize, f: F)
-> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+ self.emit_tuple(len, f)
+ }
fn emit_tuple_struct_arg<F>(&mut self, f_idx: usize, f: F)
-> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+ self.emit_tuple_arg(f_idx, f)
+ }
// Specialized types:
fn emit_option<F>(&mut self, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
- fn emit_option_none(&mut self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+ self.emit_enum("Option", f)
+ }
+ fn emit_option_none(&mut self) -> Result<(), Self::Error> {
+ self.emit_enum_variant("None", 0, 0, |_| Ok(()))
+ }
fn emit_option_some<F>(&mut self, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+
+ self.emit_enum_variant("Some", 1, 1, f)
+ }
fn emit_seq<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
- fn emit_seq_elt<F>(&mut self, idx: usize, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+ self.emit_usize(len)?;
+ f(self)
+ }
+ fn emit_seq_elt<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
fn emit_map<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
- fn emit_map_elt_key<F>(&mut self, idx: usize, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
- fn emit_map_elt_val<F>(&mut self, idx: usize, f: F) -> Result<(), Self::Error>
- where F: FnOnce(&mut Self) -> Result<(), Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error>
+ {
+ self.emit_usize(len)?;
+ f(self)
+ }
+ fn emit_map_elt_key<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
+ fn emit_map_elt_val<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
+ where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) }
}
pub trait Decoder {
fn read_str(&mut self) -> Result<String, Self::Error>;
// Compound types:
- fn read_enum<T, F>(&mut self, name: &str, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ fn read_enum<T, F>(&mut self, _name: &str, f: F) -> Result<T, Self::Error>
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
- fn read_enum_variant<T, F>(&mut self, names: &[&str], f: F)
+ fn read_enum_variant<T, F>(&mut self, _names: &[&str], mut f: F)
-> Result<T, Self::Error>
- where F: FnMut(&mut Self, usize) -> Result<T, Self::Error>;
- fn read_enum_variant_arg<T, F>(&mut self, a_idx: usize, f: F)
+ where F: FnMut(&mut Self, usize) -> Result<T, Self::Error>
+ {
+ let disr = self.read_usize()?;
+ f(self, disr)
+ }
+ fn read_enum_variant_arg<T, F>(&mut self, _a_idx: usize, f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
fn read_enum_struct_variant<T, F>(&mut self, names: &[&str], f: F)
-> Result<T, Self::Error>
- where F: FnMut(&mut Self, usize) -> Result<T, Self::Error>;
+ where F: FnMut(&mut Self, usize) -> Result<T, Self::Error>
+ {
+ self.read_enum_variant(names, f)
+ }
fn read_enum_struct_variant_field<T, F>(&mut self,
- &f_name: &str,
+ _f_name: &str,
f_idx: usize,
f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error>
+ {
+ self.read_enum_variant_arg(f_idx, f)
+ }
- fn read_struct<T, F>(&mut self, s_name: &str, len: usize, f: F)
+ fn read_struct<T, F>(&mut self, _s_name: &str, _len: usize, f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
fn read_struct_field<T, F>(&mut self,
- f_name: &str,
- f_idx: usize,
+ _f_name: &str,
+ _f_idx: usize,
f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
- fn read_tuple<T, F>(&mut self, len: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
- fn read_tuple_arg<T, F>(&mut self, a_idx: usize, f: F)
+ fn read_tuple<T, F>(&mut self, _len: usize, f: F) -> Result<T, Self::Error>
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
+ fn read_tuple_arg<T, F>(&mut self, _a_idx: usize, f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
- fn read_tuple_struct<T, F>(&mut self, s_name: &str, len: usize, f: F)
+ fn read_tuple_struct<T, F>(&mut self, _s_name: &str, len: usize, f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error>
+ {
+ self.read_tuple(len, f)
+ }
fn read_tuple_struct_arg<T, F>(&mut self, a_idx: usize, f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error>
+ {
+ self.read_tuple_arg(a_idx, f)
+ }
// Specialized types:
- fn read_option<T, F>(&mut self, f: F) -> Result<T, Self::Error>
- where F: FnMut(&mut Self, bool) -> Result<T, Self::Error>;
+ fn read_option<T, F>(&mut self, mut f: F) -> Result<T, Self::Error>
+ where F: FnMut(&mut Self, bool) -> Result<T, Self::Error>
+ {
+ self.read_enum("Option", move |this| {
+ this.read_enum_variant(&["None", "Some"], move |this, idx| {
+ match idx {
+ 0 => f(this, false),
+ 1 => f(this, true),
+ _ => Err(this.error("read_option: expected 0 for None or 1 for Some")),
+ }
+ })
+ })
+ }
fn read_seq<T, F>(&mut self, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Self, usize) -> Result<T, Self::Error>;
- fn read_seq_elt<T, F>(&mut self, idx: usize, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self, usize) -> Result<T, Self::Error>
+ {
+ let len = self.read_usize()?;
+ f(self, len)
+ }
+ fn read_seq_elt<T, F>(&mut self, _idx: usize, f: F) -> Result<T, Self::Error>
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
fn read_map<T, F>(&mut self, f: F) -> Result<T, Self::Error>
- where F: FnOnce(&mut Self, usize) -> Result<T, Self::Error>;
- fn read_map_elt_key<T, F>(&mut self, idx: usize, f: F)
+ where F: FnOnce(&mut Self, usize) -> Result<T, Self::Error>
+ {
+ let len = self.read_usize()?;
+ f(self, len)
+ }
+ fn read_map_elt_key<T, F>(&mut self, _idx: usize, f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
- fn read_map_elt_val<T, F>(&mut self, idx: usize, f: F)
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
+ fn read_map_elt_val<T, F>(&mut self, _idx: usize, f: F)
-> Result<T, Self::Error>
- where F: FnOnce(&mut Self) -> Result<T, Self::Error>;
+ where F: FnOnce(&mut Self) -> Result<T, Self::Error> { f(self) }
// Failure
fn error(&mut self, err: &str) -> Self::Error;
}
// ___________________________________________________________________________
-// Helper routines
+// Specialization-based interface for multi-dispatch Encodable/Decodable.
-pub trait EncoderHelpers: Encoder {
- fn emit_from_vec<T, F>(&mut self, v: &[T], f: F)
- -> Result<(), Self::Error>
- where F: FnMut(&mut Self, &T) -> Result<(), Self::Error>;
+/// Implement this trait on your `{Encodable,Decodable}::Error` types
+/// to override the default panic behavior for missing specializations.
+pub trait SpecializationError {
+ /// Create an error for a missing method specialization.
+ /// Defaults to panicking with type, trait & method names.
+ /// `S` is the encoder/decoder state type,
+ /// `T` is the type being encoded/decoded, and
+ /// the arguments are the names of the trait
+ /// and method that should've been overridden.
+ fn not_found<S, T: ?Sized>(trait_name: &'static str,
+ method_name: &'static str) -> Self;
}
-impl<S:Encoder> EncoderHelpers for S {
- fn emit_from_vec<T, F>(&mut self, v: &[T], mut f: F) -> Result<(), S::Error> where
- F: FnMut(&mut S, &T) -> Result<(), S::Error>,
- {
- self.emit_seq(v.len(), |this| {
- for (i, e) in v.iter().enumerate() {
- this.emit_seq_elt(i, |this| {
- f(this, e)
- })?;
- }
- Ok(())
- })
+impl<E> SpecializationError for E {
+ default fn not_found<S, T: ?Sized>(trait_name: &'static str,
+ method_name: &'static str) -> E {
+ panic!("missing specialization: `<{} as {}<{}>>::{}` not overridden",
+ unsafe { intrinsics::type_name::<S>() },
+ trait_name,
+ unsafe { intrinsics::type_name::<T>() },
+ method_name);
}
}
-pub trait DecoderHelpers: Decoder {
- fn read_to_vec<T, F>(&mut self, f: F)
- -> Result<Vec<T>, Self::Error> where
- F: FnMut(&mut Self) -> Result<T, Self::Error>;
+/// Implement this trait on encoders, with `T` being the type
+/// you want to encode (employing `UseSpecializedEncodable`),
+/// using a strategy specific to the encoder.
+pub trait SpecializedEncoder<T: ?Sized + UseSpecializedEncodable>: Encoder {
+ /// Encode the value in a manner specific to this encoder state.
+ fn specialized_encode(&mut self, value: &T) -> Result<(), Self::Error>;
}
-impl<D: Decoder> DecoderHelpers for D {
- fn read_to_vec<T, F>(&mut self, mut f: F) -> Result<Vec<T>, D::Error> where F:
- FnMut(&mut D) -> Result<T, D::Error>,
- {
- self.read_seq(|this, len| {
- let mut v = Vec::with_capacity(len);
- for i in 0..len {
- v.push(this.read_seq_elt(i, |this| f(this))?);
- }
- Ok(v)
- })
+impl<E: Encoder, T: ?Sized + UseSpecializedEncodable> SpecializedEncoder<T> for E {
+ default fn specialized_encode(&mut self, value: &T) -> Result<(), E::Error> {
+ value.default_encode(self)
+ }
+}
+
+/// Implement this trait on decoders, with `T` being the type
+/// you want to decode (employing `UseSpecializedDecodable`),
+/// using a strategy specific to the decoder.
+pub trait SpecializedDecoder<T: UseSpecializedDecodable>: Decoder {
+ /// Decode a value in a manner specific to this decoder state.
+ fn specialized_decode(&mut self) -> Result<T, Self::Error>;
+}
+
+impl<D: Decoder, T: UseSpecializedDecodable> SpecializedDecoder<T> for D {
+ default fn specialized_decode(&mut self) -> Result<T, D::Error> {
+ T::default_decode(self)
+ }
+}
+
+/// Implement this trait on your type to get an `Encodable`
+/// implementation which goes through `SpecializedEncoder`.
+pub trait UseSpecializedEncodable {
+ /// Defaults to returning an error (see `SpecializationError`).
+ fn default_encode<E: Encoder>(&self, _: &mut E) -> Result<(), E::Error> {
+ Err(E::Error::not_found::<E, Self>("SpecializedEncoder", "specialized_encode"))
+ }
+}
+
+impl<T: ?Sized + UseSpecializedEncodable> Encodable for T {
+ default fn encode<E: Encoder>(&self, e: &mut E) -> Result<(), E::Error> {
+ E::specialized_encode(e, self)
+ }
+}
+
+/// Implement this trait on your type to get a `Decodable`
+/// implementation which goes through `SpecializedDecoder`.
+pub trait UseSpecializedDecodable: Sized {
+ /// Defaults to returning an error (see `SpecializationError`).
+ fn default_decode<D: Decoder>(_: &mut D) -> Result<Self, D::Error> {
+ Err(D::Error::not_found::<D, Self>("SpecializedDecoder", "specialized_decode"))
}
}
+
+impl<T: UseSpecializedDecodable> Decodable for T {
+ default fn decode<D: Decoder>(d: &mut D) -> Result<T, D::Error> {
+ D::specialized_decode(d)
+ }
+}
+
+// Can't avoid specialization for &T and Box<T> impls,
+// as proxy impls on them are blankets that conflict
+// with the Encodable and Decodable impls above,
+// which only have `default` on their methods
+// for this exact reason.
+// May be fixable in a simpler fashion via the
+// more complex lattice model for specialization.
+impl<'a, T: ?Sized + Encodable> UseSpecializedEncodable for &'a T {}
+impl<T: ?Sized + Encodable> UseSpecializedEncodable for Box<T> {}
+impl<T: Decodable> UseSpecializedDecodable for Box<T> {}
println!("cargo:rustc-cfg=cargobuild");
println!("cargo:rerun-if-changed=build.rs");
- let target = env::var("TARGET").unwrap();
- let host = env::var("HOST").unwrap();
+ let target = env::var("TARGET").expect("TARGET was not set");
+ let host = env::var("HOST").expect("HOST was not set");
if cfg!(feature = "backtrace") && !target.contains("apple") && !target.contains("msvc") &&
!target.contains("emscripten") {
build_libbacktrace(&host, &target);
run(Command::new("make")
.current_dir(&build_dir)
.arg(format!("INCDIR={}", src_dir.display()))
- .arg("-j").arg(env::var("NUM_JOBS").unwrap()));
+ .arg("-j").arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set")));
}
/// println!("{:?} has {} hp", viking, health);
/// }
/// ```
+///
+/// A HashMap with a fixed list of elements can be initialized from an array:
+///
+/// ```
+/// use std::collections::HashMap;
+///
+/// fn main() {
+/// let timber_resources: HashMap<&str, i32> =
+/// [("Norway", 100),
+/// ("Denmark", 50),
+/// ("Iceland", 10)]
+/// .iter().cloned().collect();
+/// // use the values stored in map
+/// }
+/// ```
+
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct HashMap<K, V, S = RandomState> {
mod test_map {
use super::HashMap;
use super::Entry::{Occupied, Vacant};
+ use super::RandomState;
use cell::RefCell;
use rand::{thread_rng, Rng};
+ #[test]
+ fn test_create_capacities() {
+ type HM = HashMap<i32, i32>;
+
+ let m = HM::new();
+ assert_eq!(m.capacity(), 0);
+
+ let m = HM::default();
+ assert_eq!(m.capacity(), 0);
+
+ let m = HM::with_hasher(RandomState::new());
+ assert_eq!(m.capacity(), 0);
+ }
+
#[test]
fn test_create_capacity_zero() {
let mut m = HashMap::with_capacity(0);
use super::Recover;
use super::map::{self, HashMap, Keys, RandomState};
-const INITIAL_CAPACITY: usize = 32;
-
// Future Optimization (FIXME!)
// =============================
//
/// println!("{:?}", x);
/// }
/// ```
+///
+/// A HashSet with a fixed list of elements can be initialized from an array:
+///
+/// ```
+/// use std::collections::HashSet;
+///
+/// fn main() {
+/// let viking_names: HashSet<&str> =
+/// [ "Einar", "Olaf", "Harald" ].iter().cloned().collect();
+/// // use the values stored in the set
+/// }
+/// ```
+
+
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct HashSet<T, S = RandomState> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> HashSet<T, RandomState> {
- HashSet::with_capacity(INITIAL_CAPACITY)
+ HashSet { map: HashMap::new() }
}
/// Creates an empty HashSet with space for at least `n` elements in
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_hasher(hasher: S) -> HashSet<T, S> {
- HashSet::with_capacity_and_hasher(INITIAL_CAPACITY, hasher)
+ HashSet { map: HashMap::with_hasher(hasher) }
}
/// Creates an empty HashSet with space for at least `capacity`
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hasher: S)
-> HashSet<T, S> {
- HashSet {
- map: HashMap::with_capacity_and_hasher(capacity, hasher),
- }
+ HashSet { map: HashMap::with_capacity_and_hasher(capacity, hasher) }
}
/// Returns a reference to the set's hasher.
{
/// Creates an empty `HashSet<T, S>` with the `Default` value for the hasher.
fn default() -> HashSet<T, S> {
- HashSet::with_hasher(Default::default())
+ HashSet { map: HashMap::default() }
}
}
#[cfg(test)]
mod test_set {
use super::HashSet;
+ use super::super::map::RandomState;
+
+ #[test]
+ fn test_create_capacities() {
+ type HS = HashSet<i32>;
+
+ let s = HS::new();
+ assert_eq!(s.capacity(), 0);
+
+ let s = HS::default();
+ assert_eq!(s.capacity(), 0);
+
+ let s = HS::with_hasher(RandomState::new());
+ assert_eq!(s.capacity(), 0);
+ }
#[test]
fn test_disjoint() {
/// around just the "table" part of the hashtable. It enforces some
/// invariants at the type level and employs some performance trickery,
/// but in general is just a tricked out `Vec<Option<u64, K, V>>`.
-#[cfg_attr(stage0, unsafe_no_drop_flag)]
pub struct RawTable<K, V> {
capacity: usize,
size: usize,
pub const EXE_EXTENSION: &'static str = "js";
}
+#[cfg(target_os = "haiku")]
+mod os {
+ pub const FAMILY: &'static str = "unix";
+ pub const OS: &'static str = "haiku";
+ pub const DLL_PREFIX: &'static str = "lib";
+ pub const DLL_SUFFIX: &'static str = ".so";
+ pub const DLL_EXTENSION: &'static str = "so";
+ pub const EXE_SUFFIX: &'static str = "";
+ pub const EXE_EXTENSION: &'static str = "";
+}
+
#[cfg(target_arch = "x86")]
mod arch {
pub const ARCH: &'static str = "x86";
/// An error returned from `CString::new` to indicate that a nul byte was found
/// in the vector provided.
-#[derive(Clone, PartialEq, Debug)]
+#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct NulError(usize, Vec<u8>);
/// An error returned from `CStr::from_bytes_with_nul` to indicate that a nul
/// byte was found too early in the slice provided or one wasn't found at all.
-#[derive(Clone, PartialEq, Debug)]
+#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "cstr_from_bytes", since = "1.10.0")]
pub struct FromBytesWithNulError { _a: () }
/// An error returned from `CString::into_string` to indicate that a UTF-8 error
/// was encountered during the conversion.
-#[derive(Clone, PartialEq, Debug)]
+#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "cstring_into", since = "1.7.0")]
pub struct IntoStringError {
inner: CString,
/// Retakes ownership of a `CString` that was transferred to C.
///
+ /// Additionally, the length of the string will be recalculated from the pointer.
+ ///
+ /// # Safety
+ ///
/// This should only ever be called with a pointer that was earlier
- /// obtained by calling `into_raw` on a `CString`. Additionally, the length
- /// of the string will be recalculated from the pointer.
+ /// obtained by calling `into_raw` on a `CString`. Other usage (e.g. trying to take
+ /// ownership of a string that was allocated by foreign code) is likely to lead
+ /// to undefined behavior or allocator corruption.
#[stable(feature = "cstr_memory", since = "1.4.0")]
pub unsafe fn from_raw(ptr: *mut c_char) -> CString {
let len = libc::strlen(ptr) + 1; // Including the NUL byte
}
// Turns this `CString` into an empty string to prevent
-// memory unsafe code from working by accident.
+// memory unsafe code from working by accident. Inline
+// to prevent LLVM from optimizing it away in debug builds.
#[stable(feature = "cstring_drop", since = "1.13.0")]
impl Drop for CString {
+ #[inline]
fn drop(&mut self) {
unsafe { *self.inner.get_unchecked_mut(0) = 0; }
}
}
}
+#[stable(feature = "dir_entry_debug", since = "1.13.0")]
+impl fmt::Debug for DirEntry {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_tuple("DirEntry")
+ .field(&self.path())
+ .finish()
+ }
+}
+
impl AsInner<fs_imp::DirEntry> for DirEntry {
fn as_inner(&self) -> &fs_imp::DirEntry { &self.0 }
}
}
}
+ #[test]
+ fn dir_entry_debug() {
+ let tmpdir = tmpdir();
+ File::create(&tmpdir.join("b")).unwrap();
+ let mut read_dir = tmpdir.path().read_dir().unwrap();
+ let dir_entry = read_dir.next().unwrap().unwrap();
+ let actual = format!("{:?}", dir_entry);
+ let expected = format!("DirEntry({:?})", dir_entry.0.path());
+ assert_eq!(actual, expected);
+ }
+
#[test]
fn read_dir_not_found() {
let res = fs::read_dir("/path/that/does/not/exist");
///
/// Seeking always discards the internal buffer, even if the seek position
/// would otherwise fall within it. This guarantees that calling
- /// `.unwrap()` immediately after a seek yields the underlying reader at
- /// the same position.
+ /// `.into_inner()` immediately after a seek yields the underlying reader
+ /// at the same position.
///
/// See `std::io::Seek` for more details.
///
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
if !self.done_first {
match self.first.read(buf)? {
- 0 => { self.done_first = true; }
+ 0 if buf.len() != 0 => { self.done_first = true; }
n => return Ok(n),
}
}
cmp_bufread(chain1, chain2, &testdata[..]);
}
+ #[test]
+ fn chain_zero_length_read_is_not_eof() {
+ let a = b"A";
+ let b = b"B";
+ let mut s = String::new();
+ let mut chain = (&a[..]).chain(&b[..]);
+ chain.read(&mut []).unwrap();
+ chain.read_to_string(&mut s).unwrap();
+ assert_eq!("AB", s);
+ }
+
#[bench]
fn bench_read_to_end(b: &mut test::Bencher) {
b.iter(|| {
#![feature(unboxed_closures)]
#![feature(unicode)]
#![feature(unique)]
-#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]
#![feature(unwind_attributes)]
#![feature(vec_push_all)]
#![feature(zero_one)]
// We want to reexport a few macros from core but libcore has already been
// imported by the compiler (via our #[no_std] attribute) In this case we just
// add a new crate name so we can attach the reexports to it.
-#[macro_reexport(assert, assert_eq, debug_assert, debug_assert_eq,
- unreachable, unimplemented, write, writeln, try)]
+#[macro_reexport(assert, assert_eq, assert_ne, debug_assert, debug_assert_eq,
+ debug_assert_ne, unreachable, unimplemented, write, writeln, try)]
extern crate core as __core;
#[macro_use]
pub mod builtin {
/// The core macro for formatted string creation & output.
///
- /// This macro produces a value of type `fmt::Arguments`. This value can be
- /// passed to the functions in `std::fmt` for performing useful functions.
- /// All other formatting macros (`format!`, `write!`, `println!`, etc) are
+ /// This macro produces a value of type [`fmt::Arguments`]. This value can be
+ /// passed to the functions in [`std::fmt`] for performing useful functions.
+ /// All other formatting macros ([`format!`], [`write!`], [`println!`], etc) are
/// proxied through this one.
///
- /// For more information, see the documentation in `std::fmt`.
+ /// For more information, see the documentation in [`std::fmt`].
+ ///
+ /// [`fmt::Arguments`]: ../std/fmt/struct.Arguments.html
+ /// [`std::fmt`]: ../std/fmt/index.html
+ /// [`format!`]: ../std/macro.format.html
+ /// [`write!`]: ../std/macro.write.html
+ /// [`println!`]: ../std/macro.println.html
///
/// # Examples
///
///
/// [`shutdown`]: struct.TcpStream.html#method.shutdown
/// [`TcpStream`]: struct.TcpStream.html
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Shutdown {
/// Indicates that the reading portion of this stream/socket should be shut
/// An error returned when parsing an IP address or a socket address.
#[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
pub struct AddrParseError(());
#[stable(feature = "addr_parse_error_error", since = "1.4.0")]
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![stable(feature = "metadata_ext", since = "1.1.0")]
+
+use libc;
+
+use fs::Metadata;
+use sys_common::AsInner;
+
+#[allow(deprecated)]
+use os::haiku::raw;
+
+/// OS-specific extension methods for `fs::Metadata`
+#[stable(feature = "metadata_ext", since = "1.1.0")]
+pub trait MetadataExt {
+ /// Gain a reference to the underlying `stat` structure which contains
+ /// the raw information returned by the OS.
+ ///
+ /// The contents of the returned `stat` are **not** consistent across
+ /// Unix platforms. The `os::unix::fs::MetadataExt` trait contains the
+ /// cross-Unix abstractions contained within the raw stat.
+ #[stable(feature = "metadata_ext", since = "1.1.0")]
+ #[rustc_deprecated(since = "1.8.0",
+ reason = "deprecated in favor of the accessor \
+ methods of this trait")]
+ #[allow(deprecated)]
+ fn as_raw_stat(&self) -> &raw::stat;
+
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_dev(&self) -> u64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_ino(&self) -> u64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_mode(&self) -> u32;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_nlink(&self) -> u64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_uid(&self) -> u32;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_gid(&self) -> u32;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_rdev(&self) -> u64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_size(&self) -> u64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_atime(&self) -> i64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_atime_nsec(&self) -> i64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_mtime(&self) -> i64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_mtime_nsec(&self) -> i64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_ctime(&self) -> i64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_ctime_nsec(&self) -> i64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_crtime(&self) -> i64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_crtime_nsec(&self) -> i64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_blksize(&self) -> u64;
+ #[stable(feature = "metadata_ext2", since = "1.8.0")]
+ fn st_blocks(&self) -> u64;
+}
+
+#[stable(feature = "metadata_ext", since = "1.1.0")]
+impl MetadataExt for Metadata {
+ #[allow(deprecated)]
+ fn as_raw_stat(&self) -> &raw::stat {
+ unsafe {
+ &*(self.as_inner().as_inner() as *const libc::stat
+ as *const raw::stat)
+ }
+ }
+ fn st_dev(&self) -> u64 {
+ self.as_inner().as_inner().st_dev as u64
+ }
+ fn st_ino(&self) -> u64 {
+ self.as_inner().as_inner().st_ino as u64
+ }
+ fn st_mode(&self) -> u32 {
+ self.as_inner().as_inner().st_mode as u32
+ }
+ fn st_nlink(&self) -> u64 {
+ self.as_inner().as_inner().st_nlink as u64
+ }
+ fn st_uid(&self) -> u32 {
+ self.as_inner().as_inner().st_uid as u32
+ }
+ fn st_gid(&self) -> u32 {
+ self.as_inner().as_inner().st_gid as u32
+ }
+ fn st_rdev(&self) -> u64 {
+ self.as_inner().as_inner().st_rdev as u64
+ }
+ fn st_size(&self) -> u64 {
+ self.as_inner().as_inner().st_size as u64
+ }
+ fn st_atime(&self) -> i64 {
+ self.as_inner().as_inner().st_atime as i64
+ }
+ fn st_atime_nsec(&self) -> i64 {
+ self.as_inner().as_inner().st_atime_nsec as i64
+ }
+ fn st_mtime(&self) -> i64 {
+ self.as_inner().as_inner().st_mtime as i64
+ }
+ fn st_mtime_nsec(&self) -> i64 {
+ self.as_inner().as_inner().st_mtime_nsec as i64
+ }
+ fn st_ctime(&self) -> i64 {
+ self.as_inner().as_inner().st_ctime as i64
+ }
+ fn st_ctime_nsec(&self) -> i64 {
+ self.as_inner().as_inner().st_ctime_nsec as i64
+ }
+ fn st_crtime(&self) -> i64 {
+ self.as_inner().as_inner().st_crtime as i64
+ }
+ fn st_crtime_nsec(&self) -> i64 {
+ self.as_inner().as_inner().st_crtime_nsec as i64
+ }
+ fn st_blksize(&self) -> u64 {
+ self.as_inner().as_inner().st_blksize as u64
+ }
+ fn st_blocks(&self) -> u64 {
+ self.as_inner().as_inner().st_blocks as u64
+ }
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Haiku-specific definitions
+
+#![stable(feature = "raw_ext", since = "1.1.0")]
+
+pub mod raw;
+pub mod fs;
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Haiku-specific raw type definitions
+
+#![stable(feature = "raw_ext", since = "1.1.0")]
+
+#![allow(deprecated)]
+
+use os::raw::{c_long};
+use os::unix::raw::{uid_t, gid_t};
+
+// Use the direct definition of usize, instead of uintptr_t like in libc
+#[stable(feature = "pthread_t", since = "1.8.0")] pub type pthread_t = usize;
+
+#[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = i64;
+#[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = i32;
+#[stable(feature = "raw_ext", since = "1.1.0")] pub type dev_t = i32;
+#[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = i64;
+#[stable(feature = "raw_ext", since = "1.1.0")] pub type mode_t = u32;
+#[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = i32;
+#[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = i64;
+#[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i32;
+
+#[repr(C)]
+#[derive(Clone)]
+#[stable(feature = "raw_ext", since = "1.1.0")]
+pub struct stat {
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_dev: dev_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_ino: ino_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_mode: mode_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_nlink: nlink_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_uid: uid_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_gid: gid_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_size: off_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_rdev: dev_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_blksize: blksize_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_atime: time_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_atime_nsec: c_long,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_mtime: time_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_mtime_nsec: c_long,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_ctime: time_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_ctime_nsec: c_long,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_crtime: time_t,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_crtime_nsec: c_long,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_type: u32,
+ #[stable(feature = "raw_ext", since = "1.1.0")]
+ pub st_blocks: blkcnt_t,
+}
#[cfg(target_os = "bitrig")] pub mod bitrig;
#[cfg(target_os = "dragonfly")] pub mod dragonfly;
#[cfg(target_os = "freebsd")] pub mod freebsd;
+#[cfg(target_os = "haiku")] pub mod haiku;
#[cfg(target_os = "ios")] pub mod ios;
#[cfg(target_os = "linux")] pub mod linux;
#[cfg(target_os = "macos")] pub mod macos;
/// # Panics
///
/// Panics if called from a panicking thread.
+///
+/// # Examples
+///
+/// The following will print "Custom panic hook":
+///
+/// ```should_panic
+/// use std::panic;
+///
+/// panic::set_hook(Box::new(|_| {
+/// println!("Custom panic hook");
+/// }));
+///
+/// panic!("Normal panic");
+/// ```
#[stable(feature = "panic_hooks", since = "1.10.0")]
pub fn set_hook(hook: Box<Fn(&PanicInfo) + 'static + Sync + Send>) {
if thread::panicking() {
/// # Panics
///
/// Panics if called from a panicking thread.
+///
+/// # Examples
+///
+/// The following will print "Normal panic":
+///
+/// ```should_panic
+/// use std::panic;
+///
+/// panic::set_hook(Box::new(|_| {
+/// println!("Custom panic hook");
+/// }));
+///
+/// let _ = panic::take_hook();
+///
+/// panic!("Normal panic");
+/// ```
#[stable(feature = "panic_hooks", since = "1.10.0")]
pub fn take_hook() -> Box<Fn(&PanicInfo) + 'static + Sync + Send> {
if thread::panicking() {
#[cfg(target_os = "ios")]
#[link(name = "System")]
extern {}
+
+#[cfg(target_os = "haiku")]
+#[link(name = "network")]
+extern {}
struct Packets { cur: *mut Handle<'static, ()> }
#[doc(hidden)]
-#[derive(PartialEq)]
+#[derive(PartialEq, Eq)]
pub enum StartResult {
Installed,
Abort,
target_os = "netbsd",
target_os = "openbsd",
target_os = "solaris",
- target_os = "emscripten"))]
+ target_os = "emscripten",
+ target_os = "haiku"))]
mod imp {
use libc::c_char;
use mem;
#[cfg(any(target_os = "dragonfly", target_os = "freebsd",
target_os = "ios", target_os = "macos",
target_os = "openbsd", target_os = "netbsd",
- target_os = "solaris"))]
+ target_os = "solaris", target_os = "haiku"))]
use sys::net::netc::IPV6_JOIN_GROUP as IPV6_ADD_MEMBERSHIP;
#[cfg(not(any(target_os = "dragonfly", target_os = "freebsd",
target_os = "ios", target_os = "macos",
target_os = "openbsd", target_os = "netbsd",
- target_os = "solaris")))]
+ target_os = "solaris", target_os = "haiku")))]
use sys::net::netc::IPV6_ADD_MEMBERSHIP;
#[cfg(any(target_os = "dragonfly", target_os = "freebsd",
target_os = "ios", target_os = "macos",
target_os = "openbsd", target_os = "netbsd",
- target_os = "solaris"))]
+ target_os = "solaris", target_os = "haiku"))]
use sys::net::netc::IPV6_LEAVE_GROUP as IPV6_DROP_MEMBERSHIP;
#[cfg(not(any(target_os = "dragonfly", target_os = "freebsd",
target_os = "ios", target_os = "macos",
target_os = "openbsd", target_os = "netbsd",
- target_os = "solaris")))]
+ target_os = "solaris", target_os = "haiku")))]
use sys::net::netc::IPV6_DROP_MEMBERSHIP;
////////////////////////////////////////////////////////////////////////////////
/// Copied from String::push
/// This does **not** include the WTF-8 concatenation check.
fn push_code_point_unchecked(&mut self, code_point: CodePoint) {
- let bytes = unsafe {
- char::from_u32_unchecked(code_point.value).encode_utf8()
+ let c = unsafe {
+ char::from_u32_unchecked(code_point.value)
};
- self.bytes.extend_from_slice(bytes.as_slice());
+ let mut bytes = [0; 4];
+ let bytes = c.encode_utf8(&mut bytes).as_bytes();
+ self.bytes.extend_from_slice(bytes)
}
#[inline]
return Some(tmp);
}
+ let mut buf = [0; 2];
self.code_points.next().map(|code_point| {
- let n = unsafe {
- char::from_u32_unchecked(code_point.value).encode_utf16()
+ let c = unsafe {
+ char::from_u32_unchecked(code_point.value)
};
- let n = n.as_slice();
- if n.len() == 2 {
- self.extra = n[1];
+ let n = c.encode_utf16(&mut buf).len();
+ if n == 2 {
+ self.extra = buf[1];
}
- n[0]
+ buf[0]
})
}
Ok(ret as usize)
}
- #[cfg(not(any(target_env = "newlib", target_os = "solaris", target_os = "emscripten")))]
+ #[cfg(not(any(target_env = "newlib",
+ target_os = "solaris",
+ target_os = "emscripten",
+ target_os = "haiku")))]
pub fn set_cloexec(&self) -> io::Result<()> {
unsafe {
cvt(libc::ioctl(self.fd, libc::FIOCLEX))?;
Ok(())
}
}
- #[cfg(any(target_env = "newlib", target_os = "solaris", target_os = "emscripten"))]
+ #[cfg(any(target_env = "newlib",
+ target_os = "solaris",
+ target_os = "emscripten",
+ target_os = "haiku"))]
pub fn set_cloexec(&self) -> io::Result<()> {
unsafe {
let previous = cvt(libc::fcntl(self.fd, libc::F_GETFD))?;
// resolve so we at least compile this.
//
// [1]: http://comments.gmane.org/gmane.linux.lib.musl.general/2963
- #[cfg(target_os = "android")]
+ #[cfg(any(target_os = "android", target_os = "haiku"))]
use libc::F_DUPFD as F_DUPFD_CLOEXEC;
- #[cfg(not(target_os = "android"))]
+ #[cfg(not(any(target_os = "android", target_os = "haiku")))]
use libc::F_DUPFD_CLOEXEC;
let make_filedesc = |fd| {
stat(&self.path()).map(|m| m.file_type())
}
- #[cfg(not(target_os = "solaris"))]
+ #[cfg(target_os = "haiku")]
+ pub fn file_type(&self) -> io::Result<FileType> {
+ lstat(&self.path()).map(|m| m.file_type())
+ }
+
+ #[cfg(not(any(target_os = "solaris", target_os = "haiku")))]
pub fn file_type(&self) -> io::Result<FileType> {
match self.entry.d_type {
libc::DT_CHR => Ok(FileType { mode: libc::S_IFCHR }),
target_os = "linux",
target_os = "emscripten",
target_os = "android",
- target_os = "solaris"))]
+ target_os = "solaris",
+ target_os = "haiku"))]
pub fn ino(&self) -> u64 {
self.entry.d_ino as u64
}
}
#[cfg(any(target_os = "android",
target_os = "linux",
- target_os = "emscripten"))]
+ target_os = "emscripten",
+ target_os = "haiku"))]
fn name_bytes(&self) -> &[u8] {
unsafe {
CStr::from_ptr(self.entry.d_name.as_ptr()).to_bytes()
#[cfg(target_os = "bitrig")] pub use os::bitrig as platform;
#[cfg(target_os = "dragonfly")] pub use os::dragonfly as platform;
#[cfg(target_os = "freebsd")] pub use os::freebsd as platform;
+#[cfg(target_os = "haiku")] pub use os::haiku as platform;
#[cfg(target_os = "ios")] pub use os::ios as platform;
#[cfg(target_os = "linux")] pub use os::linux as platform;
#[cfg(target_os = "macos")] pub use os::macos as platform;
use ffi::CStr;
use io;
-use libc::{self, c_int, size_t, sockaddr, socklen_t};
+use libc::{self, c_int, size_t, sockaddr, socklen_t, EAI_SYSTEM};
use net::{SocketAddr, Shutdown};
use str;
use sys::fd::FileDesc;
pub fn init() {}
pub fn cvt_gai(err: c_int) -> io::Result<()> {
- if err == 0 { return Ok(()) }
+ if err == 0 {
+ return Ok(())
+ }
+ if err == EAI_SYSTEM {
+ return Err(io::Error::last_os_error())
+ }
let detail = unsafe {
str::from_utf8(CStr::from_ptr(libc::gai_strerror(err)).to_bytes()).unwrap()
target_os = "ios",
target_os = "freebsd"),
link_name = "__error")]
+ #[cfg_attr(target_os = "haiku", link_name = "_errnop")]
fn errno_location() -> *mut c_int;
}
}
}
+#[cfg(target_os = "haiku")]
+pub fn current_exe() -> io::Result<PathBuf> {
+    // Use Haiku's image info functions
+    // Local mirror of Haiku's `image_info` struct. Only `name` is read
+    // below, but every field must be declared so the struct's size and
+    // layout match what the kernel expects to fill in.
+    // NOTE(review): layout assumed to match <kernel/image.h> — confirm
+    // against the Haiku headers for the targeted release.
+    #[repr(C)]
+    struct image_info {
+        id: i32,
+        type_: i32,
+        sequence: i32,
+        init_order: i32,
+        init_routine: *mut libc::c_void, // function pointer
+        term_routine: *mut libc::c_void, // function pointer
+        device: libc::dev_t,
+        node: libc::ino_t,
+        name: [libc::c_char; 1024], // MAXPATHLEN
+        text: *mut libc::c_void,
+        data: *mut libc::c_void,
+        text_size: i32,
+        data_size: i32,
+        api_version: i32,
+        abi: i32,
+    }
+
+    // SAFETY: `info` is zero-initialized and passed with its exact size,
+    // so `_get_next_image_info` writes only within the buffer; `name` is
+    // read as a NUL-terminated C string only on the success path.
+    unsafe {
+        extern {
+            fn _get_next_image_info(team_id: i32, cookie: *mut i32,
+                                    info: *mut image_info, size: i32) -> i32;
+        }
+
+        let mut info: image_info = mem::zeroed();
+        let mut cookie: i32 = 0;
+        // the executable can be found at team id 0
+        let result = _get_next_image_info(0, &mut cookie, &mut info,
+                                          mem::size_of::<image_info>() as i32);
+        if result != 0 {
+            use io::ErrorKind;
+            Err(io::Error::new(ErrorKind::Other, "Error getting executable path"))
+        } else {
+            // The kernel fills `name` with the NUL-terminated image path.
+            let name = CStr::from_ptr(info.name.as_ptr()).to_bytes();
+            Ok(PathBuf::from(OsStr::from_bytes(name)))
+        }
+    }
+}
+
pub struct Args {
iter: vec::IntoIter<OsString>,
_dont_send_or_sync_me: PhantomData<*mut ()>,
target_os = "openbsd",
target_os = "solaris",
target_os = "nacl",
- target_os = "emscripten"))]
+ target_os = "emscripten",
+ target_os = "haiku"))]
pub fn args() -> Args {
use sys_common;
let bytes = sys_common::args::clone().unwrap_or(Vec::new());
name.as_ptr() as *mut libc::c_void);
}
}
- #[cfg(any(target_env = "newlib", target_os = "solaris", target_os = "emscripten"))]
+ #[cfg(any(target_env = "newlib",
+ target_os = "solaris",
+ target_os = "haiku",
+ target_os = "emscripten"))]
pub fn set_name(_name: &CStr) {
- // Newlib, Illumos and Emscripten have no way to set a thread name.
+ // Newlib, Illumos, Haiku, and Emscripten have no way to set a thread name.
}
pub fn sleep(dur: Duration) {
/// Returns the number of whole seconds represented by this duration.
///
- /// The extra precision represented by this duration is ignored (e.g. extra
+ /// The extra precision represented by this duration is ignored (i.e. extra
/// nanoseconds are not represented in the returned value).
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
///
/// This method does **not** return the length of the duration when
/// represented by nanoseconds. The returned number always represents a
- /// fractional portion of a second (e.g. it is less than one billion).
+ /// fractional portion of a second (i.e. it is less than one billion).
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
pub fn subsec_nanos(&self) -> u32 { self.nanos }
Netbsd,
Openbsd,
NaCl,
+ Haiku,
Solaris,
}
Os::Netbsd => "netbsd".fmt(f),
Os::Openbsd => "openbsd".fmt(f),
Os::NaCl => "nacl".fmt(f),
+ Os::Haiku => "haiku".fmt(f),
Os::Solaris => "solaris".fmt(f),
}
}
use std::fmt;
use std::rc::Rc;
-use serialize::{Encodable, Decodable, Encoder, Decoder};
+use std::u32;
+
+use serialize::{self, Encodable, Decodable, Encoder, Decoder};
/// A name is a part of an identifier, representing a string or gensym. It's
/// the result of interning.
pub output: Option<P<Ty>>,
}
-pub type CrateNum = u32;
+#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Debug)]
+pub struct NodeId(u32);
+
+impl NodeId {
+    /// Constructs a `NodeId` from a `usize` index.
+    ///
+    /// Panics when `x >= u32::MAX`; `u32::MAX` itself is excluded because
+    /// that value is reserved for `DUMMY_NODE_ID` (`NodeId(!0)`).
+    pub fn new(x: usize) -> NodeId {
+        assert!(x < (u32::MAX as usize));
+        NodeId(x as u32)
+    }
+
+    /// Constructs a `NodeId` from a raw `u32` with no range check.
+    pub fn from_u32(x: u32) -> NodeId {
+        NodeId(x)
+    }
+
+    /// Returns the underlying id widened to `usize` (e.g. for indexing).
+    pub fn as_usize(&self) -> usize {
+        self.0 as usize
+    }
+
+    /// Returns the underlying raw `u32` id.
+    pub fn as_u32(&self) -> u32 {
+        self.0
+    }
+}
-pub type NodeId = u32;
+// Display delegates to the underlying integer so diagnostics keep the
+// same numeric form `NodeId` had when it was a plain `u32` type alias.
+impl fmt::Display for NodeId {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Display::fmt(&self.0, f)
+    }
+}
+
+// By default, encode/decode as a bare u32 (wire-compatible with the old
+// `u32` alias); specialized encoders may override this behavior.
+impl serialize::UseSpecializedEncodable for NodeId {
+    fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        s.emit_u32(self.0)
+    }
+}
+
+impl serialize::UseSpecializedDecodable for NodeId {
+    fn default_decode<D: Decoder>(d: &mut D) -> Result<NodeId, D::Error> {
+        d.read_u32().map(NodeId)
+    }
+}
/// Node id used to represent the root of the crate.
-pub const CRATE_NODE_ID: NodeId = 0;
+pub const CRATE_NODE_ID: NodeId = NodeId(0);
/// When parsing and doing expansions, we initially give all AST nodes this AST
/// node value. Then later, in the renumber pass, we renumber them to have
/// small, positive ids.
-pub const DUMMY_NODE_ID: NodeId = !0;
+pub const DUMMY_NODE_ID: NodeId = NodeId(!0);
/// The AST represents all type param bounds as types.
/// typeck::collect::compute_bounds matches these against
"packed" => Some(ReprPacked),
"simd" => Some(ReprSimd),
_ => match int_type_of_word(word) {
- Some(ity) => Some(ReprInt(item.span, ity)),
+ Some(ity) => Some(ReprInt(ity)),
None => {
// Not a word we recognize
span_err!(diagnostic, item.span, E0552,
#[derive(PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, Clone)]
pub enum ReprAttr {
ReprAny,
- ReprInt(Span, IntType),
+ ReprInt(IntType),
ReprExtern,
ReprPacked,
ReprSimd,
pub fn is_ffi_safe(&self) -> bool {
match *self {
ReprAny => false,
- ReprInt(_sp, ity) => ity.is_ffi_safe(),
+ ReprInt(ity) => ity.is_ffi_safe(),
ReprExtern => true,
ReprPacked => false,
ReprSimd => true,
}
}
+    /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If
+    /// there are gaps between lhs and rhs, the resulting union will cross these gaps.
+    /// For this to work, the spans have to be:
+    ///    * the expn_id of both spans must match
+    ///    * the lhs span needs to end on the same line the rhs span begins
+    ///    * the lhs span must start at or before the rhs span
+    pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
+        use std::cmp;
+
+        // make sure we're at the same expansion id
+        if sp_lhs.expn_id != sp_rhs.expn_id {
+            return None;
+        }
+
+        // Spans whose endpoints cannot be resolved to a line are unmergeable.
+        let lhs_end = match self.lookup_line(sp_lhs.hi) {
+            Ok(x) => x,
+            Err(_) => return None
+        };
+        let rhs_begin = match self.lookup_line(sp_rhs.lo) {
+            Ok(x) => x,
+            Err(_) => return None
+        };
+
+        // if we must cross lines to merge, don't merge
+        if lhs_end.line != rhs_begin.line {
+            return None;
+        }
+
+        // ensure these follow the expected order and we don't overlap
+        if (sp_lhs.lo <= sp_rhs.lo) && (sp_lhs.hi <= sp_rhs.lo) {
+            // The merged span inherits lhs's expansion id (equal to rhs's,
+            // checked above) and covers both spans plus any gap between them.
+            Some(Span {
+                lo: cmp::min(sp_lhs.lo, sp_rhs.lo),
+                hi: cmp::max(sp_lhs.hi, sp_rhs.hi),
+                expn_id: sp_lhs.expn_id,
+            })
+        } else {
+            None
+        }
+    }
+
pub fn span_to_string(&self, sp: Span) -> String {
if sp == COMMAND_LINE_SP {
return "<command line option>".to_string();
fn macro_backtrace(&self, span: Span) -> Vec<MacroBacktrace> {
self.macro_backtrace(span)
}
+ fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
+ self.merge_spans(sp_lhs, sp_rhs)
+ }
}
// _____________________________________________________________________________
blork.rs:1:1: 1:12\n `first line.`\n");
}
+    /// Test merging two spans on the same line
+    #[test]
+    fn span_merging() {
+        let cm = CodeMap::new();
+        // The `~` runs in the selection strings mark the two spans to merge.
+        let inputtext = "bbbb BB bb CCC\n";
+        let selection1 = "     ~~       \n";
+        let selection2 = "           ~~~\n";
+        cm.new_filemap_and_lines("blork.rs", None, inputtext);
+        let span1 = span_from_selection(inputtext, selection1);
+        let span2 = span_from_selection(inputtext, selection2);
+
+        // Both spans are on line 1, so the merge must succeed and cover
+        // everything from the start of span1 through the end of span2.
+        if let Some(sp) = cm.merge_spans(span1, span2) {
+            let sstr = cm.span_to_expanded_string(sp);
+            assert_eq!(sstr, "blork.rs:1:6: 1:15\n`BB bb CCC`\n");
+        }
+        else {
+            assert!(false);
+        }
+    }
+
+    /// Test failing to merge two spans on different lines
+    #[test]
+    fn span_merging_fail() {
+        let cm = CodeMap::new();
+        // The two selected spans sit on different lines of the input, which
+        // `merge_spans` explicitly refuses to merge.
+        let inputtext  = "bbbb BB\ncc CCC\n";
+        let selection1 = "     ~~\n      \n";
+        let selection2 = "       \n   ~~~\n";
+        cm.new_filemap_and_lines("blork.rs", None, inputtext);
+        let span1 = span_from_selection(inputtext, selection1);
+        let span2 = span_from_selection(inputtext, selection2);
+
+        assert!(cm.merge_spans(span1, span2).is_none());
+    }
+
/// Returns the span corresponding to the `n`th occurrence of
/// `substring` in `source_text`.
trait CodeMapExtension {
// flag the offending attributes
for attr in attrs.iter() {
if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
- emit_feature_err(&self.sess.span_diagnostic,
+ emit_feature_err(&self.sess,
"stmt_expr_attributes",
attr.span,
GateIssue::Language,
attr.check_name("cfg")
}
-fn is_test_or_bench(attr: &ast::Attribute) -> bool {
+pub fn is_test_or_bench(attr: &ast::Attribute) -> bool {
attr.check_name("test") || attr.check_name("bench")
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-pub use self::SyntaxExtension::*;
+pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT};
use ast::{self, Attribute, Name, PatKind};
use attr::HasAttrs;
use errors::DiagnosticBuilder;
use ext::expand::{self, Invocation, Expansion};
use ext::hygiene::Mark;
-use ext::tt::macro_rules;
-use parse;
-use parse::parser;
+use fold::{self, Folder};
+use parse::{self, parser};
use parse::token;
use parse::token::{InternedString, str_to_ident};
use ptr::P;
use std_inject;
use util::small_vector::SmallVector;
-use fold::Folder;
-use feature_gate;
-use std::collections::HashMap;
use std::path::PathBuf;
use std::rc::Rc;
-use tokenstream;
+use std::default::Default;
+use tokenstream::{self, TokenStream};
#[derive(Debug,Clone)]
}
}
+/// A function-like procedural macro: maps the invocation's input
+/// `TokenStream` to an output `TokenStream` that replaces it.
+pub trait ProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt,
+                   span: Span,
+                   ts: TokenStream)
+                   -> TokenStream;
+}
+
+// Blanket impl: any plain `Fn(TokenStream) -> TokenStream` acts as a
+// `ProcMacro`; the expansion context and span are ignored for now.
+impl<F> ProcMacro for F
+    where F: Fn(TokenStream) -> TokenStream
+{
+    fn expand<'cx>(&self,
+                   _ecx: &'cx mut ExtCtxt,
+                   _span: Span,
+                   ts: TokenStream)
+                   -> TokenStream {
+        // FIXME setup implicit context in TLS before calling self.
+        (*self)(ts)
+    }
+}
+
+/// An attribute-like procedural macro: receives the attribute's own tokens
+/// (`annotation`) and the tokens of the item it decorates (`annotated`),
+/// and produces the replacement tokens.
+pub trait AttrProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt,
+                   span: Span,
+                   annotation: TokenStream,
+                   annotated: TokenStream)
+                   -> TokenStream;
+}
+
+// Blanket impl mirroring the `ProcMacro` one: any suitable two-argument
+// closure is usable directly; context and span are ignored for now.
+impl<F> AttrProcMacro for F
+    where F: Fn(TokenStream, TokenStream) -> TokenStream
+{
+    fn expand<'cx>(&self,
+                   _ecx: &'cx mut ExtCtxt,
+                   _span: Span,
+                   annotation: TokenStream,
+                   annotated: TokenStream)
+                   -> TokenStream {
+        // FIXME setup implicit context in TLS before calling self.
+        (*self)(annotation, annotated)
+    }
+}
+
/// Represents a thing that maps token trees to Macro Results
pub trait TTMacroExpander {
fn expand<'cx>(&self,
/// based upon it.
///
/// `#[derive(...)]` is a `MultiItemDecorator`.
- MultiDecorator(Box<MultiItemDecorator + 'static>),
+ ///
+ /// Prefer ProcMacro or MultiModifier since they are more flexible.
+ MultiDecorator(Box<MultiItemDecorator>),
/// A syntax extension that is attached to an item and modifies it
- /// in-place. More flexible version than Modifier.
- MultiModifier(Box<MultiItemModifier + 'static>),
+ /// in-place. Also allows decoration, i.e., creating new items.
+ MultiModifier(Box<MultiItemModifier>),
+
+ /// A function-like procedural macro. TokenStream -> TokenStream.
+ ProcMacro(Box<ProcMacro>),
+
+ /// An attribute-like procedural macro. TokenStream, TokenStream -> TokenStream.
+ /// The first TokenSteam is the attribute, the second is the annotated item.
+ /// Allows modification of the input items and adding new items, similar to
+ /// MultiModifier, but uses TokenStreams, rather than AST nodes.
+ AttrProcMacro(Box<AttrProcMacro>),
/// A normal, function-like syntax extension.
///
///
/// The `bool` dictates whether the contents of the macro can
/// directly use `#[unstable]` things (true == yes).
- NormalTT(Box<TTMacroExpander + 'static>, Option<Span>, bool),
+ NormalTT(Box<TTMacroExpander>, Option<Span>, bool),
/// A function-like syntax extension that has an extra ident before
/// the block.
///
- IdentTT(Box<IdentMacroExpander + 'static>, Option<Span>, bool),
+ IdentTT(Box<IdentMacroExpander>, Option<Span>, bool),
}
pub type NamedSyntaxExtension = (Name, SyntaxExtension);
pub trait Resolver {
- fn load_crate(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec<LoadedMacro>;
fn next_node_id(&mut self) -> ast::NodeId;
+ fn get_module_scope(&mut self, id: ast::NodeId) -> Mark;
fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion);
- fn add_macro(&mut self, scope: Mark, ident: ast::Ident, ext: Rc<SyntaxExtension>);
+ fn add_macro(&mut self, scope: Mark, def: ast::MacroDef);
+ fn add_ext(&mut self, scope: Mark, ident: ast::Ident, ext: Rc<SyntaxExtension>);
fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>);
fn find_attr_invoc(&mut self, attrs: &mut Vec<Attribute>) -> Option<Attribute>;
- fn resolve_invoc(&mut self, invoc: &Invocation) -> Option<Rc<SyntaxExtension>>;
-}
-
-pub enum LoadedMacro {
- Def(ast::MacroDef),
- CustomDerive(String, Box<MultiItemModifier>),
+ fn resolve_invoc(&mut self, scope: Mark, invoc: &Invocation) -> Option<Rc<SyntaxExtension>>;
+ fn resolve_derive_mode(&mut self, ident: ast::Ident) -> Option<Rc<MultiItemModifier>>;
}
pub struct DummyResolver;
impl Resolver for DummyResolver {
- fn load_crate(&mut self, _extern_crate: &ast::Item, _allows_macros: bool) -> Vec<LoadedMacro> {
- Vec::new()
- }
fn next_node_id(&mut self) -> ast::NodeId { ast::DUMMY_NODE_ID }
+ fn get_module_scope(&mut self, _id: ast::NodeId) -> Mark { Mark::root() }
fn visit_expansion(&mut self, _invoc: Mark, _expansion: &Expansion) {}
- fn add_macro(&mut self, _scope: Mark, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
+ fn add_macro(&mut self, _scope: Mark, _def: ast::MacroDef) {}
+ fn add_ext(&mut self, _scope: Mark, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
fn add_expansions_at_stmt(&mut self, _id: ast::NodeId, _macros: Vec<Mark>) {}
fn find_attr_invoc(&mut self, _attrs: &mut Vec<Attribute>) -> Option<Attribute> { None }
- fn resolve_invoc(&mut self, _invoc: &Invocation) -> Option<Rc<SyntaxExtension>> { None }
+ fn resolve_derive_mode(&mut self, _ident: ast::Ident) -> Option<Rc<MultiItemModifier>> { None }
+ fn resolve_invoc(&mut self, _scope: Mark, _invoc: &Invocation) -> Option<Rc<SyntaxExtension>> {
+ None
+ }
}
#[derive(Clone)]
pub depth: usize,
pub backtrace: ExpnId,
pub module: Rc<ModuleData>,
- pub in_block: bool,
+
+ // True if non-inline modules without a `#[path]` are forbidden at the root of this expansion.
+ pub no_noninline_mod: bool,
}
/// One of these is made during expansion and incrementally updated as we go;
pub ecfg: expand::ExpansionConfig<'a>,
pub crate_root: Option<&'static str>,
pub resolver: &'a mut Resolver,
- pub exported_macros: Vec<ast::MacroDef>,
- pub derive_modes: HashMap<InternedString, Box<MultiItemModifier>>,
+ pub resolve_err_count: usize,
pub current_expansion: ExpansionData,
}
cfg: cfg,
ecfg: ecfg,
crate_root: None,
- exported_macros: Vec::new(),
resolver: resolver,
- derive_modes: HashMap::new(),
+ resolve_err_count: 0,
current_expansion: ExpansionData {
mark: Mark::root(),
depth: 0,
backtrace: NO_EXPANSION,
module: Rc::new(ModuleData { mod_path: Vec::new(), directory: PathBuf::new() }),
- in_block: false,
+ no_noninline_mod: false,
},
}
}
}
pub fn bt_pop(&mut self) {}
- pub fn insert_macro(&mut self, def: ast::MacroDef) {
- if def.export {
- self.exported_macros.push(def.clone());
- }
- if def.use_locally {
- let ext = macro_rules::compile(self, &def);
- self.resolver.add_macro(self.current_expansion.mark, def.ident, Rc::new(ext));
- }
- }
-
- pub fn insert_custom_derive(&mut self, name: &str, ext: Box<MultiItemModifier>, sp: Span) {
- if !self.ecfg.enable_rustc_macro() {
- feature_gate::emit_feature_err(&self.parse_sess.span_diagnostic,
- "rustc_macro",
- sp,
- feature_gate::GateIssue::Language,
- "loading custom derive macro crates \
- is experimentally supported");
- }
- let name = token::intern_and_get_ident(name);
- if self.derive_modes.insert(name.clone(), ext).is_some() {
- self.span_err(sp, &format!("cannot shadow existing derive mode `{}`", name));
- }
- }
-
pub fn struct_span_warn(&self,
sp: Span,
msg: &str)
for (name, extension) in user_exts {
let ident = ast::Ident::with_empty_ctxt(name);
- self.resolver.add_macro(Mark::root(), ident, Rc::new(extension));
+ self.resolver.add_ext(Mark::root(), ident, Rc::new(extension));
}
let mut module = ModuleData {
}
Some(es)
}
+
+/// A folder that rewrites every span in an AST fragment to one fixed
+/// `span`; used to stamp freshly re-parsed macro expansions with the span
+/// of their invocation site.
+pub struct ChangeSpan {
+    pub span: Span
+}
+
+impl Folder for ChangeSpan {
+    fn new_span(&mut self, _sp: Span) -> Span {
+        self.span
+    }
+
+    // Descend into macro invocations as well, so spans inside not-yet-expanded
+    // macros are rewritten too.
+    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
+        fold::noop_fold_mac(mac, self)
+    }
+}
// except according to those terms.
use ast::{Block, Crate, Ident, Mac_, PatKind};
-use ast::{MacStmtStyle, StmtKind, ItemKind};
+use ast::{Name, MacStmtStyle, StmtKind, ItemKind};
use ast;
use ext::hygiene::Mark;
use ext::placeholders::{placeholder, PlaceholderExpander};
use attr::{self, HasAttrs};
use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
use syntax_pos::{self, Span, ExpnId};
-use config::StripUnconfigured;
+use config::{is_test_or_bench, StripUnconfigured};
use ext::base::*;
use feature_gate::{self, Features};
use fold;
use fold::*;
-use parse::token::{intern, keywords};
+use parse::{ParseSess, PResult, lexer};
+use parse::parser::Parser;
+use parse::token::{self, intern, keywords};
+use print::pprust;
use ptr::P;
-use tokenstream::TokenTree;
+use tokenstream::{TokenTree, TokenStream};
use util::small_vector::SmallVector;
use visit::Visitor;
($($kind:ident: $ty:ty [$($vec:ident, $ty_elt:ty)*], $kind_name:expr, .$make:ident,
$(.$fold:ident)* $(lift .$fold_elt:ident)*,
$(.$visit:ident)* $(lift .$visit_elt:ident)*;)*) => {
- #[derive(Copy, Clone)]
+ #[derive(Copy, Clone, PartialEq, Eq)]
pub enum ExpansionKind { OptExpr, $( $kind, )* }
pub enum Expansion { OptExpr(Option<P<ast::Expr>>), $( $kind($ty), )* }
impl ExpansionKind {
- fn name(self) -> &'static str {
+ pub fn name(self) -> &'static str {
match self {
ExpansionKind::OptExpr => "expression",
$( ExpansionKind::$kind => $kind_name, )*
self.expand(Expansion::$kind(SmallVector::one(node))).$make()
})*)*
}
+
+ impl<'a> MacResult for ::ext::tt::macro_rules::ParserAnyMacro<'a> {
+ $(fn $make(self: Box<::ext::tt::macro_rules::ParserAnyMacro<'a>>) -> Option<$ty> {
+ Some(self.make(ExpansionKind::$kind).$make())
+ })*
+ }
}
}
InvocationKind::Attr { ref attr, .. } => attr.span,
}
}
-
- pub fn mark(&self) -> Mark {
- self.expansion_data.mark
- }
}
pub struct MacroExpander<'a, 'b:'a> {
fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
let err_count = self.cx.parse_sess.span_diagnostic.err_count();
- let mut krate_item = placeholder(ExpansionKind::Items, ast::DUMMY_NODE_ID)
- .make_items().pop().unwrap().unwrap();
- krate_item.node = ast::ItemKind::Mod(krate.module);
- let krate_item = Expansion::Items(SmallVector::one(P(krate_item)));
-
- krate.module = match self.expand(krate_item).make_items().pop().unwrap().unwrap().node {
- ast::ItemKind::Mod(module) => module,
+ let krate_item = Expansion::Items(SmallVector::one(P(ast::Item {
+ attrs: krate.attrs,
+ span: krate.span,
+ node: ast::ItemKind::Mod(krate.module),
+ ident: keywords::Invalid.ident(),
+ id: ast::DUMMY_NODE_ID,
+ vis: ast::Visibility::Public,
+ })));
+
+ match self.expand(krate_item).make_items().pop().unwrap().unwrap() {
+ ast::Item { attrs, node: ast::ItemKind::Mod(module), .. } => {
+ krate.attrs = attrs;
+ krate.module = module;
+ },
_ => unreachable!(),
};
- krate.exported_macros = mem::replace(&mut self.cx.exported_macros, Vec::new());
-
- for def in &mut krate.exported_macros {
- def.id = self.cx.resolver.next_node_id()
- }
- if self.cx.parse_sess.span_diagnostic.err_count() > err_count {
+ if self.cx.parse_sess.span_diagnostic.err_count() - self.cx.resolve_err_count > err_count {
self.cx.parse_sess.span_diagnostic.abort_if_errors();
}
let ExpansionData { depth, mark, .. } = invoc.expansion_data;
self.cx.current_expansion = invoc.expansion_data.clone();
- let expansion = match self.cx.resolver.resolve_invoc(&invoc) {
+ let scope = if self.monotonic { mark } else { orig_expansion_data.mark };
+ self.cx.current_expansion.mark = scope;
+ let expansion = match self.cx.resolver.resolve_invoc(scope, &invoc) {
Some(ext) => self.expand_invoc(invoc, ext),
None => invoc.expansion_kind.dummy(invoc.span()),
};
while let Some(expansions) = expansions.pop() {
for (mark, expansion) in expansions.into_iter().rev() {
let expansion = expansion.fold_with(&mut placeholder_expander);
- placeholder_expander.add(mark, expansion);
+ placeholder_expander.add(ast::NodeId::from_u32(mark), expansion);
}
}
- placeholder_expander.remove(0)
+ placeholder_expander.remove(ast::NodeId::from_u32(0))
}
fn collect_invocations(&mut self, expansion: Expansion) -> (Expansion, Vec<Invocation>) {
};
self.cx.cfg = crate_config;
- let mark = self.cx.current_expansion.mark;
- self.cx.resolver.visit_expansion(mark, &result.0);
+ if self.monotonic {
+ let err_count = self.cx.parse_sess.span_diagnostic.err_count();
+ let mark = self.cx.current_expansion.mark;
+ self.cx.resolver.visit_expansion(mark, &result.0);
+ self.cx.resolve_err_count += self.cx.parse_sess.span_diagnostic.err_count() - err_count;
+ }
+
result
}
};
attr::mark_used(&attr);
+ let name = intern(&attr.name());
self.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
- format: MacroAttribute(intern(&attr.name())),
+ format: MacroAttribute(name),
span: Some(attr.span),
allow_internal_unstable: false,
}
items.push(item);
kind.expect_from_annotatables(items)
}
+ SyntaxExtension::AttrProcMacro(ref mac) => {
+ let attr_toks = TokenStream::from_tts(tts_for_attr(&attr, &self.cx.parse_sess));
+ let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess));
+
+ let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
+ self.parse_expansion(tok_result, kind, name, attr.span)
+ }
_ => unreachable!(),
}
}
/// Expand a macro invocation. Returns the result of expansion.
fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) -> Expansion {
- let (mark, kind) = (invoc.mark(), invoc.expansion_kind);
+ let (mark, kind) = (invoc.expansion_data.mark, invoc.expansion_kind);
let (attrs, mac, ident, span) = match invoc.kind {
InvocationKind::Bang { attrs, mac, ident, span } => (attrs, mac, ident, span),
_ => unreachable!(),
// Detect use of feature-gated or invalid attributes on macro invoations
// since they will not be detected after macro expansion.
for attr in attrs.iter() {
- feature_gate::check_attribute(&attr, &self.cx.parse_sess.span_diagnostic,
+ feature_gate::check_attribute(&attr, &self.cx.parse_sess,
&self.cx.parse_sess.codemap(),
&self.cx.ecfg.features.unwrap());
}
kind.make_from(expander.expand(self.cx, span, ident, marked_tts, attrs))
}
- MultiDecorator(..) | MultiModifier(..) => {
+ MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
self.cx.span_err(path.span,
&format!("`{}` can only be used in attributes", extname));
return kind.dummy(span);
}
+
+ SyntaxExtension::ProcMacro(ref expandfun) => {
+ if ident.name != keywords::Invalid.name() {
+ let msg =
+ format!("macro {}! expects no ident argument, given '{}'", extname, ident);
+ self.cx.span_err(path.span, &msg);
+ return kind.dummy(span);
+ }
+
+ self.cx.bt_push(ExpnInfo {
+ call_site: span,
+ callee: NameAndSpan {
+ format: MacroBang(extname),
+ // FIXME procedural macros do not have proper span info
+ // yet, when they do, we should use it here.
+ span: None,
+ // FIXME probably want to follow macro_rules macros here.
+ allow_internal_unstable: false,
+ },
+ });
+
+ let toks = TokenStream::from_tts(marked_tts);
+ let tok_result = expandfun.expand(self.cx, span, toks);
+ Some(self.parse_expansion(tok_result, kind, extname, span))
+ }
};
let expanded = if let Some(expanded) = opt_expanded {
expn_id: Some(self.cx.backtrace()),
})
}
+
+    /// Parses a procedural macro's output `TokenStream` back into an AST
+    /// fragment of the requested `kind`. On a parse error, emits the error
+    /// and returns a dummy expansion so overall expansion can continue;
+    /// `name` and `span` identify the invocation for diagnostics.
+    fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span)
+                       -> Expansion {
+        let mut parser = self.cx.new_parser_from_tts(&toks.to_tts());
+        let expansion = match parser.parse_expansion(kind, false) {
+            Ok(expansion) => expansion,
+            Err(mut err) => {
+                err.emit();
+                return kind.dummy(span);
+            }
+        };
+        // Complain about any trailing tokens the macro produced.
+        parser.ensure_complete_parse(name, kind.name(), span);
+        // FIXME better span info
+        expansion.fold_with(&mut ChangeSpan { span: span })
+    }
+}
+
+impl<'a> Parser<'a> {
+    /// Parses an entire AST fragment of the given `kind` from this parser's
+    /// token stream; the multi-item kinds (items, trait/impl items, stmts)
+    /// consume until EOF. `macro_legacy_warnings` is forwarded to statement
+    /// parsing.
+    pub fn parse_expansion(&mut self, kind: ExpansionKind, macro_legacy_warnings: bool)
+                           -> PResult<'a, Expansion> {
+        Ok(match kind {
+            ExpansionKind::Items => {
+                let mut items = SmallVector::zero();
+                // `parse_item` returns `None` at EOF.
+                while let Some(item) = self.parse_item()? {
+                    items.push(item);
+                }
+                Expansion::Items(items)
+            }
+            ExpansionKind::TraitItems => {
+                let mut items = SmallVector::zero();
+                while self.token != token::Eof {
+                    items.push(self.parse_trait_item()?);
+                }
+                Expansion::TraitItems(items)
+            }
+            ExpansionKind::ImplItems => {
+                let mut items = SmallVector::zero();
+                while self.token != token::Eof {
+                    items.push(self.parse_impl_item()?);
+                }
+                Expansion::ImplItems(items)
+            }
+            ExpansionKind::Stmts => {
+                let mut stmts = SmallVector::zero();
+                while self.token != token::Eof {
+                    // `parse_full_stmt` can yield `None` (e.g. trailing
+                    // semicolons) without having reached EOF.
+                    if let Some(stmt) = self.parse_full_stmt(macro_legacy_warnings)? {
+                        stmts.push(stmt);
+                    }
+                }
+                Expansion::Stmts(stmts)
+            }
+            ExpansionKind::Expr => Expansion::Expr(self.parse_expr()?),
+            ExpansionKind::OptExpr => Expansion::OptExpr(Some(self.parse_expr()?)),
+            ExpansionKind::Ty => Expansion::Ty(self.parse_ty()?),
+            ExpansionKind::Pat => Expansion::Pat(self.parse_pat()?),
+        })
+    }
+
+    /// Emits an error if any tokens remain after a macro expansion has been
+    /// parsed, pointing back at the invocation (`span`) that produced them.
+    pub fn ensure_complete_parse(&mut self, macro_name: ast::Name, kind_name: &str, span: Span) {
+        if self.token != token::Eof {
+            let msg = format!("macro expansion ignores token `{}` and any following",
+                              self.this_token_to_string());
+            let mut err = self.diagnostic().struct_span_err(self.span, &msg);
+            let msg = format!("caused by the macro expansion here; the usage \
+                               of `{}!` is likely invalid in {} context",
+                               macro_name, kind_name);
+            err.span_note(span, &msg).emit();
+        }
+    }
+}
struct InvocationCollector<'a, 'b: 'a> {
expansion_kind: expansion_kind,
expansion_data: ExpansionData { mark: mark, ..self.cx.current_expansion.clone() },
});
- placeholder(expansion_kind, mark.as_u32())
+ placeholder(expansion_kind, ast::NodeId::from_u32(mark.as_u32()))
}
fn collect_bang(
}
}
+// These are pretty nasty. Ideally, we would keep the tokens around, linked from
+// the AST. However, we don't so we need to create new ones. Since the item might
+// have come from a macro expansion (possibly only in part), we can't use the
+// existing codemap.
+//
+// Therefore, we must use the pretty printer (yuck) to turn the AST node into a
+// string, which we then re-tokenise (double yuck), but first we have to patch
+// the pretty-printed string on to the end of the existing codemap (infinity-yuck).
+/// Pretty-prints an annotatable AST node and re-lexes it into token trees.
+fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    let text = match *item {
+        Annotatable::Item(ref i) => pprust::item_to_string(i),
+        Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
+        Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
+    };
+    string_to_tts(text, parse_sess)
+}
+
+/// Pretty-prints an attribute and re-lexes it into token trees.
+fn tts_for_attr(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    string_to_tts(pprust::attr_to_string(attr), parse_sess)
+}
+
+/// Registers `text` as a synthetic `<macro expansion>` file in the codemap
+/// and tokenizes it into token trees.
+fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
+    let filemap = parse_sess.codemap()
+                            .new_filemap(String::from("<macro expansion>"), None, text);
+
+    let lexer = lexer::StringReader::new(&parse_sess.span_diagnostic, filemap);
+    let mut parser = Parser::new(parse_sess, Vec::new(), Box::new(lexer));
+    panictry!(parser.parse_all_token_trees())
+}
+
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
let mut expr = self.cfg.configure_expr(expr).unwrap();
}
fn fold_block(&mut self, block: P<Block>) -> P<Block> {
- let orig_in_block = mem::replace(&mut self.cx.current_expansion.in_block, true);
+ let no_noninline_mod = mem::replace(&mut self.cx.current_expansion.no_noninline_mod, true);
let result = noop_fold_block(block, self);
- self.cx.current_expansion.in_block = orig_in_block;
+ self.cx.current_expansion.no_noninline_mod = no_noninline_mod;
result
}
fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
let item = configure!(self, item);
- let (item, attr) = self.classify_item(item);
+ let (mut item, attr) = self.classify_item(item);
if let Some(attr) = attr {
let item = Annotatable::Item(fully_configure!(self, item, noop_fold_item));
return self.collect_attr(attr, item, ExpansionKind::Items).make_items();
return noop_fold_item(item, self);
}
+ let orig_no_noninline_mod = self.cx.current_expansion.no_noninline_mod;
let mut module = (*self.cx.current_expansion.module).clone();
module.mod_path.push(item.ident);
let inline_module = item.span.contains(inner) || inner == syntax_pos::DUMMY_SP;
if inline_module {
- module.directory.push(&*{
- ::attr::first_attr_value_str_by_name(&item.attrs, "path")
- .unwrap_or(item.ident.name.as_str())
- });
+ if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") {
+ self.cx.current_expansion.no_noninline_mod = false;
+ module.directory.push(&*path);
+ } else {
+ module.directory.push(&*item.ident.name.as_str());
+ }
} else {
+ self.cx.current_expansion.no_noninline_mod = false;
module.directory =
PathBuf::from(self.cx.parse_sess.codemap().span_to_filename(inner));
module.directory.pop();
mem::replace(&mut self.cx.current_expansion.module, Rc::new(module));
let result = noop_fold_item(item, self);
self.cx.current_expansion.module = orig_module;
+ self.cx.current_expansion.no_noninline_mod = orig_no_noninline_mod;
return result;
}
- ast::ItemKind::ExternCrate(..) => {
- // We need to error on `#[macro_use] extern crate` when it isn't at the
- // crate root, because `$crate` won't work properly.
- let is_crate_root = self.cx.current_expansion.module.mod_path.len() == 1;
- for def in self.cx.resolver.load_crate(&*item, is_crate_root) {
- match def {
- LoadedMacro::Def(def) => self.cx.insert_macro(def),
- LoadedMacro::CustomDerive(name, ext) => {
- self.cx.insert_custom_derive(&name, ext, item.span);
- }
- }
+ // Ensure that test functions are accessible from the test harness.
+ ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => {
+ if item.attrs.iter().any(|attr| is_test_or_bench(attr)) {
+ item = item.map(|mut item| { item.vis = ast::Visibility::Public; item });
}
noop_fold_item(item, self)
- },
+ }
_ => noop_fold_item(item, self),
}
}
/// Take a `ExtCtxt`, `Span`, and `TokenStream`, and produce a Macro Result that parses
/// the TokenStream as a block and returns it as an `Expr`.
-pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, sp: Span, output: TokenStream)
+pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt,
+ sp: Span,
+ output: TokenStream)
-> Box<MacResult + 'cx> {
let parser = cx.new_parser_from_tts(&output.to_tts());
}
pub mod prelude {
- pub use ext::proc_macro_shim::build_block_emitter;
+ pub use super::build_block_emitter;
pub use ast::Ident;
pub use codemap::{DUMMY_SP, Span};
pub use ext::base::{ExtCtxt, MacResult};
use syntax_pos::{Span, DUMMY_SP};
use ext::base::{DummyResult, ExtCtxt, MacEager, MacResult, SyntaxExtension};
use ext::base::{IdentMacroExpander, NormalTT, TTMacroExpander};
+use ext::expand::{Expansion, ExpansionKind};
use ext::placeholders;
use ext::tt::macro_parser::{Success, Error, Failure};
use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
use ext::tt::macro_parser::parse;
+use parse::ParseSess;
use parse::lexer::new_tt_reader;
use parse::parser::{Parser, Restrictions};
use parse::token::{self, gensym_ident, NtTT, Token};
use parse::token::Token::*;
use print;
-use ptr::P;
use tokenstream::{self, TokenTree};
-use util::small_vector::SmallVector;
-
-use std::cell::RefCell;
use std::collections::{HashMap};
use std::collections::hash_map::{Entry};
use std::rc::Rc;
-struct ParserAnyMacro<'a> {
- parser: RefCell<Parser<'a>>,
+pub struct ParserAnyMacro<'a> {
+ parser: Parser<'a>,
/// Span of the expansion site of the macro this parser is for
site_span: Span,
}
impl<'a> ParserAnyMacro<'a> {
- /// Make sure we don't have any tokens left to parse, so we don't
- /// silently drop anything. `allow_semi` is so that "optional"
- /// semicolons at the end of normal expressions aren't complained
- /// about e.g. the semicolon in `macro_rules! kapow { () => {
- /// panic!(); } }` doesn't get picked up by .parse_expr(), but it's
- /// allowed to be there.
- fn ensure_complete_parse(&self, allow_semi: bool, context: &str) {
- let mut parser = self.parser.borrow_mut();
- if allow_semi && parser.token == token::Semi {
+ pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: ExpansionKind) -> Expansion {
+ let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self;
+ let expansion = panictry!(parser.parse_expansion(kind, true));
+
+ // We allow semicolons at the end of expressions -- e.g. the semicolon in
+ // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`,
+ // but `m!()` is allowed in expression positions (c.f. issue #34706).
+ if kind == ExpansionKind::Expr && parser.token == token::Semi {
parser.bump();
}
- if parser.token != token::Eof {
- let token_str = parser.this_token_to_string();
- let msg = format!("macro expansion ignores token `{}` and any \
- following",
- token_str);
- let span = parser.span;
- let mut err = parser.diagnostic().struct_span_err(span, &msg[..]);
- let msg = format!("caused by the macro expansion here; the usage \
- of `{}!` is likely invalid in {} context",
- self.macro_ident, context);
- err.span_note(self.site_span, &msg[..])
- .emit();
- }
- }
-}
-
-impl<'a> MacResult for ParserAnyMacro<'a> {
- fn make_expr(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Expr>> {
- let ret = panictry!(self.parser.borrow_mut().parse_expr());
- self.ensure_complete_parse(true, "expression");
- Some(ret)
- }
- fn make_pat(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Pat>> {
- let ret = panictry!(self.parser.borrow_mut().parse_pat());
- self.ensure_complete_parse(false, "pattern");
- Some(ret)
- }
- fn make_items(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Item>>> {
- let mut ret = SmallVector::zero();
- while let Some(item) = panictry!(self.parser.borrow_mut().parse_item()) {
- ret.push(item);
- }
- self.ensure_complete_parse(false, "item");
- Some(ret)
- }
-
- fn make_impl_items(self: Box<ParserAnyMacro<'a>>)
- -> Option<SmallVector<ast::ImplItem>> {
- let mut ret = SmallVector::zero();
- loop {
- let mut parser = self.parser.borrow_mut();
- match parser.token {
- token::Eof => break,
- _ => ret.push(panictry!(parser.parse_impl_item()))
- }
- }
- self.ensure_complete_parse(false, "item");
- Some(ret)
- }
-
- fn make_trait_items(self: Box<ParserAnyMacro<'a>>)
- -> Option<SmallVector<ast::TraitItem>> {
- let mut ret = SmallVector::zero();
- loop {
- let mut parser = self.parser.borrow_mut();
- match parser.token {
- token::Eof => break,
- _ => ret.push(panictry!(parser.parse_trait_item()))
- }
- }
- self.ensure_complete_parse(false, "item");
- Some(ret)
- }
-
-
- fn make_stmts(self: Box<ParserAnyMacro<'a>>)
- -> Option<SmallVector<ast::Stmt>> {
- let mut ret = SmallVector::zero();
- loop {
- let mut parser = self.parser.borrow_mut();
- match parser.token {
- token::Eof => break,
- _ => match parser.parse_full_stmt(true) {
- Ok(maybe_stmt) => match maybe_stmt {
- Some(stmt) => ret.push(stmt),
- None => (),
- },
- Err(mut e) => {
- e.emit();
- break;
- }
- }
- }
- }
- self.ensure_complete_parse(false, "statement");
- Some(ret)
- }
- fn make_ty(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Ty>> {
- let ret = panictry!(self.parser.borrow_mut().parse_ty());
- self.ensure_complete_parse(false, "type");
- Some(ret)
+ // Make sure we don't have any tokens left to parse so we don't silently drop anything.
+ parser.ensure_complete_parse(macro_ident.name, kind.name(), site_span);
+ expansion
}
}
_ => cx.span_bug(sp, "malformed macro rhs"),
};
// rhs has holes ( `$id` and `$(...)` that need filled)
- let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
+ let trncbr = new_tt_reader(&cx.parse_sess.span_diagnostic,
Some(named_matches),
imported_from,
rhs);
let mut p = Parser::new(cx.parse_sess(), cx.cfg(), Box::new(trncbr));
p.directory = cx.current_expansion.module.directory.clone();
- p.restrictions = match cx.current_expansion.in_block {
+ p.restrictions = match cx.current_expansion.no_noninline_mod {
true => Restrictions::NO_NONINLINE_MOD,
false => Restrictions::empty(),
};
// Let the context choose how to interpret the result.
// Weird, but useful for X-macros.
return Box::new(ParserAnyMacro {
- parser: RefCell::new(p),
+ parser: p,
// Pass along the original expansion site and the name of the macro
// so we can print a useful error message if the parse of the expanded
attrs: attrs,
};
- cx.insert_macro(def.clone());
-
// If keep_macs is true, expands to a MacEager::items instead.
- if cx.ecfg.keep_macs {
+ let result = if cx.ecfg.keep_macs {
MacEager::items(placeholders::reconstructed_macro_rules(&def).make_items())
} else {
MacEager::items(placeholders::macro_scope_placeholder().make_items())
- }
+ };
+
+ cx.resolver.add_macro(cx.current_expansion.mark, def);
+ result
}
}
// Holy self-referential!
/// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
- def: &ast::MacroDef) -> SyntaxExtension {
-
+pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
let lhs_nm = gensym_ident("lhs");
let rhs_nm = gensym_ident("rhs");
];
// Parse the macro_rules! invocation (`none` is for no interpolations):
- let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic,
- None,
- None,
- def.body.clone());
-
- let argument_map = match parse(cx.parse_sess(),
- cx.cfg(),
- arg_reader,
- &argument_gram) {
+ let arg_reader = new_tt_reader(&sess.span_diagnostic, None, None, def.body.clone());
+
+ let argument_map = match parse(sess, Vec::new(), arg_reader, &argument_gram) {
Success(m) => m,
Failure(sp, str) | Error(sp, str) => {
- panic!(cx.parse_sess().span_diagnostic
- .span_fatal(sp.substitute_dummy(def.span), &str[..]));
+ panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &str));
}
};
MatchedSeq(ref s, _) => {
s.iter().map(|m| match **m {
MatchedNonterminal(NtTT(ref tt)) => {
- valid &= check_lhs_nt_follows(cx, tt);
+ valid &= check_lhs_nt_follows(sess, tt);
(**tt).clone()
}
- _ => cx.span_bug(def.span, "wrong-structured lhs")
- }).collect()
+ _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
+ }).collect::<Vec<TokenTree>>()
}
- _ => cx.span_bug(def.span, "wrong-structured lhs")
+ _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
};
let rhses = match **argument_map.get(&rhs_nm).unwrap() {
MatchedSeq(ref s, _) => {
s.iter().map(|m| match **m {
MatchedNonterminal(NtTT(ref tt)) => (**tt).clone(),
- _ => cx.span_bug(def.span, "wrong-structured rhs")
+ _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
}).collect()
}
- _ => cx.span_bug(def.span, "wrong-structured rhs")
+ _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
};
for rhs in &rhses {
- valid &= check_rhs(cx, rhs);
+ valid &= check_rhs(sess, rhs);
+ }
+
+ // don't abort iteration early, so that errors for multiple lhses can be reported
+ for lhs in &lhses {
+ valid &= check_lhs_no_empty_seq(sess, &[lhs.clone()])
}
let exp: Box<_> = Box::new(MacroRulesMacroExpander {
NormalTT(exp, Some(def.span), def.allow_internal_unstable)
}
-fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &TokenTree) -> bool {
+fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
match lhs {
- &TokenTree::Delimited(_, ref tts) => check_matcher(cx, &tts.tts),
+ &TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts),
_ => {
- cx.span_err(lhs.get_span(), "invalid macro matcher; matchers must \
- be contained in balanced delimiters");
+ let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
+ sess.span_diagnostic.span_err(lhs.get_span(), msg);
false
}
}
// after parsing/expansion. we can report every error in every macro this way.
}
-fn check_rhs(cx: &mut ExtCtxt, rhs: &TokenTree) -> bool {
+/// Check that the lhs contains no repetition which could match an empty token
+/// tree, because then the matcher would hang indefinitely.
+fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
+ for tt in tts {
+ match *tt {
+ TokenTree::Token(_, _) => (),
+ TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) {
+ return false;
+ },
+ TokenTree::Sequence(span, ref seq) => {
+ if seq.separator.is_none() {
+ if seq.tts.iter().all(|seq_tt| {
+ match *seq_tt {
+ TokenTree::Sequence(_, ref sub_seq) =>
+ sub_seq.op == tokenstream::KleeneOp::ZeroOrMore,
+ _ => false,
+ }
+ }) {
+ sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+ return false;
+ }
+ }
+ if !check_lhs_no_empty_seq(sess, &seq.tts) {
+ return false;
+ }
+ }
+ }
+ }
+
+ true
+}
+
+fn check_rhs(sess: &ParseSess, rhs: &TokenTree) -> bool {
match *rhs {
TokenTree::Delimited(..) => return true,
- _ => cx.span_err(rhs.get_span(), "macro rhs must be delimited")
+ _ => sess.span_diagnostic.span_err(rhs.get_span(), "macro rhs must be delimited")
}
false
}
-fn check_matcher(cx: &mut ExtCtxt, matcher: &[TokenTree]) -> bool {
+fn check_matcher(sess: &ParseSess, matcher: &[TokenTree]) -> bool {
let first_sets = FirstSets::new(matcher);
let empty_suffix = TokenSet::empty();
- let err = cx.parse_sess.span_diagnostic.err_count();
- check_matcher_core(cx, &first_sets, matcher, &empty_suffix);
- err == cx.parse_sess.span_diagnostic.err_count()
+ let err = sess.span_diagnostic.err_count();
+ check_matcher_core(sess, &first_sets, matcher, &empty_suffix);
+ err == sess.span_diagnostic.err_count()
}
// The FirstSets for a matcher is a mapping from subsequences in the
//
// Requires that `first_sets` is pre-computed for `matcher`;
// see `FirstSets::new`.
-fn check_matcher_core(cx: &mut ExtCtxt,
+fn check_matcher_core(sess: &ParseSess,
first_sets: &FirstSets,
matcher: &[TokenTree],
follow: &TokenSet) -> TokenSet {
TokenTree::Token(sp, ref tok) => {
let can_be_followed_by_any;
if let Err(bad_frag) = has_legal_fragment_specifier(tok) {
- cx.struct_span_err(sp, &format!("invalid fragment specifier `{}`", bad_frag))
+ let msg = format!("invalid fragment specifier `{}`", bad_frag);
+ sess.span_diagnostic.struct_span_err(sp, &msg)
.help("valid fragment specifiers are `ident`, `block`, \
`stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
and `item`")
}
TokenTree::Delimited(_, ref d) => {
let my_suffix = TokenSet::singleton((d.close_span, Token::CloseDelim(d.delim)));
- check_matcher_core(cx, first_sets, &d.tts, &my_suffix);
+ check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
// don't track non NT tokens
last.replace_with_irrelevant();
// At this point, `suffix_first` is built, and
// `my_suffix` is some TokenSet that we can use
// for checking the interior of `seq_rep`.
- let next = check_matcher_core(cx, first_sets, &seq_rep.tts, my_suffix);
+ let next = check_matcher_core(sess, first_sets, &seq_rep.tts, my_suffix);
if next.maybe_empty {
last.add_all(&next);
} else {
'each_last: for &(_sp, ref t) in &last.tokens {
if let MatchNt(ref name, ref frag_spec) = *t {
for &(sp, ref next_token) in &suffix_first.tokens {
- match is_in_follow(cx, next_token, &frag_spec.name.as_str()) {
+ match is_in_follow(next_token, &frag_spec.name.as_str()) {
Err((msg, help)) => {
- cx.struct_span_err(sp, &msg).help(help).emit();
+ sess.span_diagnostic.struct_span_err(sp, &msg).help(help).emit();
// don't bother reporting every source of
// conflict for a particular element of `last`.
continue 'each_last;
"may be"
};
- cx.span_err(
+ sess.span_diagnostic.span_err(
sp,
&format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
is not allowed for `{frag}` fragments",
/// break macros that were relying on that binary operator as a
/// separator.
// when changing this do not forget to update doc/book/macros.md!
-fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> {
+fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> {
if let &CloseDelim(_) = tok {
// closing a token tree can never be matched by any fragment;
// iow, we always require that `(` and `)` match, etc.
use parse::token::InternedString;
use std::ascii::AsciiExt;
+use std::env;
macro_rules! setter {
($field: ident) => {{
is just used for rustc unit tests \
and will never be stable",
cfg_fn!(rustc_attrs))),
+ ("rustc_metadata_dirty", Whitelisted, Gated("rustc_attrs",
+ "the `#[rustc_metadata_dirty]` attribute \
+ is just used for rustc unit tests \
+ and will never be stable",
+ cfg_fn!(rustc_attrs))),
+ ("rustc_metadata_clean", Whitelisted, Gated("rustc_attrs",
+ "the `#[rustc_metadata_clean]` attribute \
+ is just used for rustc unit tests \
+ and will never be stable",
+ cfg_fn!(rustc_attrs))),
("rustc_partition_reused", Whitelisted, Gated("rustc_attrs",
"this attribute \
is just used for rustc unit tests \
pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) {
let (cfg, feature, has_feature) = GATED_CFGS[self.index];
if !has_feature(features) && !sess.codemap().span_allows_unstable(self.span) {
- let diagnostic = &sess.span_diagnostic;
let explain = format!("`cfg({})` is experimental and subject to change", cfg);
- emit_feature_err(diagnostic, feature, self.span, GateIssue::Language, &explain);
+ emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain);
}
}
}
struct Context<'a> {
features: &'a Features,
- span_handler: &'a Handler,
+ parse_sess: &'a ParseSess,
cm: &'a CodeMap,
plugin_attributes: &'a [(String, AttributeType)],
}
let has_feature: bool = has_feature(&$cx.features);
debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature);
if !has_feature && !cx.cm.span_allows_unstable(span) {
- emit_feature_err(cx.span_handler, name, span, GateIssue::Language, explain);
+ emit_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain);
}
}}
}
}
}
-pub fn check_attribute(attr: &ast::Attribute, handler: &Handler,
+pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess,
cm: &CodeMap, features: &Features) {
let cx = Context {
- features: features, span_handler: handler,
+ features: features, parse_sess: parse_sess,
cm: cm, plugin_attributes: &[]
};
cx.check_attribute(attr, true);
Library(Option<u32>)
}
-pub fn emit_feature_err(diag: &Handler, feature: &str, span: Span, issue: GateIssue,
+pub fn emit_feature_err(sess: &ParseSess, feature: &str, span: Span, issue: GateIssue,
explain: &str) {
+ let diag = &sess.span_diagnostic;
+
let issue = match issue {
GateIssue::Language => find_lang_feature_issue(feature),
GateIssue::Library(lib) => lib,
};
// #23973: do not suggest `#![feature(...)]` if we are in beta/stable
- if option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some() {
- err.emit();
- return;
+ if sess.unstable_features.is_nightly_build() {
+ err.help(&format!("add #![feature({})] to the \
+ crate attributes to enable",
+ feature));
}
- err.help(&format!("add #![feature({})] to the \
- crate attributes to enable",
- feature));
+
err.emit();
}
if attr::contains_name(&i.attrs[..], "simd") {
gate_feature_post!(&self, simd, i.span,
"SIMD types are experimental and possibly buggy");
- self.context.span_handler.span_warn(i.span,
- "the `#[simd]` attribute is deprecated, \
- use `#[repr(simd)]` instead");
+ self.context.parse_sess.span_diagnostic.span_warn(i.span,
+ "the `#[simd]` attribute \
+ is deprecated, use \
+ `#[repr(simd)]` instead");
}
for attr in &i.attrs {
if attr.name() == "repr" {
maybe_stage_features(&sess.span_diagnostic, krate, unstable);
let ctx = Context {
features: features,
- span_handler: &sess.span_diagnostic,
+ parse_sess: sess,
cm: sess.codemap(),
plugin_attributes: plugin_attributes,
};
Cheat
}
+impl UnstableFeatures {
+ pub fn from_environment() -> UnstableFeatures {
+ // Whether this is a feature-staged build, i.e. on the beta or stable channel
+ let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some();
+ // The secret key needed to get through the rustc build itself by
+ // subverting the unstable features lints
+ let bootstrap_secret_key = option_env!("CFG_BOOTSTRAP_KEY");
+ // The matching key to the above, only known by the build system
+ let bootstrap_provided_key = env::var("RUSTC_BOOTSTRAP_KEY").ok();
+ match (disable_unstable_features, bootstrap_secret_key, bootstrap_provided_key) {
+ (_, Some(ref s), Some(ref p)) if s == p => UnstableFeatures::Cheat,
+ (true, _, _) => UnstableFeatures::Disallow,
+ (false, _, _) => UnstableFeatures::Allow
+ }
+ }
+
+ pub fn is_nightly_build(&self) -> bool {
+ match *self {
+ UnstableFeatures::Allow | UnstableFeatures::Cheat => true,
+ _ => false,
+ }
+ }
+}
+
fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate,
unstable: UnstableFeatures) {
let allow_features = match unstable {
pub fn noop_fold_local<T: Folder>(l: P<Local>, fld: &mut T) -> P<Local> {
l.map(|Local {id, pat, ty, init, span, attrs}| Local {
id: fld.new_id(id),
- ty: ty.map(|t| fld.fold_ty(t)),
pat: fld.fold_pat(pat),
+ ty: ty.map(|t| fld.fold_ty(t)),
init: init.map(|e| fld.fold_expr(e)),
span: fld.new_span(span),
attrs: fold_attrs(attrs.into(), fld).into(),
ItemKind::Const(folder.fold_ty(t), folder.fold_expr(e))
}
ItemKind::Fn(decl, unsafety, constness, abi, generics, body) => {
- ItemKind::Fn(
- folder.fold_fn_decl(decl),
- unsafety,
- constness,
- abi,
- folder.fold_generics(generics),
- folder.fold_block(body)
- )
+ let generics = folder.fold_generics(generics);
+ let decl = folder.fold_fn_decl(decl);
+ let body = folder.fold_block(body);
+ ItemKind::Fn(decl, unsafety, constness, abi, generics, body)
}
ItemKind::Mod(m) => ItemKind::Mod(folder.fold_mod(m)),
ItemKind::ForeignMod(nm) => ItemKind::ForeignMod(folder.fold_foreign_mod(nm)),
ItemKind::Ty(folder.fold_ty(t), folder.fold_generics(generics))
}
ItemKind::Enum(enum_definition, generics) => {
- ItemKind::Enum(
- ast::EnumDef {
- variants: enum_definition.variants.move_map(|x| folder.fold_variant(x)),
- },
- folder.fold_generics(generics))
+ let generics = folder.fold_generics(generics);
+ let variants = enum_definition.variants.move_map(|x| folder.fold_variant(x));
+ ItemKind::Enum(ast::EnumDef { variants: variants }, generics)
}
ItemKind::Struct(struct_def, generics) => {
- let struct_def = folder.fold_variant_data(struct_def);
- ItemKind::Struct(struct_def, folder.fold_generics(generics))
+ let generics = folder.fold_generics(generics);
+ ItemKind::Struct(folder.fold_variant_data(struct_def), generics)
}
ItemKind::Union(struct_def, generics) => {
- let struct_def = folder.fold_variant_data(struct_def);
- ItemKind::Union(struct_def, folder.fold_generics(generics))
+ let generics = folder.fold_generics(generics);
+ ItemKind::Union(folder.fold_variant_data(struct_def), generics)
}
ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
ItemKind::DefaultImpl(unsafety, folder.fold_trait_ref((*trait_ref).clone()))
}
- ItemKind::Impl(unsafety, polarity, generics, ifce, ty, impl_items) => {
- let new_impl_items = impl_items.move_flat_map(|item| {
- folder.fold_impl_item(item)
- });
- let ifce = match ifce {
- None => None,
- Some(ref trait_ref) => {
- Some(folder.fold_trait_ref((*trait_ref).clone()))
- }
- };
- ItemKind::Impl(unsafety,
- polarity,
- folder.fold_generics(generics),
- ifce,
- folder.fold_ty(ty),
- new_impl_items)
- }
- ItemKind::Trait(unsafety, generics, bounds, items) => {
- let bounds = folder.fold_bounds(bounds);
- let items = items.move_flat_map(|item| {
- folder.fold_trait_item(item)
- });
- ItemKind::Trait(unsafety,
- folder.fold_generics(generics),
- bounds,
- items)
- }
+ ItemKind::Impl(unsafety, polarity, generics, ifce, ty, impl_items) => ItemKind::Impl(
+ unsafety,
+ polarity,
+ folder.fold_generics(generics),
+ ifce.map(|trait_ref| folder.fold_trait_ref(trait_ref.clone())),
+ folder.fold_ty(ty),
+ impl_items.move_flat_map(|item| folder.fold_impl_item(item)),
+ ),
+ ItemKind::Trait(unsafety, generics, bounds, items) => ItemKind::Trait(
+ unsafety,
+ folder.fold_generics(generics),
+ folder.fold_bounds(bounds),
+ items.move_flat_map(|item| folder.fold_trait_item(item)),
+ ),
ItemKind::Mac(m) => ItemKind::Mac(folder.fold_mac(m)),
}
}
-> SmallVector<ImplItem> {
SmallVector::one(ImplItem {
id: folder.new_id(i.id),
+ vis: folder.fold_vis(i.vis),
ident: folder.fold_ident(i.ident),
attrs: fold_attrs(i.attrs, folder),
- vis: folder.fold_vis(i.vis),
defaultness: i.defaultness,
node: match i.node {
ast::ImplItemKind::Const(ty, expr) => {
// fold one item into exactly one item
pub fn noop_fold_item_simple<T: Folder>(Item {id, ident, attrs, node, vis, span}: Item,
folder: &mut T) -> Item {
- let id = folder.new_id(id);
- let node = folder.fold_item_kind(node);
-
Item {
- id: id,
+ id: folder.new_id(id),
+ vis: folder.fold_vis(vis),
ident: folder.fold_ident(ident),
attrs: fold_attrs(attrs, folder),
- node: node,
- vis: folder.fold_vis(vis),
+ node: folder.fold_item_kind(node),
span: folder.new_span(span)
}
}
pub fn noop_fold_foreign_item<T: Folder>(ni: ForeignItem, folder: &mut T) -> ForeignItem {
ForeignItem {
id: folder.new_id(ni.id),
+ vis: folder.fold_vis(ni.vis),
ident: folder.fold_ident(ni.ident),
attrs: fold_attrs(ni.attrs, folder),
node: match ni.node {
ForeignItemKind::Static(folder.fold_ty(t), m)
}
},
- vis: folder.fold_vis(ni.vis),
span: folder.new_span(ni.span)
}
}
}
impl JsonEmitter {
+ pub fn stderr(registry: Option<Registry>,
+ code_map: Rc<CodeMap>) -> JsonEmitter {
+ JsonEmitter {
+ dst: Box::new(io::stderr()),
+ registry: registry,
+ cm: code_map,
+ }
+ }
+
pub fn basic() -> JsonEmitter {
JsonEmitter::stderr(None, Rc::new(CodeMap::new()))
}
- pub fn stderr(registry: Option<Registry>,
- code_map: Rc<CodeMap>) -> JsonEmitter {
+ pub fn new(dst: Box<Write + Send>,
+ registry: Option<Registry>,
+ code_map: Rc<CodeMap>) -> JsonEmitter {
JsonEmitter {
- dst: Box::new(io::stderr()),
+ dst: dst,
registry: registry,
cm: code_map,
}
#![feature(unicode)]
#![feature(question_mark)]
#![feature(rustc_diagnostic_macros)]
+#![feature(specialization)]
extern crate serialize;
extern crate term;
self.expect(&token::OpenDelim(token::Bracket))?;
let meta_item = self.parse_meta_item()?;
- let hi = self.last_span.hi;
self.expect(&token::CloseDelim(token::Bracket))?;
+ let hi = self.last_span.hi;
(mk_sp(lo, hi), meta_item, style)
}
/// The last character to be read
pub curr: Option<char>,
pub filemap: Rc<syntax_pos::FileMap>,
+ /// If Some, stop reading the source at this position (inclusive).
+ pub terminator: Option<BytePos>,
+ /// Whether to record new-lines in filemap. This is only necessary the first
+ /// time a filemap is lexed. If part of a filemap is being re-lexed, this
+ /// should be set to false.
+ pub save_new_lines: bool,
// cached:
pub peek_tok: token::Token,
pub peek_span: Span,
impl<'a> Reader for StringReader<'a> {
fn is_eof(&self) -> bool {
- self.curr.is_none()
+ if self.curr.is_none() {
+ return true;
+ }
+
+ match self.terminator {
+ Some(t) => self.pos > t,
+ None => false,
+ }
}
/// Return the next token. EFFECT: advances the string_reader.
fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
pub fn new_raw<'b>(span_diagnostic: &'b Handler,
filemap: Rc<syntax_pos::FileMap>)
-> StringReader<'b> {
+ let mut sr = StringReader::new_raw_internal(span_diagnostic, filemap);
+ sr.bump();
+ sr
+ }
+
+ fn new_raw_internal<'b>(span_diagnostic: &'b Handler,
+ filemap: Rc<syntax_pos::FileMap>)
+ -> StringReader<'b> {
if filemap.src.is_none() {
span_diagnostic.bug(&format!("Cannot lex filemap \
without source: {}",
let source_text = (*filemap.src.as_ref().unwrap()).clone();
- let mut sr = StringReader {
+ StringReader {
span_diagnostic: span_diagnostic,
pos: filemap.start_pos,
last_pos: filemap.start_pos,
col: CharPos(0),
curr: Some('\n'),
filemap: filemap,
+ terminator: None,
+ save_new_lines: true,
// dummy values; not read
peek_tok: token::Eof,
peek_span: syntax_pos::DUMMY_SP,
source_text: source_text,
fatal_errs: Vec::new(),
- };
- sr.bump();
- sr
+ }
}
pub fn new<'b>(span_diagnostic: &'b Handler,
self.curr = Some(ch);
self.col = self.col + CharPos(1);
if last_char == '\n' {
- self.filemap.next_line(self.last_pos);
+ if self.save_new_lines {
+ self.filemap.next_line(self.last_pos);
+ }
self.col = CharPos(0);
}
use codemap::CodeMap;
use syntax_pos::{self, Span, FileMap};
use errors::{Handler, ColorConfig, DiagnosticBuilder};
+use feature_gate::UnstableFeatures;
use parse::parser::Parser;
use parse::token::InternedString;
use ptr::P;
/// Info about a parsing session.
pub struct ParseSess {
pub span_diagnostic: Handler, // better be the same as the one in the reader!
+ pub unstable_features: UnstableFeatures,
/// Used to determine and report recursive mod inclusions
included_mod_stack: RefCell<Vec<PathBuf>>,
code_map: Rc<CodeMap>,
pub fn with_span_handler(handler: Handler, code_map: Rc<CodeMap>) -> ParseSess {
ParseSess {
span_diagnostic: handler,
+ unstable_features: UnstableFeatures::from_environment(),
included_mod_stack: RefCell::new(vec![]),
code_map: code_map
}
lhs
}
+#[derive(PartialEq)]
+enum LastTokenKind {
+ DocComment,
+ Comma,
+ Interpolated,
+ Eof,
+ Other,
+}
+
/* ident is handled by common.rs */
pub struct Parser<'a> {
/// the span of the prior token:
pub last_span: Span,
pub cfg: CrateConfig,
- /// the previous token or None (only stashed sometimes).
- pub last_token: Option<Box<token::Token>>,
- last_token_interpolated: bool,
- last_token_eof: bool,
+ /// the previous token kind
+ last_token_kind: LastTokenKind,
pub buffer: [TokenAndSpan; 4],
pub buffer_start: isize,
pub buffer_end: isize,
token: tok0.tok,
span: span,
last_span: span,
- last_token: None,
- last_token_interpolated: false,
- last_token_eof: false,
+ last_token_kind: LastTokenKind::Other,
buffer: [
placeholder.clone(),
placeholder.clone(),
expr: PResult<'a, P<Expr>>)
-> PResult<'a, (Span, P<Expr>)> {
expr.map(|e| {
- if self.last_token_interpolated {
+ if self.last_token_kind == LastTokenKind::Interpolated {
(self.last_span, e)
} else {
(e.span, e)
self.bug("ident interpolation not converted to real token");
}
_ => {
- let last_token = self.last_token.clone().map(|t| *t);
- Err(match last_token {
- Some(token::DocComment(_)) => self.span_fatal_help(self.last_span,
+ Err(if self.last_token_kind == LastTokenKind::DocComment {
+ self.span_fatal_help(self.last_span,
"found a documentation comment that doesn't document anything",
"doc comments must come before what they document, maybe a comment was \
- intended with `//`?"),
- _ => {
+ intended with `//`?")
+ } else {
let mut err = self.fatal(&format!("expected identifier, found `{}`",
self.this_token_to_string()));
if self.token == token::Underscore {
err.note("`_` is a wildcard pattern, not an identifier");
}
err
- }
- })
+ })
}
}
}
- fn parse_ident_into_path(&mut self) -> PResult<'a, ast::Path> {
- let ident = self.parse_ident()?;
- Ok(ast::Path::from_ident(self.last_span, ident))
- }
-
/// Check if the next token is `tok`, and return `true` if so.
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
pub fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
+ let handler = self.diagnostic();
+
self.parse_seq_to_before_tokens(kets,
SeqSep::none(),
|p| p.parse_token_tree(),
- |mut e| e.cancel());
+ |mut e| handler.cancel(&mut e));
}
/// Parse a sequence, including the closing delimiter. The function
/// Advance the parser by one token
pub fn bump(&mut self) {
- if self.last_token_eof {
+ if self.last_token_kind == LastTokenKind::Eof {
// Bumping after EOF is a bad sign, usually an infinite loop.
self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
}
- if self.token == token::Eof {
- self.last_token_eof = true;
- }
-
self.last_span = self.span;
- // Stash token for error recovery (sometimes; clone is not necessarily cheap).
- self.last_token = if self.token.is_ident() ||
- self.token.is_path() ||
- self.token.is_doc_comment() ||
- self.token == token::Comma {
- Some(Box::new(self.token.clone()))
- } else {
- None
+
+ // Record last token kind for possible error recovery.
+ self.last_token_kind = match self.token {
+ token::DocComment(..) => LastTokenKind::DocComment,
+ token::Comma => LastTokenKind::Comma,
+ token::Interpolated(..) => LastTokenKind::Interpolated,
+ token::Eof => LastTokenKind::Eof,
+ _ => LastTokenKind::Other,
};
- self.last_token_interpolated = self.token.is_interpolated();
+
let next = if self.buffer_start == self.buffer_end {
self.reader.real_token()
} else {
lo: BytePos,
hi: BytePos) {
self.last_span = mk_sp(self.span.lo, lo);
- // It would be incorrect to just stash current token, but fortunately
- // for tokens currently using `bump_with`, last_token will be of no
- // use anyway.
- self.last_token = None;
- self.last_token_interpolated = false;
+ // It would be incorrect to record the kind of the current token, but
+ // fortunately for tokens currently using `bump_with`, the
+ // last_token_kind will be of no use anyway.
+ self.last_token_kind = LastTokenKind::Other;
self.span = mk_sp(lo, hi);
self.token = next;
self.expected_tokens.clear();
self.sess.span_diagnostic.abort_if_errors();
}
+ fn cancel(&self, err: &mut DiagnosticBuilder) {
+ self.sess.span_diagnostic.cancel(err)
+ }
+
pub fn diagnostic(&self) -> &'a errors::Handler {
&self.sess.span_diagnostic
}
None
};
(ident, TraitItemKind::Const(ty, default))
- } else if !self.token.is_any_keyword()
- && self.look_ahead(1, |t| *t == token::Not)
- && (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
- || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
- // trait item macro.
- // code copied from parse_macro_use_or_failure... abstraction!
- let lo = self.span.lo;
- let pth = self.parse_ident_into_path()?;
- self.expect(&token::Not)?;
+ } else if self.token.is_path_start() {
+ // trait item macro.
+ // code copied from parse_macro_use_or_failure... abstraction!
+ let lo = self.span.lo;
+ let pth = self.parse_path(PathStyle::Mod)?;
+ self.expect(&token::Not)?;
- // eat a matched-delimiter token tree:
- let delim = self.expect_open_delim()?;
- let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
- SeqSep::none(),
- |pp| pp.parse_token_tree())?;
- let m_ = Mac_ { path: pth, tts: tts };
- let m: ast::Mac = codemap::Spanned { node: m_,
- span: mk_sp(lo,
- self.last_span.hi) };
- if delim != token::Brace {
- self.expect(&token::Semi)?
- }
- (keywords::Invalid.ident(), ast::TraitItemKind::Macro(m))
- } else {
- let (constness, unsafety, abi) = match self.parse_fn_front_matter() {
- Ok(cua) => cua,
- Err(e) => {
- loop {
- match self.token {
- token::Eof => break,
- token::CloseDelim(token::Brace) |
- token::Semi => {
- self.bump();
- break;
- }
- token::OpenDelim(token::Brace) => {
- self.parse_token_tree()?;
- break;
- }
- _ => self.bump()
+ // eat a matched-delimiter token tree:
+ let delim = self.expect_open_delim()?;
+ let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
+ SeqSep::none(),
+ |pp| pp.parse_token_tree())?;
+ if delim != token::Brace {
+ self.expect(&token::Semi)?
+ }
+
+ let mac = spanned(lo, self.last_span.hi, Mac_ { path: pth, tts: tts });
+ (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac))
+ } else {
+ let (constness, unsafety, abi) = match self.parse_fn_front_matter() {
+ Ok(cua) => cua,
+ Err(e) => {
+ loop {
+ match self.token {
+ token::Eof => break,
+ token::CloseDelim(token::Brace) |
+ token::Semi => {
+ self.bump();
+ break;
+ }
+ token::OpenDelim(token::Brace) => {
+ self.parse_token_tree()?;
+ break;
}
+ _ => self.bump(),
}
-
- return Err(e);
}
- };
- let ident = self.parse_ident()?;
- let mut generics = self.parse_generics()?;
+ return Err(e);
+ }
+ };
- let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>|{
- // This is somewhat dubious; We don't want to allow
- // argument names to be left off if there is a
- // definition...
- p.parse_arg_general(false)
- })?;
+ let ident = self.parse_ident()?;
+ let mut generics = self.parse_generics()?;
- generics.where_clause = self.parse_where_clause()?;
- let sig = ast::MethodSig {
- unsafety: unsafety,
- constness: constness,
- decl: d,
- generics: generics,
- abi: abi,
- };
+ let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>|{
+ // This is somewhat dubious; We don't want to allow
+ // argument names to be left off if there is a
+ // definition...
+ p.parse_arg_general(false)
+ })?;
- let body = match self.token {
- token::Semi => {
- self.bump();
- debug!("parse_trait_methods(): parsing required method");
- None
- }
- token::OpenDelim(token::Brace) => {
- debug!("parse_trait_methods(): parsing provided method");
- let (inner_attrs, body) =
- self.parse_inner_attrs_and_block()?;
- attrs.extend(inner_attrs.iter().cloned());
- Some(body)
- }
+ generics.where_clause = self.parse_where_clause()?;
+ let sig = ast::MethodSig {
+ unsafety: unsafety,
+ constness: constness,
+ decl: d,
+ generics: generics,
+ abi: abi,
+ };
- _ => {
- let token_str = self.this_token_to_string();
- return Err(self.fatal(&format!("expected `;` or `{{`, found `{}`",
- token_str)[..]))
- }
- };
- (ident, ast::TraitItemKind::Method(sig, body))
+ let body = match self.token {
+ token::Semi => {
+ self.bump();
+ debug!("parse_trait_methods(): parsing required method");
+ None
+ }
+ token::OpenDelim(token::Brace) => {
+ debug!("parse_trait_methods(): parsing provided method");
+ let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(inner_attrs.iter().cloned());
+ Some(body)
+ }
+ _ => {
+ let token_str = self.this_token_to_string();
+ return Err(self.fatal(&format!("expected `;` or `{{`, found `{}`", token_str)));
+ }
};
+ (ident, ast::TraitItemKind::Method(sig, body))
+ };
+
Ok(TraitItem {
id: ast::DUMMY_NODE_ID,
ident: name,
TyKind::Path(Some(qself), path)
} else if self.token.is_path_start() {
let path = self.parse_path(PathStyle::Type)?;
- if self.check(&token::Not) {
+ if self.eat(&token::Not) {
// MACRO INVOCATION
- self.bump();
let delim = self.expect_open_delim()?;
let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
SeqSep::none(),
let pth = self.parse_path(PathStyle::Expr)?;
// `!`, as an operator, is prefix, so we know this isn't that
- if self.check(&token::Not) {
+ if self.eat(&token::Not) {
// MACRO INVOCATION expression
- self.bump();
-
let delim = self.expect_open_delim()?;
- let tts = self.parse_seq_to_end(
- &token::CloseDelim(delim),
- SeqSep::none(),
- |p| p.parse_token_tree())?;
+ let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
+ SeqSep::none(),
+ |p| p.parse_token_tree())?;
let hi = self.last_span.hi;
-
- return Ok(self.mk_mac_expr(lo,
- hi,
- Mac_ { path: pth, tts: tts },
- attrs));
+ return Ok(self.mk_mac_expr(lo, hi, Mac_ { path: pth, tts: tts }, attrs));
}
if self.check(&token::OpenDelim(token::Brace)) {
// This is a struct literal, unless we're prohibited
Restrictions::RESTRICTION_NO_STRUCT_LITERAL
);
if !prohibited {
- // It's a struct literal.
- self.bump();
- let mut fields = Vec::new();
- let mut base = None;
-
- attrs.extend(self.parse_inner_attributes()?);
-
- while self.token != token::CloseDelim(token::Brace) {
- if self.eat(&token::DotDot) {
- match self.parse_expr() {
- Ok(e) => {
- base = Some(e);
- }
- Err(mut e) => {
- e.emit();
- self.recover_stmt();
- }
- }
- break;
- }
-
- match self.parse_field() {
- Ok(f) => fields.push(f),
- Err(mut e) => {
- e.emit();
- self.recover_stmt();
- break;
- }
- }
-
- match self.expect_one_of(&[token::Comma],
- &[token::CloseDelim(token::Brace)]) {
- Ok(()) => {}
- Err(mut e) => {
- e.emit();
- self.recover_stmt();
- break;
- }
- }
- }
-
- hi = self.span.hi;
- self.expect(&token::CloseDelim(token::Brace))?;
- ex = ExprKind::Struct(pth, fields, base);
- return Ok(self.mk_expr(lo, hi, ex, attrs));
+ return self.parse_struct_expr(lo, pth, attrs);
}
}
ex = ExprKind::Lit(P(lit));
}
Err(mut err) => {
- err.cancel();
+ self.cancel(&mut err);
let msg = format!("expected expression, found {}",
self.this_token_descr());
return Err(self.fatal(&msg));
return Ok(self.mk_expr(lo, hi, ex, attrs));
}
+ fn parse_struct_expr(&mut self, lo: BytePos, pth: ast::Path, mut attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>> {
+ self.bump();
+ let mut fields = Vec::new();
+ let mut base = None;
+
+ attrs.extend(self.parse_inner_attributes()?);
+
+ while self.token != token::CloseDelim(token::Brace) {
+ if self.eat(&token::DotDot) {
+ match self.parse_expr() {
+ Ok(e) => {
+ base = Some(e);
+ }
+ Err(mut e) => {
+ e.emit();
+ self.recover_stmt();
+ }
+ }
+ break;
+ }
+
+ match self.parse_field() {
+ Ok(f) => fields.push(f),
+ Err(mut e) => {
+ e.emit();
+ self.recover_stmt();
+ break;
+ }
+ }
+
+ match self.expect_one_of(&[token::Comma],
+ &[token::CloseDelim(token::Brace)]) {
+ Ok(()) => {}
+ Err(mut e) => {
+ e.emit();
+ self.recover_stmt();
+ break;
+ }
+ }
+ }
+
+ let hi = self.span.hi;
+ self.expect(&token::CloseDelim(token::Brace))?;
+ return Ok(self.mk_expr(lo, hi, ExprKind::Struct(pth, fields, base), attrs));
+ }
+
fn parse_or_use_outer_attributes(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, ThinVec<Attribute>> {
self.expected_tokens.push(TokenType::Operator);
while let Some(op) = AssocOp::from_token(&self.token) {
- let lhs_span = if self.last_token_interpolated {
+ let lhs_span = if self.last_token_kind == LastTokenKind::Interpolated {
self.last_span
} else {
lhs.span
let lo = self.span.lo;
let pat;
match self.token {
- token::Underscore => {
- // Parse _
- self.bump();
- pat = PatKind::Wild;
- }
- token::BinOp(token::And) | token::AndAnd => {
- // Parse &pat / &mut pat
- self.expect_and()?;
- let mutbl = self.parse_mutability()?;
- if let token::Lifetime(ident) = self.token {
- return Err(self.fatal(&format!("unexpected lifetime `{}` in pattern", ident)));
+ token::Underscore => {
+ // Parse _
+ self.bump();
+ pat = PatKind::Wild;
+ }
+ token::BinOp(token::And) | token::AndAnd => {
+ // Parse &pat / &mut pat
+ self.expect_and()?;
+ let mutbl = self.parse_mutability()?;
+ if let token::Lifetime(ident) = self.token {
+ return Err(self.fatal(&format!("unexpected lifetime `{}` in pattern", ident)));
+ }
+ let subpat = self.parse_pat()?;
+ pat = PatKind::Ref(subpat, mutbl);
+ }
+ token::OpenDelim(token::Paren) => {
+ // Parse (pat,pat,pat,...) as tuple pattern
+ self.bump();
+ let (fields, ddpos) = self.parse_pat_tuple_elements(true)?;
+ self.expect(&token::CloseDelim(token::Paren))?;
+ pat = PatKind::Tuple(fields, ddpos);
+ }
+ token::OpenDelim(token::Bracket) => {
+ // Parse [pat,pat,...] as slice pattern
+ self.bump();
+ let (before, slice, after) = self.parse_pat_vec_elements()?;
+ self.expect(&token::CloseDelim(token::Bracket))?;
+ pat = PatKind::Vec(before, slice, after);
}
-
- let subpat = self.parse_pat()?;
- pat = PatKind::Ref(subpat, mutbl);
- }
- token::OpenDelim(token::Paren) => {
- // Parse (pat,pat,pat,...) as tuple pattern
- self.bump();
- let (fields, ddpos) = self.parse_pat_tuple_elements(true)?;
- self.expect(&token::CloseDelim(token::Paren))?;
- pat = PatKind::Tuple(fields, ddpos);
- }
- token::OpenDelim(token::Bracket) => {
- // Parse [pat,pat,...] as slice pattern
- self.bump();
- let (before, slice, after) = self.parse_pat_vec_elements()?;
- self.expect(&token::CloseDelim(token::Bracket))?;
- pat = PatKind::Vec(before, slice, after);
- }
- _ => {
// At this point, token != _, &, &&, (, [
- if self.eat_keyword(keywords::Mut) {
+ _ => if self.eat_keyword(keywords::Mut) {
// Parse mut ident @ pat
pat = self.parse_pat_ident(BindingMode::ByValue(Mutability::Mutable))?;
} else if self.eat_keyword(keywords::Ref) {
// Parse box pat
let subpat = self.parse_pat()?;
pat = PatKind::Box(subpat);
+ } else if self.token.is_ident() && self.token.is_path_start() &&
+ self.look_ahead(1, |t| match *t {
+ token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) |
+ token::DotDotDot | token::ModSep | token::Not => false,
+ _ => true,
+ }) {
+ // Parse ident @ pat
+ // This can give false positives and parse nullary enums,
+ // they are dealt with later in resolve
+ let binding_mode = BindingMode::ByValue(Mutability::Immutable);
+ pat = self.parse_pat_ident(binding_mode)?;
} else if self.token.is_path_start() {
// Parse pattern starting with a path
- if self.token.is_ident() && self.look_ahead(1, |t| *t != token::DotDotDot &&
- *t != token::OpenDelim(token::Brace) &&
- *t != token::OpenDelim(token::Paren) &&
- *t != token::ModSep) {
- // Plain idents have some extra abilities here compared to general paths
- if self.look_ahead(1, |t| *t == token::Not) {
+ let (qself, path) = if self.eat_lt() {
+ // Parse a qualified path
+ let (qself, path) = self.parse_qualified_path(PathStyle::Expr)?;
+ (Some(qself), path)
+ } else {
+ // Parse an unqualified path
+ (None, self.parse_path(PathStyle::Expr)?)
+ };
+ match self.token {
+ token::Not if qself.is_none() => {
// Parse macro invocation
- let path = self.parse_ident_into_path()?;
self.bump();
let delim = self.expect_open_delim()?;
- let tts = self.parse_seq_to_end(
- &token::CloseDelim(delim),
- SeqSep::none(), |p| p.parse_token_tree())?;
- let mac = Mac_ { path: path, tts: tts };
- pat = PatKind::Mac(codemap::Spanned {node: mac,
- span: mk_sp(lo, self.last_span.hi)});
- } else {
- // Parse ident @ pat
- // This can give false positives and parse nullary enums,
- // they are dealt with later in resolve
- let binding_mode = BindingMode::ByValue(Mutability::Immutable);
- pat = self.parse_pat_ident(binding_mode)?;
+ let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
+ SeqSep::none(),
+ |p| p.parse_token_tree())?;
+ let mac = spanned(lo, self.last_span.hi, Mac_ { path: path, tts: tts });
+ pat = PatKind::Mac(mac);
}
- } else {
- let (qself, path) = if self.eat_lt() {
- // Parse a qualified path
- let (qself, path) =
- self.parse_qualified_path(PathStyle::Expr)?;
- (Some(qself), path)
- } else {
- // Parse an unqualified path
- (None, self.parse_path(PathStyle::Expr)?)
- };
- match self.token {
- token::DotDotDot => {
+ token::DotDotDot => {
// Parse range
let hi = self.last_span.hi;
let begin =
self.bump();
let end = self.parse_pat_range_end()?;
pat = PatKind::Range(begin, end);
- }
- token::OpenDelim(token::Brace) => {
- if qself.is_some() {
+ }
+ token::OpenDelim(token::Brace) => {
+ if qself.is_some() {
return Err(self.fatal("unexpected `{` after qualified path"));
}
// Parse struct pattern
});
self.bump();
pat = PatKind::Struct(path, fields, etc);
- }
- token::OpenDelim(token::Paren) => {
+ }
+ token::OpenDelim(token::Paren) => {
if qself.is_some() {
return Err(self.fatal("unexpected `(` after qualified path"));
}
let (fields, ddpos) = self.parse_pat_tuple_elements(false)?;
self.expect(&token::CloseDelim(token::Paren))?;
pat = PatKind::TupleStruct(path, fields, ddpos)
- }
- _ => {
- pat = PatKind::Path(qself, path);
- }
}
+ _ => pat = PatKind::Path(qself, path),
}
} else {
// Try to parse everything else as literal with optional minus
}
}
Err(mut err) => {
- err.cancel();
+ self.cancel(&mut err);
let msg = format!("expected pattern, found {}", self.this_token_descr());
return Err(self.fatal(&msg));
}
}
}
- }
}
let hi = self.last_span.hi;
}
}
- fn parse_stmt_(&mut self, macro_expanded: bool) -> Option<Stmt> {
- self.parse_stmt_without_recovery(macro_expanded).unwrap_or_else(|mut e| {
+ fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
+ self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
e.emit();
self.recover_stmt_(SemiColonMode::Break);
None
})
}
- fn parse_stmt_without_recovery(&mut self, macro_expanded: bool) -> PResult<'a, Option<Stmt>> {
+ fn parse_stmt_without_recovery(&mut self,
+ macro_legacy_warnings: bool)
+ -> PResult<'a, Option<Stmt>> {
maybe_whole!(Some deref self, NtStmt);
let attrs = self.parse_outer_attributes()?;
node: StmtKind::Local(self.parse_local(attrs.into())?),
span: mk_sp(lo, self.last_span.hi),
}
- } else if self.token.is_ident()
- && !self.token.is_any_keyword()
- && self.look_ahead(1, |t| *t == token::Not) {
- // it's a macro invocation:
+ } else if self.token.is_path_start() && self.token != token::Lt && {
+ !self.check_keyword(keywords::Union) ||
+ self.look_ahead(1, |t| *t == token::Not || *t == token::ModSep)
+ } {
+ let pth = self.parse_path(PathStyle::Expr)?;
- // Potential trouble: if we allow macros with paths instead of
- // idents, we'd need to look ahead past the whole path here...
- let pth = self.parse_ident_into_path()?;
- self.bump();
+ if !self.eat(&token::Not) {
+ let expr = if self.check(&token::OpenDelim(token::Brace)) {
+ self.parse_struct_expr(lo, pth, ThinVec::new())?
+ } else {
+ let hi = self.last_span.hi;
+ self.mk_expr(lo, hi, ExprKind::Path(None, pth), ThinVec::new())
+ };
+
+ let expr = self.with_res(Restrictions::RESTRICTION_STMT_EXPR, |this| {
+ let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?;
+ this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
+ })?;
+ return Ok(Some(Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Expr(expr),
+ span: mk_sp(lo, self.last_span.hi),
+ }));
+ }
+
+ // it's a macro invocation
let id = match self.token {
token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier
_ => self.parse_ident()?,
// We used to incorrectly stop parsing macro-expanded statements here.
// If the next token will be an error anyway but could have parsed with the
// earlier behavior, stop parsing here and emit a warning to avoid breakage.
- else if macro_expanded && self.token.can_begin_expr() && match self.token {
+ else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token {
// These can continue an expression, so we can't stop parsing and warn.
token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) |
token::BinOp(token::Minus) | token::BinOp(token::Star) |
None => {
let unused_attrs = |attrs: &[_], s: &mut Self| {
if attrs.len() > 0 {
- let last_token = s.last_token.clone().map(|t| *t);
- match last_token {
- Some(token::DocComment(_)) => s.span_err_help(s.last_span,
+ if s.last_token_kind == LastTokenKind::DocComment {
+ s.span_err_help(s.last_span,
"found a documentation comment that doesn't document anything",
"doc comments must come before what they document, maybe a \
- comment was intended with `//`?"),
- _ => s.span_err(s.span, "expected statement after outer attribute"),
+ comment was intended with `//`?");
+ } else {
+ s.span_err(s.span, "expected statement after outer attribute");
}
}
};
}
Err(mut e) => {
self.recover_stmt_(SemiColonMode::Break);
- e.cancel();
+ self.cancel(&mut e);
}
_ => ()
}
}
/// Parse a statement, including the trailing semicolon.
- pub fn parse_full_stmt(&mut self, macro_expanded: bool) -> PResult<'a, Option<Stmt>> {
- let mut stmt = match self.parse_stmt_(macro_expanded) {
+ pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
+ let mut stmt = match self.parse_stmt_(macro_legacy_warnings) {
Some(stmt) => stmt,
None => return Ok(None),
};
}
StmtKind::Local(..) => {
// We used to incorrectly allow a macro-expanded let statement to lack a semicolon.
- if macro_expanded && self.token != token::Semi {
+ if macro_legacy_warnings && self.token != token::Semi {
self.warn_missing_semicolon();
} else {
self.expect_one_of(&[token::Semi], &[])?;
let missing_comma = !lifetimes.is_empty() &&
!self.token.is_like_gt() &&
- self.last_token
- .as_ref().map_or(true,
- |x| &**x != &token::Comma);
+ self.last_token_kind != LastTokenKind::Comma;
if missing_comma {
let span_hi = match self.parse_ty() {
Ok(..) => self.span.hi,
Err(ref mut err) => {
- err.cancel();
+ self.cancel(err);
span_hi
}
};
fn parse_impl_method(&mut self, vis: &Visibility)
-> PResult<'a, (Ident, Vec<ast::Attribute>, ast::ImplItemKind)> {
// code copied from parse_macro_use_or_failure... abstraction!
- if !self.token.is_any_keyword()
- && self.look_ahead(1, |t| *t == token::Not)
- && (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
- || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
+ if self.token.is_path_start() {
// method macro.
let last_span = self.last_span;
self.complain_if_pub_macro(&vis, last_span);
let lo = self.span.lo;
- let pth = self.parse_ident_into_path()?;
+ let pth = self.parse_path(PathStyle::Mod)?;
self.expect(&token::Not)?;
// eat a matched-delimiter token tree:
let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
SeqSep::none(),
|p| p.parse_token_tree())?;
- let m_ = Mac_ { path: pth, tts: tts };
- let m: ast::Mac = codemap::Spanned { node: m_,
- span: mk_sp(lo,
- self.last_span.hi) };
if delim != token::Brace {
self.expect(&token::Semi)?
}
- Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(m)))
+
+ let mac = spanned(lo, self.last_span.hi, Mac_ { path: pth, tts: tts });
+ Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(mac)))
} else {
let (constness, unsafety, abi) = self.parse_fn_front_matter()?;
let ident = self.parse_ident()?;
}
} else {
let directory = self.directory.clone();
- self.push_directory(id, &outer_attrs);
+ let restrictions = self.push_directory(id, &outer_attrs);
self.expect(&token::OpenDelim(token::Brace))?;
let mod_inner_lo = self.span.lo;
let attrs = self.parse_inner_attributes()?;
- let m = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?;
+ let m = self.with_res(restrictions, |this| {
+ this.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)
+ })?;
self.directory = directory;
Ok((id, ItemKind::Mod(m), Some(attrs)))
}
}
- fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
- let default_path = self.id_to_interned_str(id);
- let file_path = match ::attr::first_attr_value_str_by_name(attrs, "path") {
- Some(d) => d,
- None => default_path,
- };
- self.directory.push(&*file_path)
+ fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) -> Restrictions {
+ if let Some(path) = ::attr::first_attr_value_str_by_name(attrs, "path") {
+ self.directory.push(&*path);
+ self.restrictions - Restrictions::NO_NONINLINE_MOD
+ } else {
+ let default_path = self.id_to_interned_str(id);
+ self.directory.push(&*default_path);
+ self.restrictions
+ }
}
pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option<PathBuf> {
lo: BytePos,
visibility: Visibility
) -> PResult<'a, Option<P<Item>>> {
- if macros_allowed && !self.token.is_any_keyword()
- && self.look_ahead(1, |t| *t == token::Not)
- && (self.look_ahead(2, |t| t.is_ident())
- || self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
- || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
+ if macros_allowed && self.token.is_path_start() {
// MACRO INVOCATION ITEM
let last_span = self.last_span;
let mac_lo = self.span.lo;
// item macro.
- let pth = self.parse_ident_into_path()?;
+ let pth = self.parse_path(PathStyle::Mod)?;
self.expect(&token::Not)?;
// a 'special' identifier (like what `macro_rules!` uses)
let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
SeqSep::none(),
|p| p.parse_token_tree())?;
- // single-variant-enum... :
- let m = Mac_ { path: pth, tts: tts };
- let m: ast::Mac = codemap::Spanned { node: m,
- span: mk_sp(mac_lo,
- self.last_span.hi) };
-
if delim != token::Brace {
if !self.eat(&token::Semi) {
let last_span = self.last_span;
}
}
- let item_ = ItemKind::Mac(m);
- let last_span = self.last_span;
- let item = self.mk_item(lo,
- last_span.hi,
- id,
- item_,
- visibility,
- attrs);
+ let hi = self.last_span.hi;
+ let mac = spanned(mac_lo, hi, Mac_ { path: pth, tts: tts });
+ let item = self.mk_item(lo, hi, id, ItemKind::Mac(mac), visibility, attrs);
return Ok(Some(item));
}
pub use self::Lit::*;
pub use self::Token::*;
-use ast::{self, BinOpKind};
+use ast::{self};
use ptr::P;
use util::interner::Interner;
use tokenstream;
self.is_path_segment_keyword() || self.is_ident() && !self.is_any_keyword()
}
- /// Maps a token to its corresponding binary operator.
- pub fn to_binop(&self) -> Option<BinOpKind> {
- match *self {
- BinOp(Star) => Some(BinOpKind::Mul),
- BinOp(Slash) => Some(BinOpKind::Div),
- BinOp(Percent) => Some(BinOpKind::Rem),
- BinOp(Plus) => Some(BinOpKind::Add),
- BinOp(Minus) => Some(BinOpKind::Sub),
- BinOp(Shl) => Some(BinOpKind::Shl),
- BinOp(Shr) => Some(BinOpKind::Shr),
- BinOp(And) => Some(BinOpKind::BitAnd),
- BinOp(Caret) => Some(BinOpKind::BitXor),
- BinOp(Or) => Some(BinOpKind::BitOr),
- Lt => Some(BinOpKind::Lt),
- Le => Some(BinOpKind::Le),
- Ge => Some(BinOpKind::Ge),
- Gt => Some(BinOpKind::Gt),
- EqEq => Some(BinOpKind::Eq),
- Ne => Some(BinOpKind::Ne),
- AndAnd => Some(BinOpKind::And),
- OrOr => Some(BinOpKind::Or),
- _ => None,
- }
- }
-
/// Returns `true` if the token is a given keyword, `kw`.
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
match *self {
use std::slice;
use std::mem;
use std::vec;
-use attr;
+use attr::{self, HasAttrs};
use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, FileMap, BytePos};
use std::rc::Rc;
}
debug!("current path: {}", path_name_i(&self.cx.path));
- let i = if is_test_fn(&self.cx, &i) || is_bench_fn(&self.cx, &i) {
+ if is_test_fn(&self.cx, &i) || is_bench_fn(&self.cx, &i) {
match i.node {
ast::ItemKind::Fn(_, ast::Unsafety::Unsafe, _, _, _, _) => {
let diag = self.cx.span_diagnostic;
};
self.cx.testfns.push(test);
self.tests.push(i.ident);
- // debug!("have {} test/bench functions",
- // cx.testfns.len());
-
- // Make all tests public so we can call them from outside
- // the module (note that the tests are re-exported and must
- // be made public themselves to avoid privacy errors).
- i.map(|mut i| {
- i.vis = ast::Visibility::Public;
- i
- })
}
}
- } else {
- i
- };
+ }
+ let mut item = i.unwrap();
// We don't want to recurse into anything other than mods, since
// mods or tests inside of functions will break things
- let res = match i.node {
- ast::ItemKind::Mod(..) => fold::noop_fold_item(i, self),
- _ => SmallVector::one(i),
- };
+ if let ast::ItemKind::Mod(module) = item.node {
+ let tests = mem::replace(&mut self.tests, Vec::new());
+ let tested_submods = mem::replace(&mut self.tested_submods, Vec::new());
+ let mut mod_folded = fold::noop_fold_mod(module, self);
+ let tests = mem::replace(&mut self.tests, tests);
+ let tested_submods = mem::replace(&mut self.tested_submods, tested_submods);
+
+ if !tests.is_empty() || !tested_submods.is_empty() {
+ let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods);
+ mod_folded.items.push(it);
+
+ if !self.cx.path.is_empty() {
+ self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym));
+ } else {
+ debug!("pushing nothing, sym: {:?}", sym);
+ self.cx.toplevel_reexport = Some(sym);
+ }
+ }
+ item.node = ast::ItemKind::Mod(mod_folded);
+ }
if ident.name != keywords::Invalid.name() {
self.cx.path.pop();
}
- res
- }
-
- fn fold_mod(&mut self, m: ast::Mod) -> ast::Mod {
- let tests = mem::replace(&mut self.tests, Vec::new());
- let tested_submods = mem::replace(&mut self.tested_submods, Vec::new());
- let mut mod_folded = fold::noop_fold_mod(m, self);
- let tests = mem::replace(&mut self.tests, tests);
- let tested_submods = mem::replace(&mut self.tested_submods, tested_submods);
-
- if !tests.is_empty() || !tested_submods.is_empty() {
- let (it, sym) = mk_reexport_mod(&mut self.cx, tests, tested_submods);
- mod_folded.items.push(it);
-
- if !self.cx.path.is_empty() {
- self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym));
- } else {
- debug!("pushing nothing, sym: {:?}", sym);
- self.cx.toplevel_reexport = Some(sym);
- }
- }
-
- mod_folded
+ SmallVector::one(P(item))
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac }
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac }
}
-fn mk_reexport_mod(cx: &mut TestCtxt, tests: Vec<ast::Ident>,
+fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<ast::Ident>,
tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (P<ast::Item>, ast::Ident) {
let super_ = token::str_to_ident("super");
+ // Generate imports with `#[allow(private_in_public)]` to work around issue #36768.
+ let allow_private_in_public = cx.ext_cx.attribute(DUMMY_SP, cx.ext_cx.meta_list(
+ DUMMY_SP,
+ InternedString::new("allow"),
+ vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, InternedString::new("private_in_public"))],
+ ));
let items = tests.into_iter().map(|r| {
cx.ext_cx.item_use_simple(DUMMY_SP, ast::Visibility::Public,
cx.ext_cx.path(DUMMY_SP, vec![super_, r]))
+ .map_attrs(|_| vec![allow_private_in_public.clone()])
}).chain(tested_submods.into_iter().map(|(r, sym)| {
let path = cx.ext_cx.path(DUMMY_SP, vec![super_, r, sym]);
cx.ext_cx.item_use_simple_(DUMMY_SP, ast::Visibility::Public, r, path)
+ .map_attrs(|_| vec![allow_private_in_public.clone()])
})).collect();
let reexport_mod = ast::Mod {
};
let sym = token::gensym_ident("__test_reexports");
+ let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
+ cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
ident: sym.clone(),
attrs: Vec::new(),
use parse::lexer;
use parse;
use parse::token::{self, Token, Lit, Nonterminal};
+use print::pprust;
use std::fmt;
use std::iter::*;
}
}
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str(&pprust::tts_to_string(&self.to_tts()))
+ }
+}
+
// FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the
// next leaf's iterator when the current one is exhausted.
pub struct Iter<'a> {
pub fn walk_fn<V>(visitor: &mut V, kind: FnKind, declaration: &FnDecl, body: &Block, _span: Span)
where V: Visitor,
{
- walk_fn_decl(visitor, declaration);
walk_fn_kind(visitor, kind);
+ walk_fn_decl(visitor, declaration);
visitor.visit_block(body)
}
walk_list!(visitor, visit_expr, subexpressions);
}
ExprKind::Call(ref callee_expression, ref arguments) => {
+ visitor.visit_expr(callee_expression);
walk_list!(visitor, visit_expr, arguments);
- visitor.visit_expr(callee_expression)
}
ExprKind::MethodCall(ref ident, ref types, ref arguments) => {
visitor.visit_ident(ident.span, ident.node);
- walk_list!(visitor, visit_expr, arguments);
walk_list!(visitor, visit_ty, types);
+ walk_list!(visitor, visit_expr, arguments);
}
ExprKind::Binary(_, ref left_expression, ref right_expression) => {
visitor.visit_expr(left_expression);
}
ExprKind::Block(ref block) => visitor.visit_block(block),
ExprKind::Assign(ref left_hand_expression, ref right_hand_expression) => {
+ visitor.visit_expr(left_hand_expression);
visitor.visit_expr(right_hand_expression);
- visitor.visit_expr(left_hand_expression)
}
ExprKind::AssignOp(_, ref left_expression, ref right_expression) => {
+ visitor.visit_expr(left_expression);
visitor.visit_expr(right_expression);
- visitor.visit_expr(left_expression)
}
ExprKind::Field(ref subexpression, ref ident) => {
visitor.visit_expr(subexpression);
tts: &[tokenstream::TokenTree])
-> Box<base::MacResult + 'cx> {
if !cx.ecfg.enable_asm() {
- feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
+ feature_gate::emit_feature_err(&cx.parse_sess,
"asm",
sp,
feature_gate::GateIssue::Language,
tts: &[TokenTree])
-> Box<base::MacResult + 'cx> {
if !cx.ecfg.enable_concat_idents() {
- feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
+ feature_gate::emit_feature_err(&cx.parse_sess,
"concat_idents",
sp,
feature_gate::GateIssue::Language,
use syntax::ast::{self, ItemKind};
use syntax::codemap::{ExpnInfo, MacroAttribute, NameAndSpan, Span};
use syntax::ext::base::*;
-use syntax::fold::{self, Folder};
+use syntax::fold::Folder;
use syntax::parse::token::intern;
use syntax::print::pprust;
}
}
-struct ChangeSpan { span: Span }
-
-impl Folder for ChangeSpan {
- fn new_span(&mut self, _sp: Span) -> Span {
- self.span
- }
-
- fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
- fold::noop_fold_mac(mac, self)
- }
-}
attr::ReprAny | attr::ReprPacked | attr::ReprSimd => continue,
attr::ReprExtern => "i32",
- attr::ReprInt(_, attr::SignedInt(ast::IntTy::Is)) => "isize",
- attr::ReprInt(_, attr::SignedInt(ast::IntTy::I8)) => "i8",
- attr::ReprInt(_, attr::SignedInt(ast::IntTy::I16)) => "i16",
- attr::ReprInt(_, attr::SignedInt(ast::IntTy::I32)) => "i32",
- attr::ReprInt(_, attr::SignedInt(ast::IntTy::I64)) => "i64",
-
- attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::Us)) => "usize",
- attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::U8)) => "u8",
- attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::U16)) => "u16",
- attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::U32)) => "u32",
- attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::U64)) => "u64",
+ attr::ReprInt(attr::SignedInt(ast::IntTy::Is)) => "isize",
+ attr::ReprInt(attr::SignedInt(ast::IntTy::I8)) => "i8",
+ attr::ReprInt(attr::SignedInt(ast::IntTy::I16)) => "i16",
+ attr::ReprInt(attr::SignedInt(ast::IntTy::I32)) => "i32",
+ attr::ReprInt(attr::SignedInt(ast::IntTy::I64)) => "i64",
+
+ attr::ReprInt(attr::UnsignedInt(ast::UintTy::Us)) => "usize",
+ attr::ReprInt(attr::UnsignedInt(ast::UintTy::U8)) => "u8",
+ attr::ReprInt(attr::UnsignedInt(ast::UintTy::U16)) => "u16",
+ attr::ReprInt(attr::UnsignedInt(ast::UintTy::U32)) => "u32",
+ attr::ReprInt(attr::UnsignedInt(ast::UintTy::U64)) => "u64",
}
}
}
cx.span_err(mitem.span, "unexpected value in `derive`");
}
- let traits = mitem.meta_item_list().unwrap_or(&[]);
+ let mut traits = mitem.meta_item_list().unwrap_or(&[]).to_owned();
if traits.is_empty() {
cx.span_warn(mitem.span, "empty trait list in `derive`");
}
+ // First, weed out malformed #[derive]
+ traits.retain(|titem| {
+ if titem.word().is_none() {
+ cx.span_err(titem.span, "malformed `derive` entry");
+ false
+ } else {
+ true
+ }
+ });
+
+ // Next, check for old-style #[derive(Foo)]
+ //
+ // These all get expanded to `#[derive_Foo]` and will get expanded first. If
+ // we actually add any attributes here then we return to get those expanded
+ // and then eventually we'll come back to finish off the other derive modes.
+ let mut new_attributes = Vec::new();
+ traits.retain(|titem| {
+ let tword = titem.word().unwrap();
+ let tname = tword.name();
+
+ let derive_mode = ast::Ident::with_empty_ctxt(intern(&tname));
+ let derive_mode = cx.resolver.resolve_derive_mode(derive_mode);
+ if is_builtin_trait(&tname) || derive_mode.is_some() {
+ return true
+ }
+
+ if !cx.ecfg.enable_custom_derive() {
+ feature_gate::emit_feature_err(&cx.parse_sess,
+ "custom_derive",
+ titem.span,
+ feature_gate::GateIssue::Language,
+ feature_gate::EXPLAIN_CUSTOM_DERIVE);
+ } else {
+ let name = intern_and_get_ident(&format!("derive_{}", tname));
+ let mitem = cx.meta_word(titem.span, name);
+ new_attributes.push(cx.attribute(mitem.span, mitem));
+ }
+ false
+ });
+ if new_attributes.len() > 0 {
+ item = item.map(|mut i| {
+ let list = cx.meta_list(mitem.span,
+ intern_and_get_ident("derive"),
+ traits);
+ i.attrs.extend(new_attributes);
+ i.attrs.push(cx.attribute(mitem.span, list));
+ i
+ });
+ return vec![Annotatable::Item(item)]
+ }
+
+ // Now check for macros-1.1 style custom #[derive].
+ //
+ // Expand each of them in order given, but *before* we expand any built-in
+ // derive modes. The logic here is to:
+ //
+ // 1. Collect the remaining `#[derive]` annotations into a list. If
+ // there are any left, attach a `#[derive]` attribute to the item
+ // that we're currently expanding with the remaining derive modes.
+ // 2. Manufacture a `#[derive(Foo)]` attribute to pass to the expander.
+ // 3. Expand the current item we're expanding, getting back a list of
+ // items that replace it.
+ // 4. Extend the returned list with the current list of items we've
+ // collected so far.
+ // 5. Return everything!
+ //
+ // If custom derive extensions end up threading through the `#[derive]`
+ // attribute, we'll get called again later on to continue expanding
+ // those modes.
+ let macros_11_derive = traits.iter()
+ .cloned()
+ .enumerate()
+ .filter(|&(_, ref name)| !is_builtin_trait(&name.name().unwrap()))
+ .next();
+ if let Some((i, titem)) = macros_11_derive {
+ let tname = ast::Ident::with_empty_ctxt(intern(&titem.name().unwrap()));
+ let ext = cx.resolver.resolve_derive_mode(tname).unwrap();
+ traits.remove(i);
+ if traits.len() > 0 {
+ item = item.map(|mut i| {
+ let list = cx.meta_list(mitem.span,
+ intern_and_get_ident("derive"),
+ traits);
+ i.attrs.push(cx.attribute(mitem.span, list));
+ i
+ });
+ }
+ let titem = cx.meta_list_item_word(titem.span, titem.name().unwrap());
+ let mitem = cx.meta_list(titem.span,
+ intern_and_get_ident("derive"),
+ vec![titem]);
+ let item = Annotatable::Item(item);
+ return ext.expand(cx, mitem.span, &mitem, item)
+ }
+
+ // Ok, at this point we know that there are no old-style `#[derive_Foo]` nor
+ // any macros-1.1 style `#[derive(Foo)]`. Expand all built-in traits here.
+
// RFC #1445. `#[derive(PartialEq, Eq)]` adds a (trusted)
// `#[structural_match]` attribute.
if traits.iter().filter_map(|t| t.name()).any(|t| t == "PartialEq") &&
});
}
- let mut other_items = Vec::new();
-
- let mut iter = traits.iter();
- while let Some(titem) = iter.next() {
-
- let tword = match titem.word() {
- Some(name) => name,
- None => {
- cx.span_err(titem.span, "malformed `derive` entry");
- continue
- }
+ let mut items = Vec::new();
+ for titem in traits.iter() {
+ let tname = titem.word().unwrap().name();
+ let name = intern_and_get_ident(&format!("derive({})", tname));
+ let mitem = cx.meta_word(titem.span, name);
+
+ let span = Span {
+ expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {
+ call_site: titem.span,
+ callee: codemap::NameAndSpan {
+ format: codemap::MacroAttribute(intern(&format!("derive({})", tname))),
+ span: Some(titem.span),
+ allow_internal_unstable: true,
+ },
+ }),
+ ..titem.span
};
- let tname = tword.name();
- // If this is a built-in derive mode, then we expand it immediately
- // here.
- if is_builtin_trait(&tname) {
- let name = intern_and_get_ident(&format!("derive({})", tname));
- let mitem = cx.meta_word(titem.span, name);
-
- let span = Span {
- expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {
- call_site: titem.span,
- callee: codemap::NameAndSpan {
- format: codemap::MacroAttribute(intern(&format!("derive({})", tname))),
- span: Some(titem.span),
- allow_internal_unstable: true,
- },
- }),
- ..titem.span
- };
-
- let my_item = Annotatable::Item(item);
- expand_builtin(&tname, cx, span, &mitem, &my_item, &mut |a| {
- other_items.push(a);
- });
- item = my_item.expect_item();
-
- // Otherwise if this is a `rustc_macro`-style derive mode, we process it
- // here. The logic here is to:
- //
- // 1. Collect the remaining `#[derive]` annotations into a list. If
- // there are any left, attach a `#[derive]` attribute to the item
- // that we're currently expanding with the remaining derive modes.
- // 2. Manufacture a `#[derive(Foo)]` attribute to pass to the expander.
- // 3. Expand the current item we're expanding, getting back a list of
- // items that replace it.
- // 4. Extend the returned list with the current list of items we've
- // collected so far.
- // 5. Return everything!
- //
- // If custom derive extensions end up threading through the `#[derive]`
- // attribute, we'll get called again later on to continue expanding
- // those modes.
- } else if let Some(ext) = cx.derive_modes.remove(&tname) {
- let remaining_derives = iter.cloned().collect::<Vec<_>>();
- if remaining_derives.len() > 0 {
- let list = cx.meta_list(titem.span,
- intern_and_get_ident("derive"),
- remaining_derives);
- let attr = cx.attribute(titem.span, list);
- item = item.map(|mut i| {
- i.attrs.push(attr);
- i
- });
- }
- let titem = cx.meta_list_item_word(titem.span, tname.clone());
- let mitem = cx.meta_list(titem.span,
- intern_and_get_ident("derive"),
- vec![titem]);
- let item = Annotatable::Item(item);
- let mut items = ext.expand(cx, mitem.span, &mitem, item);
- items.extend(other_items);
- cx.derive_modes.insert(tname.clone(), ext);
- return items
-
- // If we've gotten this far then it means that we're in the territory of
- // the old custom derive mechanism. If the feature isn't enabled, we
- // issue an error, otherwise manufacture the `derive_Foo` attribute.
- } else if !cx.ecfg.enable_custom_derive() {
- feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
- "custom_derive",
- titem.span,
- feature_gate::GateIssue::Language,
- feature_gate::EXPLAIN_CUSTOM_DERIVE);
- } else {
- let name = intern_and_get_ident(&format!("derive_{}", tname));
- let mitem = cx.meta_word(titem.span, name);
- item = item.map(|mut i| {
- i.attrs.push(cx.attribute(mitem.span, mitem));
- i
- });
- }
+ let my_item = Annotatable::Item(item);
+ expand_builtin(&tname, cx, span, &mitem, &my_item, &mut |a| {
+ items.push(a);
+ });
+ item = my_item.expect_item();
}
- other_items.insert(0, Annotatable::Item(item));
- return other_items
+ items.insert(0, Annotatable::Item(item));
+ return items
}
macro_rules! derive_traits {
pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, enable_quotes: bool) {
let mut register = |name, ext| {
- resolver.add_macro(Mark::root(), ast::Ident::with_empty_ctxt(intern(name)), Rc::new(ext));
+ resolver.add_ext(Mark::root(), ast::Ident::with_empty_ctxt(intern(name)), Rc::new(ext));
};
register("macro_rules", IdentTT(Box::new(MacroRulesExpander), None, false));
tts: &[tokenstream::TokenTree])
-> Box<base::MacResult + 'cx> {
if !cx.ecfg.enable_log_syntax() {
- feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
+ feature_gate::emit_feature_err(&cx.parse_sess,
"log_syntax",
sp,
feature_gate::GateIssue::Language,
tt: &[TokenTree])
-> Box<base::MacResult + 'static> {
if !cx.ecfg.enable_trace_macros() {
- feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
+ feature_gate::emit_feature_err(&cx.parse_sess,
"trace_macros",
sp,
feature_gate::GateIssue::Language,
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(question_mark)]
+#![feature(specialization)]
use std::cell::{Cell, RefCell};
use std::ops::{Add, Sub};
self.lo == other.lo && self.hi == other.hi
}
- /// Returns `Some(span)`, a union of `self` and `other`, on overlap.
- pub fn merge(self, other: Span) -> Option<Span> {
- if self.expn_id != other.expn_id {
- return None;
- }
-
- if (self.lo <= other.lo && self.hi > other.lo) ||
- (self.lo >= other.lo && self.lo < other.hi) {
- Some(Span {
- lo: cmp::min(self.lo, other.lo),
- hi: cmp::max(self.hi, other.hi),
- expn_id: self.expn_id,
- })
- } else {
- None
- }
- }
-
/// Returns `Some(span)`, where the start is trimmed by the end of `other`
pub fn trim_start(self, other: Span) -> Option<Span> {
if self.hi > other.hi {
pub label: Option<String>,
}
-impl Encodable for Span {
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+impl serialize::UseSpecializedEncodable for Span {
+ fn default_encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_struct("Span", 2, |s| {
s.emit_struct_field("lo", 0, |s| {
self.lo.encode(s)
}
}
-impl Decodable for Span {
- fn decode<D: Decoder>(d: &mut D) -> Result<Span, D::Error> {
+impl serialize::UseSpecializedDecodable for Span {
+ fn default_decode<D: Decoder>(d: &mut D) -> Result<Span, D::Error> {
d.read_struct("Span", 2, |d| {
- let lo = d.read_struct_field("lo", 0, |d| {
- BytePos::decode(d)
- })?;
-
- let hi = d.read_struct_field("hi", 1, |d| {
- BytePos::decode(d)
- })?;
-
+ let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
+ let hi = d.read_struct_field("hi", 1, Decodable::decode)?;
Ok(mk_sp(lo, hi))
})
}
// According to /etc/terminfo/README, after looking at
// ~/.terminfo, ncurses will search /etc/terminfo, then
// /lib/terminfo, and eventually /usr/share/terminfo.
+ // On Haiku the database can be found at /boot/system/data/terminfo
Err(..) => {
dirs_to_search.push(PathBuf::from("/etc/terminfo"));
dirs_to_search.push(PathBuf::from("/lib/terminfo"));
dirs_to_search.push(PathBuf::from("/usr/share/terminfo"));
+ dirs_to_search.push(PathBuf::from("/boot/system/data/terminfo"));
}
}
}
pub color: ColorConfig,
pub quiet: bool,
pub test_threads: Option<usize>,
+ pub skip: Vec<String>,
}
impl TestOpts {
color: AutoColor,
quiet: false,
test_threads: None,
+ skip: vec![],
}
}
}
task, allow printing directly"),
getopts::optopt("", "test-threads", "Number of threads used for running tests \
in parallel", "n_threads"),
+ getopts::optmulti("", "skip", "Skip tests whose names contain FILTER (this flag can \
+ be used multiple times)","FILTER"),
getopts::optflag("q", "quiet", "Display one character per test instead of one line"),
getopts::optopt("", "color", "Configure coloring of output:
auto = colorize if stdout is a tty and tests are run on serially (default);
color: color,
quiet: quiet,
test_threads: test_threads,
+ skip: matches.opt_strs("skip"),
};
Some(Ok(test_opts))
}
cpus as usize
}
+
+ #[cfg(target_os = "haiku")]
+ fn num_cpus() -> usize {
+ // FIXME: implement
+ 1
+ }
}
pub fn filter_tests(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> Vec<TestDescAndFn> {
}
};
+ // Skip tests that match any of the skip filters
+ filtered = filtered.into_iter()
+ .filter(|t| !opts.skip.iter().any(|sf| t.desc.name.as_slice().contains(&sf[..])))
+ .collect();
+
// Maybe pull out the ignored test and unignore them
filtered = if !opts.run_ignored {
filtered
fn main() {
println!("cargo:rustc-cfg=cargobuild");
- let target = env::var("TARGET").unwrap();
+ let target = env::var("TARGET").expect("TARGET was not set");
if target.contains("linux") {
if target.contains("musl") && !target.contains("mips") {
#[cfg_attr(any(all(target_os = "linux", not(target_env = "musl")),
target_os = "freebsd",
target_os = "solaris",
+ target_os = "haiku",
all(target_os = "linux",
target_env = "musl",
not(target_arch = "x86"),
-Subproject commit 16b79d01fd6d942cf3c9120b92df56b13ec92665
+Subproject commit 7801978ec1f3637fcda1b564048ebc732bf586af
"syntax_pos 0.0.0",
]
-[[package]]
-name = "rbml"
-version = "0.0.0"
-dependencies = [
- "log 0.0.0",
- "serialize 0.0.0",
-]
-
[[package]]
name = "rustc"
version = "0.0.0"
"fmt_macros 0.0.0",
"graphviz 0.0.0",
"log 0.0.0",
- "rbml 0.0.0",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
dependencies = [
"graphviz 0.0.0",
"log 0.0.0",
- "rbml 0.0.0",
"rustc 0.0.0",
"rustc_data_structures 0.0.0",
"serialize 0.0.0",
dependencies = [
"flate 0.0.0",
"log 0.0.0",
- "rbml 0.0.0",
"rustc 0.0.0",
"rustc_back 0.0.0",
- "rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
Archive::Child child;
RustArchiveMember(): filename(NULL), name(NULL),
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
child(NULL, NULL, NULL)
#else
child(NULL, NULL)
struct RustArchiveIterator {
Archive::child_iterator cur;
Archive::child_iterator end;
-#if LLVM_VERSION_MINOR >= 9
+#if LLVM_VERSION_GE(3, 9)
Error err;
#endif
};
return nullptr;
}
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
ErrorOr<std::unique_ptr<Archive>> archive_or =
#else
Expected<std::unique_ptr<Archive>> archive_or =
Archive::create(buf_or.get()->getMemBufferRef());
if (!archive_or) {
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
LLVMRustSetLastError(archive_or.getError().message().c_str());
#else
LLVMRustSetLastError(toString(archive_or.takeError()).c_str());
LLVMRustArchiveIteratorNew(LLVMRustArchiveRef ra) {
Archive *ar = ra->getBinary();
RustArchiveIterator *rai = new RustArchiveIterator();
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
rai->cur = ar->child_begin();
#else
rai->cur = ar->child_begin(rai->err);
extern "C" LLVMRustArchiveChildConstRef
LLVMRustArchiveIteratorNext(LLVMRustArchiveIteratorRef rai) {
-#if LLVM_VERSION_MINOR >= 9
+#if LLVM_VERSION_GE(3, 9)
if (rai->err) {
LLVMRustSetLastError(toString(std::move(rai->err)).c_str());
return NULL;
#endif
if (rai->cur == rai->end)
return NULL;
-#if LLVM_VERSION_MINOR == 8
+#if LLVM_VERSION_EQ(3, 8)
const ErrorOr<Archive::Child>* cur = rai->cur.operator->();
if (!*cur) {
LLVMRustSetLastError(cur->getError().message().c_str());
bool WriteSymbtab,
LLVMRustArchiveKind rust_kind) {
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
std::vector<NewArchiveIterator> Members;
#else
std::vector<NewArchiveMember> Members;
auto Member = NewMembers[i];
assert(Member->name);
if (Member->filename) {
-#if LLVM_VERSION_MINOR >= 9
+#if LLVM_VERSION_GE(3, 9)
Expected<NewArchiveMember> MOrErr = NewArchiveMember::getFile(Member->filename, true);
if (!MOrErr) {
LLVMRustSetLastError(toString(MOrErr.takeError()).c_str());
return LLVMRustResult::Failure;
}
Members.push_back(std::move(*MOrErr));
-#elif LLVM_VERSION_MINOR == 8
+#elif LLVM_VERSION_EQ(3, 8)
Members.push_back(NewArchiveIterator(Member->filename));
#else
Members.push_back(NewArchiveIterator(Member->filename, Member->name));
#endif
} else {
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
Members.push_back(NewArchiveIterator(Member->child, Member->name));
#else
Expected<NewArchiveMember> MOrErr = NewArchiveMember::getOldMember(Member->child, true);
#endif
}
}
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
auto pair = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false);
#else
auto pair = writeArchive(Dst, Members, WriteSymbtab, Kind, true);
initializeVectorization(Registry);
initializeIPO(Registry);
initializeAnalysis(Registry);
-#if LLVM_VERSION_MINOR == 7
+#if LLVM_VERSION_EQ(3, 7)
initializeIPA(Registry);
#endif
initializeTransformUtils(Registry);
bool FunctionSections,
bool DataSections) {
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
Reloc::Model RM;
#else
Optional<Reloc::Model> RM;
RM = Reloc::DynamicNoPIC;
break;
default:
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
RM = Reloc::Default;
#endif
break;
}
TargetOptions Options;
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
Options.PositionIndependentExecutable = PositionIndependentExecutable;
#endif
LLVMRustRunRestrictionPass(LLVMModuleRef M, char **symbols, size_t len) {
llvm::legacy::PassManager passes;
-#if LLVM_VERSION_MINOR <= 8
+#if LLVM_VERSION_LE(3, 8)
ArrayRef<const char*> ref(symbols, len);
passes.add(llvm::createInternalizePass(ref));
#else
extern "C" void
LLVMRustSetModulePIELevel(LLVMModuleRef M) {
-#if LLVM_VERSION_MINOR >= 9
+#if LLVM_VERSION_GE(3, 9)
unwrap(M)->setPIELevel(PIELevel::Level::Large);
#endif
}
LLVMRustMetadataRef File,
LLVMRustMetadataRef ParameterTypes) {
return wrap(Builder->createSubroutineType(
-#if LLVM_VERSION_MINOR == 7
+#if LLVM_VERSION_EQ(3, 7)
unwrapDI<DIFile>(File),
#endif
DITypeRefArray(unwrap<MDTuple>(ParameterTypes))));
LLVMValueRef Fn,
LLVMRustMetadataRef TParam,
LLVMRustMetadataRef Decl) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
DITemplateParameterArray TParams =
DITemplateParameterArray(unwrap<MDTuple>(TParam));
DISubprogram *Sub = Builder->createFunction(
int64_t* AddrOps,
unsigned AddrOpsCount,
unsigned ArgNo) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
if (Tag == 0x100) { // DW_TAG_auto_variable
return wrap(Builder->createAutoVariable(
unwrapDI<DIDescriptor>(Scope), Name,
raw_string_ostream Stream(Err);
DiagnosticPrinterRawOStream DP(Stream);
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
if (Linker::linkModules(*Dst, std::move(Src.get()))) {
#else
if (Linker::LinkModules(Dst, Src->get(), [&](const DiagnosticInfo &DI) { DI.print(DP); })) {
return LLVMRustDiagnosticKind::OptimizationRemarkMissed;
case DK_OptimizationRemarkAnalysis:
return LLVMRustDiagnosticKind::OptimizationRemarkAnalysis;
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
case DK_OptimizationRemarkAnalysisFPCommute:
return LLVMRustDiagnosticKind::OptimizationRemarkAnalysisFPCommute;
case DK_OptimizationRemarkAnalysisAliasing:
return LLVMRustDiagnosticKind::OptimizationRemarkAnalysisAliasing;
#endif
default:
-#if LLVM_VERSION_MINOR >= 9
+#if LLVM_VERSION_GE(3, 9)
return (kind >= DK_FirstRemark && kind <= DK_LastRemark) ?
LLVMRustDiagnosticKind::OptimizationRemarkOther :
LLVMRustDiagnosticKind::Other;
return LLVMVectorTypeKind;
case Type::X86_MMXTyID:
return LLVMX86_MMXTypeKind;
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
case Type::TokenTyID:
return LLVMTokenTypeKind;
#endif
unsigned ArgCnt,
LLVMValueRef *LLArgs,
const char *Name) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
Value **Args = unwrap(LLArgs);
if (ParentPad == NULL) {
Type *Ty = Type::getTokenTy(unwrap(Builder)->getContext());
LLVMRustBuildCleanupRet(LLVMBuilderRef Builder,
LLVMValueRef CleanupPad,
LLVMBasicBlockRef UnwindBB) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
CleanupPadInst *Inst = cast<CleanupPadInst>(unwrap(CleanupPad));
return wrap(unwrap(Builder)->CreateCleanupRet(Inst, unwrap(UnwindBB)));
#else
unsigned ArgCnt,
LLVMValueRef *LLArgs,
const char *Name) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
Value **Args = unwrap(LLArgs);
return wrap(unwrap(Builder)->CreateCatchPad(unwrap(ParentPad),
ArrayRef<Value*>(Args, ArgCnt),
LLVMRustBuildCatchRet(LLVMBuilderRef Builder,
LLVMValueRef Pad,
LLVMBasicBlockRef BB) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
return wrap(unwrap(Builder)->CreateCatchRet(cast<CatchPadInst>(unwrap(Pad)),
unwrap(BB)));
#else
LLVMBasicBlockRef BB,
unsigned NumHandlers,
const char *Name) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
if (ParentPad == NULL) {
Type *Ty = Type::getTokenTy(unwrap(Builder)->getContext());
ParentPad = wrap(Constant::getNullValue(Ty));
extern "C" void
LLVMRustAddHandler(LLVMValueRef CatchSwitchRef,
LLVMBasicBlockRef Handler) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
Value *CatchSwitch = unwrap(CatchSwitchRef);
cast<CatchSwitchInst>(CatchSwitch)->addHandler(unwrap(Handler));
#endif
extern "C" void
LLVMRustSetPersonalityFn(LLVMBuilderRef B,
LLVMValueRef Personality) {
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
unwrap(B)->GetInsertBlock()
->getParent()
->setPersonalityFn(cast<Function>(unwrap(Personality)));
#endif
}
-#if LLVM_VERSION_MINOR >= 8
+#if LLVM_VERSION_GE(3, 8)
extern "C" OperandBundleDef*
LLVMRustBuildOperandBundleDef(const char *Name,
LLVMValueRef *Inputs,
# If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2016-08-30
+2016-09-17
#include "llvm-c/ExecutionEngine.h"
#include "llvm-c/Object.h"
-#if LLVM_VERSION_MINOR >= 7
+#define LLVM_VERSION_GE(major, minor) \
+ (LLVM_VERSION_MAJOR > (major) || LLVM_VERSION_MAJOR == (major) && LLVM_VERSION_MINOR >= (minor))
+
+#define LLVM_VERSION_EQ(major, minor) \
+ (LLVM_VERSION_MAJOR == (major) && LLVM_VERSION_MINOR == (minor))
+
+#define LLVM_VERSION_LE(major, minor) \
+ (LLVM_VERSION_MAJOR < (major) || LLVM_VERSION_MAJOR == (major) && LLVM_VERSION_MINOR <= (minor))
+
+#if LLVM_VERSION_GE(3, 7)
#include "llvm/IR/LegacyPassManager.h"
#else
#include "llvm/PassManager.h"
# tarball for a stable release you'll likely see `1.x.0-$date` where `1.x.0` was
# released on `$date`
-rustc: beta-2016-08-17
-rustc_key: 195e6261
-cargo: nightly-2016-08-21
+rustc: beta-2016-09-28
+rustc_key: 62b3e239
+cargo: nightly-2016-09-26
fn main() {
let s1 = Struct { _a: 0u32 };
+ //~ TRANS_ITEM drop-glue i8
//~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::foo[0]<u32>
//~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::bar[0]<u32>
let _ = &s1 as &Trait;
}
-//~ TRANS_ITEM fn overloaded_operators::{{impl}}[2]::eq[0]
-//~ TRANS_ITEM fn overloaded_operators::{{impl}}[2]::ne[0]
+//~ TRANS_ITEM fn overloaded_operators::{{impl}}[4]::eq[0]
+//~ TRANS_ITEM fn overloaded_operators::{{impl}}[4]::ne[0]
#[derive(PartialEq)]
pub struct Equatable(u32);
impl Add<u32> for Equatable {
type Output = u32;
- //~ TRANS_ITEM fn overloaded_operators::{{impl}}[3]::add[0]
+ //~ TRANS_ITEM fn overloaded_operators::{{impl}}[2]::add[0]
fn add(self, rhs: u32) -> u32 {
self.0 + rhs
}
impl Deref for Equatable {
type Target = u32;
- //~ TRANS_ITEM fn overloaded_operators::{{impl}}[4]::deref[0]
+ //~ TRANS_ITEM fn overloaded_operators::{{impl}}[3]::deref[0]
fn deref(&self) -> &Self::Target {
&self.0
}
{
// simple case
let bool_sized = &true;
+ //~ TRANS_ITEM drop-glue i8
//~ TRANS_ITEM fn unsizing::{{impl}}[0]::foo[0]
let _bool_unsized = bool_sized as &Trait;
mod inline {
// Important: This function should show up in all codegen units where it is inlined
- //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-inline[External] local_inlining-user1[Available] local_inlining-user2[Available]
+ //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-user1[Internal] local_inlining-user2[Internal]
#[inline(always)]
pub fn inlined_function()
{
mod inline {
- //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-inline[External] local_transitive_inlining-direct_user[Available] local_transitive_inlining-indirect_user[Available]
+ //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-indirect_user[Internal]
#[inline(always)]
pub fn inlined_function()
{
mod direct_user {
use super::inline;
- //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-direct_user[External] local_transitive_inlining-indirect_user[Available]
+ //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-indirect_user[Internal]
#[inline(always)]
pub fn foo() {
inline::inlined_function();
//~ TRANS_ITEM fn vtable_through_const::main[0] @@ vtable_through_const[External]
fn main() {
+ //~ TRANS_ITEM drop-glue i8 @@ vtable_through_const[Internal]
// Since Trait1::do_something() is instantiated via its default implementation,
// it is considered a generic and is instantiated here only because it is
// CHECK: [[S_b:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"** %b to i8*
// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_b]])
-// CHECK: [[S_tmp2:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"* %tmp2 to i8*
-// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_tmp2]])
+// CHECK: [[S__5:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"* %_5 to i8*
+// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S__5]])
-// CHECK: [[E_tmp2:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"* %tmp2 to i8*
-// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_tmp2]])
+// CHECK: [[E__5:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"* %_5 to i8*
+// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E__5]])
// CHECK: [[E_b:%[0-9]+]] = bitcast %"2.std::option::Option<i32>"** %b to i8*
// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_b]])
B(f32)
}
-// CHECK: @VAR2 = constant {{.*}} { i32 0, i32 666, {{.*}} }, section ".test_two"
+// CHECK: @VAR2 = constant {{.*}} { i32 0, i32 666 }, section ".test_two"
#[no_mangle]
#[link_section = ".test_two"]
pub static VAR2: E = E::A(666);
-// CHECK: @VAR3 = constant {{.*}} { i32 1, float 1.000000e+00, {{.*}} }, section ".test_three"
+// CHECK: @VAR3 = constant {{.*}} { i32 1, float 1.000000e+00 }, section ".test_three"
#[no_mangle]
#[link_section = ".test_three"]
pub static VAR3: E = E::B(1.);
#![no_std]
-extern crate core;
extern crate rand;
extern crate serialize as rustc_serialize;
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:append-impl.rs
-
-#![feature(rustc_macro)]
-#![allow(warnings)]
-
-#[macro_use]
-extern crate append_impl;
-
-trait Append {
- fn foo(&self);
-}
-
-#[derive(PartialEq,
- Append,
- Eq)]
-struct A {
-//~^ ERROR: the semantics of constant patterns is not yet settled
- inner: u32,
-}
-
-fn main() {
- A { inner: 3 }.foo();
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// force-host
-// no-prefer-dynamic
-
-#![feature(rustc_macro)]
-#![feature(rustc_macro_lib)]
-#![crate_type = "rustc-macro"]
-
-extern crate rustc_macro;
-
-use rustc_macro::TokenStream;
-
-#[rustc_macro_derive(Append)]
-pub fn derive_a(input: TokenStream) -> TokenStream {
- let mut input = input.to_string();
- input.push_str("
- impl Append for A {
- fn foo(&self) {}
- }
- ");
- input.parse().unwrap()
-}
fn main() {
let x = Foo { a:1, b:2 };
- let Foo { a: x, a: y, b: 0 } = x; //~ ERROR E0025
+ let Foo { a: x, a: y, b: 0 } = x;
+ //~^ ERROR field `a` bound multiple times in the pattern
+ //~| NOTE multiple uses of `a` in pattern
+ //~| NOTE first use of `a`
}
fn main() {
let trait_obj: &SomeTrait = SomeTrait;
//~^ ERROR E0425
+ //~| NOTE unresolved name
//~| ERROR E0038
//~| method `foo` has no receiver
//~| NOTE the trait `SomeTrait` cannot be made into an object
// except according to those terms.
trait Foo {
- fn foo(&self, x: u8) -> bool;
+ fn foo(&self, x: u8) -> bool; //~ NOTE trait requires 2 parameters
+ fn bar(&self, x: u8, y: u8, z: u8); //~ NOTE trait requires 4 parameters
+ fn less(&self); //~ NOTE trait requires 1 parameter
}
struct Bar;
impl Foo for Bar {
fn foo(&self) -> bool { true } //~ ERROR E0050
+ //~| NOTE expected 2 parameters, found 1
+ fn bar(&self) { } //~ ERROR E0050
+ //~| NOTE expected 4 parameters, found 1
+ fn less(&self, x: u8, y: u8, z: u8) { } //~ ERROR E0050
+ //~| NOTE expected 1 parameter, found 4
}
fn main() {
struct Bar(u32);
pub fn bar() -> Bar { //~ ERROR E0446
+ //~| NOTE can't leak private type
Bar(0)
}
}
}
pub impl Bar {} //~ ERROR E0449
+ //~| NOTE `pub` not needed here
+ //~| NOTE place qualifiers on individual impl items instead
pub impl Foo for Bar { //~ ERROR E0449
+ //~| NOTE `pub` not needed here
pub fn foo() {} //~ ERROR E0449
+ //~| NOTE `pub` not needed here
}
fn main() {
fn main() {
unsafe { takes_u8(::std::mem::transmute(0u16)); } //~ ERROR E0512
+ //~| transmuting between 16 bits and 8 bits
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::mem;
+
+fn main() {
+ unsafe {
+ let size = mem::size_of::<u32>();
+ mem::transmute_copy::<u32, [u8; size]>(&8_8); //~ ERROR E0513
+ //~| NOTE no type for variable
+ }
+}
default fn fly(&self) {}
//~^ ERROR E0520
//~| NOTE cannot specialize default item `fly`
- //~| NOTE either the parent `impl` or `fly` in the parent `impl` must be marked `default`
+ //~| NOTE `fly` in the parent `impl` must be marked `default`
}
fn main() {
fn main() {
let s = Field::Fool { joke: 0 };
//~^ ERROR E0559
- //~| NOTE did you mean `x`?
+ //~| NOTE field does not exist - did you mean `x`?
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Simba {
+ mother: u32,
+}
+
+fn main() {
+ let s = Simba { mother: 1, father: 0 };
+ //~^ ERROR E0560
+ //~| NOTE `Simba` does not have this field
+}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-struct Simba {
- mother: u32,
-}
-
-fn main() {
- let s = Simba { mother: 1, father: 0 }; //~ ERROR E0560
-}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// error-pattern: aborting due to previous error
+
+fn main() {
+ 2 + +2;
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "lib"]
+
+pub trait Foo {
+ fn foo();
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub enum Homura {
+ Madoka { age: u32 }
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern: unresolved name `m1::arguments`. Did you mean `arguments`?
+// error-pattern: unresolved name `m1::arguments`
mod m1 {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern: unresolved name `m1::arguments`. Did you mean `arguments`?
+// error-pattern: unresolved name `m1::arguments`
mod m1 {
pub mod arguments {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-extern crate bäz; //~ ERROR non-ascii idents
+extern crate core as bäz; //~ ERROR non-ascii idents
use föö::bar; //~ ERROR non-ascii idents
impl Foo for *const BarTy {
fn bar(&self) {
baz();
- //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`?
+ //~^ ERROR: unresolved name `baz`
+ //~| NOTE did you mean to call `self.baz`?
a;
//~^ ERROR: unresolved name `a`
+ //~| NOTE unresolved name
}
}
impl<'a> Foo for &'a BarTy {
fn bar(&self) {
baz();
- //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`?
+ //~^ ERROR: unresolved name `baz`
+ //~| NOTE did you mean to call `self.baz`?
x;
- //~^ ERROR: unresolved name `x`. Did you mean `self.x`?
+ //~^ ERROR: unresolved name `x`
+ //~| NOTE did you mean `self.x`?
y;
- //~^ ERROR: unresolved name `y`. Did you mean `self.y`?
+ //~^ ERROR: unresolved name `y`
+ //~| NOTE did you mean `self.y`?
a;
//~^ ERROR: unresolved name `a`
+ //~| NOTE unresolved name
bah;
- //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`?
+ //~^ ERROR: unresolved name `bah`
+ //~| NOTE did you mean to call `Foo::bah`?
b;
//~^ ERROR: unresolved name `b`
+ //~| NOTE unresolved name
}
}
impl<'a> Foo for &'a mut BarTy {
fn bar(&self) {
baz();
- //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`?
+ //~^ ERROR: unresolved name `baz`
+ //~| NOTE did you mean to call `self.baz`?
x;
- //~^ ERROR: unresolved name `x`. Did you mean `self.x`?
+ //~^ ERROR: unresolved name `x`
+ //~| NOTE did you mean `self.x`?
y;
- //~^ ERROR: unresolved name `y`. Did you mean `self.y`?
+ //~^ ERROR: unresolved name `y`
+ //~| NOTE did you mean `self.y`?
a;
//~^ ERROR: unresolved name `a`
+ //~| NOTE unresolved name
bah;
- //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`?
+ //~^ ERROR: unresolved name `bah`
+ //~| NOTE did you mean to call `Foo::bah`?
b;
//~^ ERROR: unresolved name `b`
+ //~| NOTE unresolved name
}
}
impl Foo for Box<BarTy> {
fn bar(&self) {
baz();
- //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`?
+ //~^ ERROR: unresolved name `baz`
+ //~| NOTE did you mean to call `self.baz`?
bah;
- //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`?
+ //~^ ERROR: unresolved name `bah`
+ //~| NOTE did you mean to call `Foo::bah`?
}
}
impl Foo for *const isize {
fn bar(&self) {
baz();
- //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`?
+ //~^ ERROR: unresolved name `baz`
+ //~| NOTE did you mean to call `self.baz`?
bah;
- //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`?
+ //~^ ERROR: unresolved name `bah`
+ //~| NOTE did you mean to call `Foo::bah`?
}
}
impl<'a> Foo for &'a isize {
fn bar(&self) {
baz();
- //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`?
+ //~^ ERROR: unresolved name `baz`
+ //~| NOTE did you mean to call `self.baz`?
bah;
- //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`?
+ //~^ ERROR: unresolved name `bah`
+ //~| NOTE did you mean to call `Foo::bah`?
}
}
impl<'a> Foo for &'a mut isize {
fn bar(&self) {
baz();
- //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`?
+ //~^ ERROR: unresolved name `baz`
+ //~| NOTE did you mean to call `self.baz`?
bah;
- //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`?
+ //~^ ERROR: unresolved name `bah`
+ //~| NOTE did you mean to call `Foo::bah`?
}
}
impl Foo for Box<isize> {
fn bar(&self) {
baz();
- //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`?
+ //~^ ERROR: unresolved name `baz`
+ //~| NOTE did you mean to call `self.baz`?
bah;
- //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`?
+ //~^ ERROR: unresolved name `bah`
+ //~| NOTE did you mean to call `Foo::bah`?
}
}
fn main() {
let Foo {
- a: _, //~ NOTE field `a` previously bound here
- a: _ //~ ERROR field `a` bound multiple times in the pattern
+ a: _, //~ NOTE first use of `a`
+ a: _
+ //~^ ERROR field `a` bound multiple times in the pattern
+ //~| NOTE multiple uses of `a` in pattern
} = Foo { a: 29 };
let Foo {
- a, //~ NOTE field `a` previously bound here
- a: _ //~ ERROR field `a` bound multiple times in the pattern
+ a, //~ NOTE first use of `a`
+ a: _
+ //~^ ERROR field `a` bound multiple times in the pattern
+ //~| NOTE multiple uses of `a` in pattern
} = Foo { a: 29 };
let Foo {
- a, //~ NOTE field `a` previously bound here
- //~^ NOTE field `a` previously bound here
- a: _, //~ ERROR field `a` bound multiple times in the pattern
- a: x //~ ERROR field `a` bound multiple times in the pattern
+ a,
+ //~^ NOTE first use of `a`
+ //~| NOTE first use of `a`
+ a: _,
+ //~^ ERROR field `a` bound multiple times in the pattern
+ //~| NOTE multiple uses of `a` in pattern
+ a: x
+ //~^ ERROR field `a` bound multiple times in the pattern
+ //~| NOTE multiple uses of `a` in pattern
} = Foo { a: 29 };
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// aux-build:issue_19452_aux.rs
+extern crate issue_19452_aux;
+
enum Homura {
Madoka { age: u32 }
}
let homura = Homura::Madoka;
//~^ ERROR uses it like a function
//~| struct called like a function
+
+ let homura = issue_19452_aux::Homura::Madoka;
+ //~^ ERROR uses it like a function
+ //~| struct called like a function
}
fn main() {
let homura = Homura::Akemi { kaname: () };
//~^ ERROR variant `Homura::Akemi` has no field named `kaname`
+ //~| NOTE field does not exist - did you mean `madoka`?
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern: expected item, found `parse_error`
+// error-pattern: expected one of `!` or `::`, found `<eof>`
include!("auxiliary/issue-21146-inc.rs");
fn main() {}
// If this provides a suggestion, it's a bug as MaybeDog doesn't impl Groom
shave();
//~^ ERROR: unresolved name `shave`
+ //~| NOTE unresolved name
}
}
fn shave(other: usize) {
whiskers -= other;
//~^ ERROR: unresolved name `whiskers`
+ //~| NOTE unresolved name
//~| HELP this is an associated function
shave(4);
- //~^ ERROR: unresolved name `shave`. Did you mean to call `Groom::shave`?
+ //~^ ERROR: unresolved name `shave`
+ //~| NOTE did you mean to call `Groom::shave`?
purr();
//~^ ERROR: unresolved name `purr`
+ //~| NOTE unresolved name
}
}
fn purr_louder() {
static_method();
//~^ ERROR: unresolved name `static_method`
+ //~| NOTE unresolved name
purr();
//~^ ERROR: unresolved name `purr`
+ //~| NOTE unresolved name
purr();
//~^ ERROR: unresolved name `purr`
+ //~| NOTE unresolved name
purr();
//~^ ERROR: unresolved name `purr`
+ //~| NOTE unresolved name
}
}
fn purr(&self) {
grow_older();
//~^ ERROR: unresolved name `grow_older`
+ //~| NOTE unresolved name
shave();
//~^ ERROR: unresolved name `shave`
+ //~| NOTE unresolved name
}
fn burn_whiskers(&mut self) {
whiskers = 0;
- //~^ ERROR: unresolved name `whiskers`. Did you mean `self.whiskers`?
+ //~^ ERROR: unresolved name `whiskers`
+ //~| NOTE did you mean `self.whiskers`?
}
pub fn grow_older(other:usize) {
whiskers = 4;
//~^ ERROR: unresolved name `whiskers`
+ //~| NOTE unresolved name
//~| HELP this is an associated function
purr_louder();
//~^ ERROR: unresolved name `purr_louder`
+ //~| NOTE unresolved name
}
}
fn main() {
self += 1;
//~^ ERROR: unresolved name `self`
+ //~| NOTE unresolved name
//~| HELP: module `self`
// it's a bug if this suggests a missing `self` as we're not in a method
}
fn main() {
// FIXME(#31407) this error should go away, but in the meantime we test that it
// is accompanied by a somewhat useful error message.
- let _: f64 = 1234567890123456789012345678901234567890e-340; //~ ERROR could not evaluate float
+ let _: f64 = 1234567890123456789012345678901234567890e-340;
+ //~^ ERROR constant evaluation error
+ //~| unimplemented constant expression: could not evaluate float literal
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![derive(Copy)] //~ ERROR `derive` may only be applied to structs, enums and unions
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Z continue-parse-after-error
+
+struct Foo<Self>(Self);
+//~^ ERROR expected identifier, found keyword `Self`
+
+trait Bar<Self> {}
+//~^ ERROR expected identifier, found keyword `Self`
+
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:issue-36708.rs
+
+extern crate issue_36708 as lib;
+
+struct Bar;
+
+impl lib::Foo for Bar {
+ fn foo<T>() {}
+ //~^ ERROR E0049
+ //~| NOTE found 1 type parameter, expected 0
+}
+
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+macro_rules! foo {
+ ( $()* ) => {};
+ //~^ ERROR repetition matches empty token tree
+ ( $()+ ) => {};
+ //~^ ERROR repetition matches empty token tree
+
+ ( $(),* ) => {}; // PASS
+ ( $(),+ ) => {}; // PASS
+
+ ( [$()*] ) => {};
+ //~^ ERROR repetition matches empty token tree
+ ( [$()+] ) => {};
+ //~^ ERROR repetition matches empty token tree
+
+ ( [$(),*] ) => {}; // PASS
+ ( [$(),+] ) => {}; // PASS
+
+ ( $($()* $(),* $(a)* $(a),* )* ) => {};
+ //~^ ERROR repetition matches empty token tree
+ ( $($()* $(),* $(a)* $(a),* )+ ) => {};
+ //~^ ERROR repetition matches empty token tree
+
+ ( $(a $(),* $(a)* $(a),* )* ) => {}; // PASS
+ ( $($(a)+ $(),* $(a)* $(a),* )+ ) => {}; // PASS
+
+ ( $(a $()+)* ) => {};
+ //~^ ERROR repetition matches empty token tree
+ ( $(a $()*)+ ) => {};
+ //~^ ERROR repetition matches empty token tree
+}
+
+
+// --- Original Issue --- //
+
+macro_rules! make_vec {
+ (a $e1:expr $($(, a $e2:expr)*)*) => ([$e1 $($(, $e2)*)*]);
+ //~^ ERROR repetition matches empty token tree
+}
+
+fn main() {
+ let _ = make_vec!(a 1, a 2, a 3);
+}
+
+
+// --- Minified Issue --- //
+
+macro_rules! m {
+ ( $()* ) => {}
+ //~^ ERROR repetition matches empty token tree
+}
+
+m!();
//~| ERROR macro expansion ignores token `typeof`
//~| ERROR macro expansion ignores token `;`
//~| ERROR macro expansion ignores token `;`
- //~| ERROR macro expansion ignores token `i`
}
-m!(); //~ NOTE the usage of `m!` is likely invalid in item context
-
fn main() {
let a: m!(); //~ NOTE the usage of `m!` is likely invalid in type context
let i = m!(); //~ NOTE the usage of `m!` is likely invalid in expression context
struct S(u8, u16);
fn main() {
- let s = S{0b1: 10, 0: 11}; //~ ERROR struct `S` has no field named `0b1`
+ let s = S{0b1: 10, 0: 11};
+ //~^ ERROR struct `S` has no field named `0b1`
+ //~| NOTE field does not exist - did you mean `1`?
match s {
- S{0: a, 0x1: b, ..} => {} //~ ERROR does not have a field named `0x1`
+ S{0: a, 0x1: b, ..} => {}
+ //~^ ERROR does not have a field named `0x1`
+ //~| NOTE struct `S::{{constructor}}` does not have field `0x1`
}
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+::foo::bar!(); //~ ERROR expected macro name without module separators
+foo::bar!(); //~ ERROR expected macro name without module separators
+
+trait T {
+ foo::bar!(); //~ ERROR expected macro name without module separators
+ ::foo::bar!(); //~ ERROR expected macro name without module separators
+}
+
+struct S {
+ x: foo::bar!(), //~ ERROR expected macro name without module separators
+ y: ::foo::bar!(), //~ ERROR expected macro name without module separators
+}
+
+impl S {
+ foo::bar!(); //~ ERROR expected macro name without module separators
+ ::foo::bar!(); //~ ERROR expected macro name without module separators
+}
+
+fn main() {
+ foo::bar!(); //~ ERROR expected macro name without module separators
+ ::foo::bar!(); //~ ERROR expected macro name without module separators
+
+ let _ = foo::bar!(); //~ ERROR expected macro name without module separators
+ let _ = ::foo::bar!(); //~ ERROR expected macro name without module separators
+
+ let foo::bar!() = 0; //~ ERROR expected macro name without module separators
+ let ::foo::bar!() = 0; //~ ERROR expected macro name without module separators
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(platform_intrinsics)]
+extern "platform-intrinsic" {
+ fn x86_mm_movemask_ps() -> i32; //~ERROR found 0, expected 1
+}
+
+fn main() { }
// except according to those terms.
fn main() {
- assert(true); //~ERROR unresolved name `assert`. Did you mean the macro `assert!`?
+ assert(true);
+ //~^ ERROR unresolved name `assert`
+ //~| NOTE did you mean the macro `assert!`?
}
ref mut Self => (),
//~^ ERROR expected identifier, found keyword `Self`
Self!() => (),
- //~^ ERROR expected identifier, found keyword `Self`
- //~^^ ERROR macro undefined: 'Self!'
+ //~^ ERROR macro undefined: 'Self!'
Foo { x: Self } => (),
//~^ ERROR expected identifier, found keyword `Self`
Foo { Self } => (),
}
}
-use std::option::Option as Self;
-//~^ ERROR expected identifier, found keyword `Self`
+mod m1 {
+ extern crate core as Self;
+ //~^ ERROR expected identifier, found keyword `Self`
+}
-extern crate Self;
-//~^ ERROR expected identifier, found keyword `Self`
+mod m2 {
+ use std::option::Option as Self;
+ //~^ ERROR expected identifier, found keyword `Self`
+}
-trait Self {}
-//~^ ERROR expected identifier, found keyword `Self`
+mod m3 {
+ trait Self {}
+ //~^ ERROR expected identifier, found keyword `Self`
+}
foo : 5,
bar : 42,
//~^ ERROR struct `A` has no field named `bar`
- //~| NOTE did you mean `barr`?
+ //~| NOTE field does not exist - did you mean `barr`?
car : 9,
};
}
foo : 5,
bar : 42,
//~^ ERROR struct `A` has no field named `bar`
- //~| NOTE did you mean `car`?
+ //~| NOTE field does not exist - did you mean `car`?
};
}
fn main() {
let foo = BuildData {
foo: 0,
- bar: 0 //~ ERROR struct `BuildData` has no field named `bar`
+ bar: 0
+ //~^ ERROR struct `BuildData` has no field named `bar`
+ //~| NOTE `BuildData` does not have this field
};
}
let k = B {
aa: 20,
//~^ ERROR struct `xc::B` has no field named `aa`
- //~| NOTE did you mean `a`?
+ //~| NOTE field does not exist - did you mean `a`?
bb: 20,
//~^ ERROR struct `xc::B` has no field named `bb`
- //~| NOTE did you mean `a`?
+ //~| NOTE field does not exist - did you mean `a`?
};
// local crate struct
let l = A {
aa: 20,
//~^ ERROR struct `A` has no field named `aa`
- //~| NOTE did you mean `a`?
+ //~| NOTE field does not exist - did you mean `a`?
bb: 20,
//~^ ERROR struct `A` has no field named `bb`
- //~| NOTE did you mean `b`?
+ //~| NOTE field does not exist - did you mean `b`?
};
}
fn main() {
if foo { //~ NOTE: unclosed delimiter
//~^ ERROR: unresolved name `foo`
+ //~| NOTE unresolved name
) //~ ERROR: incorrect close delimiter: `)`
}
callback: F)
-> io::Result<bool> {
if !is_directory(path.as_ref()) { //~ ERROR: unresolved name `is_directory`
+ //~| NOTE unresolved name
callback(path.as_ref(); //~ NOTE: unclosed delimiter
//~^ ERROR: expected one of
fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types
//~^^^ ERROR: unresolved name `bar`
//~^^^^ ERROR: unresolved name `foo`
//~^^^^^ ERROR: expected one of `)`, `,`, `.`, `<`, `?`
+ //~| NOTE unresolved name
+ //~| NOTE unresolved name
} //~ ERROR: incorrect close delimiter: `}`
//~^ ERROR: incorrect close delimiter: `}`
//~^^ ERROR: expected expression, found `)`
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
- let foo = 1;
-
- // `foo` shouldn't be suggested, it is too dissimilar from `bar`.
- println!("Hello {}", bar);
- //~^ ERROR: unresolved name `bar`
-
- // But this is close enough.
- println!("Hello {}", fob);
- //~^ ERROR: unresolved name `fob`. Did you mean `foo`?
-}
let u = U { a: 0, b: 1 }; //~ ERROR union expressions should have exactly one field
let u = U { a: 0, b: 1, c: 2 }; //~ ERROR union expressions should have exactly one field
//~^ ERROR union `U` has no field named `c`
+ //~| NOTE `U` does not have this field
let u = U { ..u }; //~ ERROR union expressions should have exactly one field
//~^ ERROR functional record update syntax requires a struct
let U { a, b } = u; //~ ERROR union patterns should have exactly one field
let U { a, b, c } = u; //~ ERROR union patterns should have exactly one field
//~^ ERROR union `U` does not have a field named `c`
+ //~| NOTE union `U` does not have field `c`
let U { .. } = u; //~ ERROR union patterns should have exactly one field
//~^ ERROR `..` cannot be used in union patterns
let U { a, .. } = u; //~ ERROR `..` cannot be used in union patterns
fn main() {
let u = U { principle: 0 };
//~^ ERROR union `U` has no field named `principle`
- //~| NOTE did you mean `principal`?
+ //~| NOTE field does not exist - did you mean `principal`?
let w = u.principial; //~ ERROR attempted access of field `principial` on type `U`
//~^ HELP did you mean `principal`?
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+macro_rules! macro_rules { () => {} } //~ ERROR user-defined macros may not be named `macro_rules`
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test case makes sure that the compiler does not try to re-use anything
+// from the incremental compilation cache if the cache was produced by a
+// different compiler version. This is tested by artificially forcing the
+// emission of a different compiler version in the header of rpass1 artifacts,
+// and then making sure that the only object file of the test program gets
+// re-translated although the program stays unchanged.
+
+// The `l33t haxx0r` Rust compiler is known to produce incr. comp. artifacts
+// that are outrageously incompatible with just about anything, even itself:
+//[rpass1] rustc-env:RUSTC_FORCE_INCR_COMP_ARTIFACT_HEADER="l33t haxx0r rustc 2.1 LTS"
+
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+#![rustc_partition_translated(module="cache_file_headers", cfg="rpass2")]
+
+fn main() {
+ // empty
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type="rlib"]
+
+pub static A : u32 = 32;
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type="rlib"]
+
+pub static B: u32 = 32;
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:a.rs
+// aux-build:b.rs
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+
+#[cfg(rpass1)]
+extern crate a;
+#[cfg(rpass1)]
+extern crate b;
+
+#[cfg(rpass2)]
+extern crate b;
+#[cfg(rpass2)]
+extern crate a;
+
+use a::A;
+use b::B;
+
+//? #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+pub fn main() {
+ A + B;
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+// This test case tests the incremental compilation hash (ICH) implementation
+// for struct definitions.
+
+// The general pattern followed here is: Change one thing between rev1 and rev2
+// and make sure that the hash has changed, then change nothing between rev2 and
+// rev3 and make sure that the hash has not changed.
+
+// We also test the ICH for struct definitions exported in metadata. Same as
+// above, we want to make sure that the change between rev1 and rev2 also
+// results in a change of the ICH for the struct's metadata, and that it stays
+// the same between rev2 and rev3.
+
+// must-compile-successfully
+// revisions: cfail1 cfail2 cfail3
+// compile-flags: -Z query-dep-graph
+
+
+#![allow(warnings)]
+#![feature(rustc_attrs)]
+#![crate_type="rlib"]
+
+// Layout ----------------------------------------------------------------------
+#[cfg(cfail1)]
+pub struct LayoutPacked;
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+#[repr(packed)]
+pub struct LayoutPacked;
+
+#[cfg(cfail1)]
+struct LayoutC;
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+#[repr(C)]
+struct LayoutC;
+
+
+// Tuple Struct Change Field Type ----------------------------------------------
+
+#[cfg(cfail1)]
+struct TupleStructFieldType(i32);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct TupleStructFieldType(u32);
+
+
+// Tuple Struct Add Field ------------------------------------------------------
+
+#[cfg(cfail1)]
+struct TupleStructAddField(i32);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct TupleStructAddField(i32, u32);
+
+
+// Tuple Struct Field Visibility -----------------------------------------------
+
+#[cfg(cfail1)]
+struct TupleStructFieldVisibility(char);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct TupleStructFieldVisibility(pub char);
+
+
+// Record Struct Field Type ----------------------------------------------------
+
+#[cfg(cfail1)]
+struct RecordStructFieldType { x: f32 }
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct RecordStructFieldType { x: u64 }
+
+
+// Record Struct Field Name ----------------------------------------------------
+
+#[cfg(cfail1)]
+struct RecordStructFieldName { x: f32 }
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct RecordStructFieldName { y: f32 }
+
+
+// Record Struct Add Field -----------------------------------------------------
+
+#[cfg(cfail1)]
+struct RecordStructAddField { x: f32 }
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct RecordStructAddField { x: f32, y: () }
+
+
+// Record Struct Field Visibility ----------------------------------------------
+
+#[cfg(cfail1)]
+struct RecordStructFieldVisibility { x: f32 }
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct RecordStructFieldVisibility { pub x: f32 }
+
+
+// Add Lifetime Parameter ------------------------------------------------------
+
+#[cfg(cfail1)]
+struct AddLifetimeParameter<'a>(&'a f32, &'a f64);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct AddLifetimeParameter<'a, 'b>(&'a f32, &'b f64);
+
+
+// Add Lifetime Parameter Bound ------------------------------------------------
+
+#[cfg(cfail1)]
+struct AddLifetimeParameterBound<'a, 'b>(&'a f32, &'b f64);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct AddLifetimeParameterBound<'a, 'b: 'a>(&'a f32, &'b f64);
+
+#[cfg(cfail1)]
+struct AddLifetimeParameterBoundWhereClause<'a, 'b>(&'a f32, &'b f64);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct AddLifetimeParameterBoundWhereClause<'a, 'b>(&'a f32, &'b f64)
+ where 'b: 'a;
+
+
+// Add Type Parameter ----------------------------------------------------------
+
+#[cfg(cfail1)]
+struct AddTypeParameter<T1>(T1, T1);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct AddTypeParameter<T1, T2>(T1, T2);
+
+
+// Add Type Parameter Bound ----------------------------------------------------
+
+#[cfg(cfail1)]
+struct AddTypeParameterBound<T>(T);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct AddTypeParameterBound<T: Send>(T);
+
+
+#[cfg(cfail1)]
+struct AddTypeParameterBoundWhereClause<T>(T);
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_dirty(cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail3")]
+struct AddTypeParameterBoundWhereClause<T>(T) where T: Sync;
+
+
+// Empty struct ----------------------------------------------------------------
+
+#[rustc_clean(label="Hir", cfg="cfail2")]
+#[rustc_metadata_clean(cfg="cfail2")]
+pub struct EmptyStruct;
+
+
+// Visibility ------------------------------------------------------------------
+
+#[cfg(cfail1)]
+struct Visibility;
+
+#[cfg(not(cfail1))]
+#[rustc_dirty(label="Hir", cfg="cfail2")]
+#[rustc_clean(label="Hir", cfg="cfail3")]
+#[rustc_metadata_clean(cfg="cfail3")]
+pub struct Visibility;
// END RUST SOURCE
// START rustc.node13.Deaggregator.before.mir
// bb0: {
-// var0 = arg0; // scope 0 at main.rs:8:8: 8:9
-// tmp0 = var0; // scope 1 at main.rs:9:14: 9:15
-// return = Baz { x: tmp0, y: const F32(0), z: const false }; // scope ...
-// goto -> bb1; // scope 1 at main.rs:8:1: 10:2
+// _2 = _1;
+// _3 = _2;
+// _0 = Baz { x: _3, y: const F32(0), z: const false };
+// goto -> bb1;
// }
// END rustc.node13.Deaggregator.before.mir
// START rustc.node13.Deaggregator.after.mir
// bb0: {
-// var0 = arg0; // scope 0 at main.rs:8:8: 8:9
-// tmp0 = var0; // scope 1 at main.rs:9:14: 9:15
-// (return.0: usize) = tmp0; // scope 1 at main.rs:9:5: 9:34
-// (return.1: f32) = const F32(0); // scope 1 at main.rs:9:5: 9:34
-// (return.2: bool) = const false; // scope 1 at main.rs:9:5: 9:34
-// goto -> bb1; // scope 1 at main.rs:8:1: 10:2
+// _2 = _1;
+// _3 = _2;
+// (_0.0: usize) = _3;
+// (_0.1: f32) = const F32(0);
+// (_0.2: bool) = const false;
+// goto -> bb1;
// }
-// END rustc.node13.Deaggregator.after.mir
\ No newline at end of file
+// END rustc.node13.Deaggregator.after.mir
// END RUST SOURCE
// START rustc.node10.Deaggregator.before.mir
// bb0: {
-// var0 = arg0; // scope 0 at main.rs:7:8: 7:9
-// tmp0 = var0; // scope 1 at main.rs:8:19: 8:20
-// return = Baz::Foo { x: tmp0 }; // scope 1 at main.rs:8:5: 8:21
-// goto -> bb1; // scope 1 at main.rs:7:1: 9:2
+// _2 = _1;
+// _3 = _2;
+// _0 = Baz::Foo { x: _3 };
+// goto -> bb1;
// }
// END rustc.node10.Deaggregator.before.mir
// START rustc.node10.Deaggregator.after.mir
// bb0: {
-// var0 = arg0; // scope 0 at main.rs:7:8: 7:9
-// tmp0 = var0; // scope 1 at main.rs:8:19: 8:20
-// ((return as Foo).0: usize) = tmp0; // scope 1 at main.rs:8:5: 8:21
-// discriminant(return) = 1; // scope 1 at main.rs:8:5: 8:21
-// goto -> bb1; // scope 1 at main.rs:7:1: 9:2
+// _2 = _1;
+// _3 = _2;
+// ((_0 as Foo).0: usize) = _3;
+// discriminant(_0) = 1;
+// goto -> bb1;
// }
-// END rustc.node10.Deaggregator.after.mir
\ No newline at end of file
+// END rustc.node10.Deaggregator.after.mir
// END RUST SOURCE
// START rustc.node4.SimplifyBranches.initial-before.mir
// bb0: {
-// if(const false) -> [true: bb1, false: bb2]; // scope 0 at simplify_if.rs:12:5: 14:6
+// if(const false) -> [true: bb1, false: bb2];
// }
// END rustc.node4.SimplifyBranches.initial-before.mir
// START rustc.node4.SimplifyBranches.initial-after.mir
// bb0: {
-// goto -> bb2; // scope 0 at simplify_if.rs:12:5: 14:6
+// goto -> bb2;
// }
-// END rustc.node4.SimplifyBranches.initial-after.mir
\ No newline at end of file
+// END rustc.node4.SimplifyBranches.initial-after.mir
}
// END RUST SOURCE
-// START rustc.node4.PreTrans.after.mir
+// START rustc.node4.TypeckMir.before.mir
// bb0: {
-// StorageLive(var0); // scope 0 at storage_ranges.rs:12:9: 12:10
-// var0 = const 0i32; // scope 0 at storage_ranges.rs:12:13: 12:14
-// StorageLive(var1); // scope 1 at storage_ranges.rs:14:13: 14:14
-// StorageLive(tmp1); // scope 1 at storage_ranges.rs:14:18: 14:25
-// StorageLive(tmp2); // scope 1 at storage_ranges.rs:14:23: 14:24
-// tmp2 = var0; // scope 1 at storage_ranges.rs:14:23: 14:24
-// tmp1 = std::option::Option<i32>::Some(tmp2,); // scope 1 at storage_ranges.rs:14:18: 14:25
-// var1 = &tmp1; // scope 1 at storage_ranges.rs:14:17: 14:25
-// StorageDead(tmp2); // scope 1 at storage_ranges.rs:14:23: 14:24
-// tmp0 = (); // scope 2 at storage_ranges.rs:13:5: 15:6
-// StorageDead(tmp1); // scope 1 at storage_ranges.rs:14:18: 14:25
-// StorageDead(var1); // scope 1 at storage_ranges.rs:14:13: 14:14
-// StorageLive(var2); // scope 1 at storage_ranges.rs:16:9: 16:10
-// var2 = const 1i32; // scope 1 at storage_ranges.rs:16:13: 16:14
-// return = (); // scope 3 at storage_ranges.rs:11:11: 17:2
-// StorageDead(var2); // scope 1 at storage_ranges.rs:16:9: 16:10
-// StorageDead(var0); // scope 0 at storage_ranges.rs:12:9: 12:10
-// goto -> bb1; // scope 0 at storage_ranges.rs:11:1: 17:2
+// StorageLive(_1);
+// _1 = const 0i32;
+// StorageLive(_3);
+// StorageLive(_4);
+// StorageLive(_5);
+// _5 = _1;
+// _4 = std::option::Option<i32>::Some(_5,);
+// _3 = &_4;
+// StorageDead(_5);
+// _2 = ();
+// StorageDead(_4);
+// StorageDead(_3);
+// StorageLive(_6);
+// _6 = const 1i32;
+// _0 = ();
+// StorageDead(_6);
+// StorageDead(_1);
+// goto -> bb1;
// }
//
// bb1: {
-// return; // scope 0 at storage_ranges.rs:11:1: 17:2
+// return;
// }
-// END rustc.node4.PreTrans.after.mir
+// END rustc.node4.TypeckMir.before.mir
// compile-flags: -Z parse-only
extern {
- f(); //~ ERROR expected one of `fn`, `pub`, `static`, or `}`, found `f`
+ f(); //~ ERROR expected one of `!` or `::`, found `(`
}
fn main() {
// compile-flags: -Z parse-only
trait MyTrait<T>: Iterator {
- Item = T; //~ ERROR expected one of `const`, `extern`, `fn`, `type`, or `unsafe`, found `Item`
+ Item = T; //~ ERROR expected one of `!` or `::`, found `=`
+ //~| ERROR expected item, found `=`
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// error-pattern:thread 'main' panicked at 'attempt to multiply with overflow'
+// compile-flags: -C debug-assertions
+
+fn main() {
+ let _x = 2i32.pow(1024);
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// error-pattern:thread 'main' panicked at 'attempt to multiply with overflow'
+// compile-flags: -C debug-assertions
+
+fn main() {
+ let _x = 2u32.pow(1024);
+}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// error-pattern:thread 'main' panicked at 'attempt to multiply with overflow'
-// compile-flags: -C debug-assertions
-
-fn main() {
- let _x = 2i32.pow(1024);
-}
--- /dev/null
+-include ../tools.mk
+RUSTC_FLAGS = -C link-arg="-lfoo" -C link-arg="-lbar" -Z print-link-args
+
+all:
+ $(RUSTC) $(RUSTC_FLAGS) empty.rs | grep lfoo | grep lbar
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() { }
format!("_ _ --sysroot {} --crate-type dylib", path.to_str().unwrap())
.split(' ').map(|s| s.to_string()).collect();
- let (result, _) = rustc_driver::run_compiler_with_file_loader(
- &args, &mut JitCalls, box JitLoader);
+ let (result, _) = rustc_driver::run_compiler(
+ &args, &mut JitCalls, Some(box JitLoader), None);
if let Err(n) = result {
panic!("Error {}", n);
}
--- /dev/null
+-include ../tools.mk
+
+all:
+ $(RUSTC) foo.rs
+ $(RUSTC) bar.rs --emit dep-info
+ grep "rustc-macro source" $(TMPDIR)/bar.d && exit 1 || exit 0
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_macro)]
+
+#[macro_use]
+extern crate foo;
+
+#[derive(A)]
+struct A;
+
+fn main() {
+ let _b = B;
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "rustc-macro"]
+#![feature(rustc_macro)]
+#![feature(rustc_macro_lib)]
+
+extern crate rustc_macro;
+
+use rustc_macro::TokenStream;
+
+#[rustc_macro_derive(A)]
+pub fn derive(input: TokenStream) -> TokenStream {
+ let input = input.to_string();
+ assert!(input.contains("struct A;"));
+ "struct B;".parse().unwrap()
+}
-include ../tools.mk
-# Test that #[inline(always)] functions still get inlined across compilation
-# unit boundaries. Compilation should produce three IR files, with each one
-# containing a definition of the inlined function. Also, the non-#[inline]
-# function should be defined in only one compilation unit.
+# Test that #[inline] functions still get inlined across compilation unit
+# boundaries. Compilation should produce three IR files, but only the two
+# compilation units that have a usage of the #[inline] function should
+# contain a definition. Also, the non-#[inline] function should be defined
+# in only one compilation unit.
all:
$(RUSTC) foo.rs --emit=llvm-ir -C codegen-units=3
- [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ i32\ .*inlined)" -eq "1" ]
- [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ available_externally\ i32\ .*inlined)" -eq "2" ]
+ [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ i32\ .*inlined)" -eq "0" ]
+ [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ internal\ i32\ .*inlined)" -eq "2" ]
[ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ i32\ .*normal)" -eq "1" ]
[ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c declare\ i32\ .*normal)" -eq "2" ]
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(plugin, plugin_registrar, rustc_private)]
+
+extern crate proc_macro;
+extern crate rustc_plugin;
+extern crate syntax;
+
+use proc_macro::prelude::*;
+use rustc_plugin::Registry;
+use syntax::ext::base::SyntaxExtension;
+use syntax::ext::proc_macro_shim::prelude::*;
+
+#[plugin_registrar]
+pub fn plugin_registrar(reg: &mut Registry) {
+ reg.register_syntax_extension(token::intern("attr_tru"),
+ SyntaxExtension::AttrProcMacro(Box::new(attr_tru)));
+ reg.register_syntax_extension(token::intern("attr_identity"),
+ SyntaxExtension::AttrProcMacro(Box::new(attr_identity)));
+ reg.register_syntax_extension(token::intern("tru"),
+ SyntaxExtension::ProcMacro(Box::new(tru)));
+ reg.register_syntax_extension(token::intern("ret_tru"),
+ SyntaxExtension::ProcMacro(Box::new(ret_tru)));
+ reg.register_syntax_extension(token::intern("identity"),
+ SyntaxExtension::ProcMacro(Box::new(identity)));
+}
+
+fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream {
+ lex("fn f1() -> bool { true }")
+}
+
+fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
+ let source = item.to_string();
+ lex(&source)
+}
+
+fn tru(_ts: TokenStream) -> TokenStream {
+ lex("true")
+}
+
+fn ret_tru(_ts: TokenStream) -> TokenStream {
+ lex("return true;")
+}
+
+fn identity(ts: TokenStream) -> TokenStream {
+ let source = ts.to_string();
+ lex(&source)
+}
let mut tc = TestCalls { count: 1 };
// we should never get use this filename, but lets make sure they are valid args.
let args = vec!["compiler-calls".to_string(), "foo.rs".to_string()];
- rustc_driver::run_compiler(&args, &mut tc);
+ rustc_driver::run_compiler(&args, &mut tc, None, None);
assert_eq!(tc.count, 30);
}
#![feature(rustc_private)]
-extern crate rbml;
extern crate serialize;
use std::io::Cursor;
use serialize::{Encodable, Encoder};
use serialize::json;
-
-use rbml::writer;
+use serialize::opaque;
#[derive(Encodable)]
struct Foo {
enum WireProtocol {
JSON,
- RBML,
+ Opaque,
// ...
}
fn encode_json<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
write!(wr, "{}", json::as_json(val));
}
-fn encode_rbml<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
- let mut encoder = writer::Encoder::new(wr);
+fn encode_opaque<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
+ let mut encoder = opaque::Encoder::new(wr);
val.encode(&mut encoder);
}
let proto = WireProtocol::JSON;
match proto {
WireProtocol::JSON => encode_json(&target, &mut wr),
- WireProtocol::RBML => encode_rbml(&target, &mut wr)
+ WireProtocol::Opaque => encode_opaque(&target, &mut wr)
}
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:proc_macro_def.rs
+// ignore-stage1
+// ignore-cross-compile
+
+#![feature(plugin, custom_attribute)]
+#![feature(type_macros)]
+
+#![plugin(proc_macro_def)]
+
+#[attr_tru]
+fn f1() -> bool {
+ return false;
+}
+
+#[attr_identity]
+fn f2() -> bool {
+ return identity!(true);
+}
+
+fn f3() -> identity!(bool) {
+ ret_tru!();
+}
+
+fn f4(x: bool) -> bool {
+ match x {
+ identity!(true) => false,
+ identity!(false) => true,
+ }
+}
+
+fn main() {
+ assert!(f1());
+ assert!(f2());
+ assert!(tru!());
+ assert!(f3());
+ assert!(identity!(5 == 5));
+ assert!(f4(false));
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:append-impl.rs
+
+#![feature(rustc_macro)]
+#![allow(warnings)]
+
+#[macro_use]
+extern crate append_impl;
+
+trait Append {
+ fn foo(&self);
+}
+
+#[derive(PartialEq,
+ Append,
+ Eq)]
+struct A {
+ inner: u32,
+}
+
+fn main() {
+ A { inner: 3 }.foo();
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// force-host
+// no-prefer-dynamic
+
+#![feature(rustc_macro)]
+#![feature(rustc_macro_lib)]
+#![crate_type = "rustc-macro"]
+
+extern crate rustc_macro;
+
+use rustc_macro::TokenStream;
+
+#[rustc_macro_derive(Append)]
+pub fn derive_a(input: TokenStream) -> TokenStream {
+ let mut input = input.to_string();
+ input.push_str("
+ impl Append for A {
+ fn foo(&self) {}
+ }
+ ");
+ input.parse().unwrap()
+}
pub fn derive(input: TokenStream) -> TokenStream {
let input = input.to_string();
assert!(input.contains("struct A;"));
- assert!(input.contains("#[derive(Eq, Copy, Clone)]"));
- "#[derive(Eq, Copy, Clone)] struct A;".parse().unwrap()
+ assert!(input.contains("#[derive(Debug, PartialEq, Eq, Copy, Clone)]"));
+ "#[derive(Debug, PartialEq, Eq, Copy, Clone)] struct A;".parse().unwrap()
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[derive(PartialEq, Debug)]
+struct Point { x : isize }
+
+pub fn main() {
+ assert_ne!(666,14);
+ assert_ne!("666".to_string(),"abc".to_string());
+ assert_ne!(Box::new(Point{x:666}),Box::new(Point{x:34}));
+ assert_ne!(&Point{x:666},&Point{x:34});
+ assert_ne!(666, 42, "no gods no masters");
+ assert_ne!(666, 42, "6 {} 6", "6");
+ assert_ne!(666, 42, "{x}, {y}, {z}", x = 6, y = 6, z = 6);
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub fn main() {
+ assert_ne!([6, 6, 6][..], vec![1, 2, 3][..]);
+}
pub const C1: usize = 1;
pub const C2: atomic::AtomicUsize = atomic::AtomicUsize::new(0);
-pub const C3: fn() = foo;
+pub const C3: fn() = { fn foo() {} foo };
pub const C4: usize = C1 * C1 + C1 / C1;
pub const C5: &'static usize = &C4;
pub static S1: usize = 3;
pub static S2: atomic::AtomicUsize = atomic::AtomicUsize::new(0);
-fn foo() {}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-emscripten
-
-// Test that `CString::new("hello").unwrap().as_ptr()` pattern
-// leads to failure.
-
-use std::env;
-use std::ffi::{CString, CStr};
-use std::os::raw::c_char;
-use std::process::{Command, Stdio};
-
-fn main() {
- let args: Vec<String> = env::args().collect();
- if args.len() > 1 && args[1] == "child" {
- // Repeat several times to be more confident that
- // it is `Drop` for `CString` that does the cleanup,
- // and not just some lucky UB.
- let xs = vec![CString::new("Hello").unwrap(); 10];
- let ys = xs.iter().map(|s| s.as_ptr()).collect::<Vec<_>>();
- drop(xs);
- assert!(ys.into_iter().any(is_hello));
- return;
- }
-
- let output = Command::new(&args[0]).arg("child").output().unwrap();
- assert!(!output.status.success());
-}
-
-fn is_hello(s: *const c_char) -> bool {
- // `s` is a dangling pointer and reading it is technically
- // undefined behavior. But we want to prevent the most diabolical
- // kind of UB (apart from nasal demons): reading a value that was
- // previously written.
- //
- // Segfaulting or reading an empty string is Ok,
- // reading "Hello" is bad.
- let s = unsafe { CStr::from_ptr(s) };
- let hello = CString::new("Hello").unwrap();
- s == hello.as_ref()
-}
// ignore-windows
// ignore-android
// ignore-emscripten
+// ignore-haiku
#![feature(libc)]
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+pub trait Indexable<T>: std::ops::Index<usize, Output = T> {
+ fn index2(&self, i: usize) -> &T {
+ &self[i]
+ }
+}
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Issue #34427: On ARM, the code in `foo` at one time was generating
+// a machine code instruction of the form: `str r0, [r0, rN]!` (for
+// some N), which is not legal because the source register and base
+// register cannot be identical in the preindexed form signalled by
+// the `!`.
+//
+// See LLVM bug: https://llvm.org/bugs/show_bug.cgi?id=28809
+
+#[inline(never)]
+fn foo(n: usize) -> Vec<Option<(*mut (), &'static ())>> {
+ (0..n).map(|_| None).collect()
+}
+
+fn main() {
+ let _ = (foo(10), foo(32));
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ remove_axis(&3, 0);
+}
+
+trait Dimension {
+ fn slice(&self) -> &[usize];
+}
+
+impl Dimension for () {
+ fn slice(&self) -> &[usize] { &[] }
+}
+
+impl Dimension for usize {
+ fn slice(&self) -> &[usize] {
+ unsafe {
+ ::std::slice::from_raw_parts(self, 1)
+ }
+ }
+}
+
+fn remove_axis(value: &usize, axis: usize) -> () {
+ let tup = ();
+ let mut it = tup.slice().iter();
+ for (i, _) in value.slice().iter().enumerate() {
+ if i == axis {
+ continue;
+ }
+ it.next();
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags:--test
+#![deny(private_in_public)]
+
+#[test] fn foo() {}
+mod foo {}
+
+#[test] fn core() {}
+extern crate core;
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+const C1: i32 = 0x12345678;
+const C2: isize = C1 as i16 as isize;
+
+enum E {
+ V = C2
+}
+
+fn main() {
+ assert_eq!(C2 as u64, E::V as u64);
+}
pub fn main() {
assert_eq!(mod_dir_simple::syrup::foo(), 10);
+
+ #[path = "auxiliary"]
+ mod foo {
+ mod two_macros;
+ }
+
+ #[path = "auxiliary"]
+ mod bar {
+ macro_rules! m { () => { mod two_macros; } }
+ m!();
+ }
}
// test the size hints and emptying
let mut long = 0...255u8;
- let mut short = 42...42;
+ let mut short = 42...42u8;
assert_eq!(long.size_hint(), (256, Some(256)));
assert_eq!(short.size_hint(), (1, Some(1)));
long.next();
fn main() {
let x: Tuple!(i32, i32) = (1, 2);
}
+
+fn issue_36540() {
+ let i32 = 0;
+ macro_rules! m { () => { i32 } }
+ struct S<T = m!()>(m!(), T) where T: Trait<m!()>;
+
+ let x: m!() = m!();
+ std::cell::Cell::<m!()>::new(m!());
+ impl<T = m!()> std::ops::Index<m!()> for Trait<(m!(), T)>
+ where T: Trait<m!()>
+ {
+ type Output = m!();
+ fn index(&self, i: m!()) -> &m!() {
+ unimplemented!()
+ }
+ }
+}
+
+trait Trait<T> {}
assert_eq!(other1::id_u32_iterator(), other2::id_u32_iterator());
assert!(other1::id_i32_iterator() != other1::id_u32_iterator());
assert!(TypeId::of::<other1::I32Iterator>() != TypeId::of::<other1::U32Iterator>());
+
+ // Check fn pointer against collisions
+ assert!(TypeId::of::<fn(fn(A) -> A) -> A>() !=
+ TypeId::of::<fn(fn() -> A, A) -> A>());
}
12 | format!();
| ^^^^^^^^^^
|
- = note: this error originates in a macro from the standard library
+ = note: this error originates in a macro outside of the current crate
error: expected token: `,`
--> $DIR/bad-format-args.rs:13:5
13 | format!("" 1);
| ^^^^^^^^^^^^^^
|
- = note: this error originates in a macro from the standard library
+ = note: this error originates in a macro outside of the current crate
error: expected token: `,`
--> $DIR/bad-format-args.rs:14:5
14 | format!("", 1 1);
| ^^^^^^^^^^^^^^^^^
|
- = note: this error originates in a macro from the standard library
+ = note: this error originates in a macro outside of the current crate
error: aborting due to 3 previous errors
12 | assert!("foo");
| ^^^^^^^^^^^^^^^
|
- = note: this error originates in a macro from the standard library
+ = note: this error originates in a macro outside of the current crate
error: aborting due to previous error
| ^^^^^^ cannot infer type for `_`
|
= note: type annotations or generic parameter binding required
- = note: this error originates in a macro from the standard library
+ = note: this error originates in a macro outside of the current crate
error: aborting due to previous error
--> $DIR/tab.rs:14:2
|
14 | \tbar;
- | \t^^^
+ | \t^^^ unresolved name
error: aborting due to previous error
16 | myprintln!("{}"); //~ ERROR in this macro
| ^^^^^^^^^^^^^^^^^
|
- = note: this error originates in a macro from the standard library
+ = note: this error originates in a macro outside of the current crate
error: aborting due to previous error
--> $DIR/macro-backtrace-nested.rs:15:12
|
15 | () => (fake)
- | ^^^^
+ | ^^^^ unresolved name
...
27 | 1 + call_nested_expr!();
| ------------------- in this macro invocation
--> $DIR/macro-backtrace-nested.rs:15:12
|
15 | () => (fake)
- | ^^^^
+ | ^^^^ unresolved name
...
28 | call_nested_expr_sum!();
| ------------------------ in this macro invocation
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[foo]
+mod foo {
+ #![foo]
+}
--- /dev/null
+error: The attribute `foo` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
+ --> $DIR/issue-36530.rs:11:1
+ |
+11 | #[foo]
+ | ^^^^^^
+ |
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error: The attribute `foo` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
+ --> $DIR/issue-36530.rs:13:5
+ |
+13 | #![foo]
+ | ^^^^^^^
+ |
+ = help: add #![feature(custom_attribute)] to the crate attributes to enable
+
+error: aborting due to 2 previous errors
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let foo = 1;
+
+ // `foo` shouldn't be suggested, it is too dissimilar from `bar`.
+ println!("Hello {}", bar);
+
+ // But this is close enough.
+ println!("Hello {}", fob);
+}
--- /dev/null
+error[E0425]: unresolved name `bar`
+ --> $DIR/typo-suggestion.rs:15:26
+ |
+15 | println!("Hello {}", bar);
+ | ^^^ unresolved name
+
+error[E0425]: unresolved name `fob`
+ --> $DIR/typo-suggestion.rs:18:26
+ |
+18 | println!("Hello {}", fob);
+ | ^^^ did you mean `foo`?
+
+error: aborting due to 2 previous errors
+
// testing harness and used when generating compilation
// arguments. (In particular, it propagates to the aux-builds.)
pub incremental_dir: Option<PathBuf>,
+ // Specifies that a cfail test must actually compile without errors.
+ pub must_compile_successfully: bool,
}
impl TestProps {
pub fn new() -> Self {
- let error_patterns = Vec::new();
- let aux_builds = Vec::new();
- let exec_env = Vec::new();
- let run_flags = None;
- let pp_exact = None;
- let check_lines = Vec::new();
- let build_aux_docs = false;
- let force_host = false;
- let check_stdout = false;
- let no_prefer_dynamic = false;
- let pretty_expanded = false;
- let pretty_compare_only = false;
- let forbid_output = Vec::new();
TestProps {
- error_patterns: error_patterns,
+ error_patterns: vec![],
compile_flags: vec![],
- run_flags: run_flags,
- pp_exact: pp_exact,
- aux_builds: aux_builds,
+ run_flags: None,
+ pp_exact: None,
+ aux_builds: vec![],
revisions: vec![],
rustc_env: vec![],
- exec_env: exec_env,
- check_lines: check_lines,
- build_aux_docs: build_aux_docs,
- force_host: force_host,
- check_stdout: check_stdout,
- no_prefer_dynamic: no_prefer_dynamic,
- pretty_expanded: pretty_expanded,
+ exec_env: vec![],
+ check_lines: vec![],
+ build_aux_docs: false,
+ force_host: false,
+ check_stdout: false,
+ no_prefer_dynamic: false,
+ pretty_expanded: false,
pretty_mode: format!("normal"),
- pretty_compare_only: pretty_compare_only,
- forbid_output: forbid_output,
+ pretty_compare_only: false,
+ forbid_output: vec![],
incremental_dir: None,
+ must_compile_successfully: false,
}
}
if let Some(of) = parse_forbid_output(ln) {
self.forbid_output.push(of);
}
+
+ if !self.must_compile_successfully {
+ self.must_compile_successfully = parse_must_compile_successfully(ln);
+ }
});
for key in vec!["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
parse_name_directive(line, "pretty-compare-only")
}
+fn parse_must_compile_successfully(line: &str) -> bool {
+ parse_name_directive(line, "must-compile-successfully")
+}
+
fn parse_env(line: &str, name: &str) -> Option<(String, String)> {
parse_name_value_directive(line, name).map(|nv| {
// nv is either FOO or FOO=BAR
},
color: test::AutoColor,
test_threads: None,
+ skip: vec![],
}
}
fn run_cfail_test(&self) {
let proc_res = self.compile_test();
- if proc_res.status.success() {
- self.fatal_proc_rec(
- &format!("{} test compiled successfully!", self.config.mode)[..],
- &proc_res);
- }
+ if self.props.must_compile_successfully {
+ if !proc_res.status.success() {
+ self.fatal_proc_rec(
+ "test compilation failed although it shouldn't!",
+ &proc_res);
+ }
+ } else {
+ if proc_res.status.success() {
+ self.fatal_proc_rec(
+ &format!("{} test compiled successfully!", self.config.mode)[..],
+ &proc_res);
+ }
- self.check_correct_failure_status(&proc_res);
+ self.check_correct_failure_status(&proc_res);
+ }
let output_to_check = self.get_output(&proc_res);
let expected_errors = errors::load_errors(&self.testpaths.file, self.revision);
} else {
self.check_error_patterns(&output_to_check, &proc_res);
}
+
self.check_no_compiler_crash(&proc_res);
self.check_forbid_output(&output_to_check, &proc_res);
}
output_to_check: &str,
proc_res: &ProcRes) {
if self.props.error_patterns.is_empty() {
- self.fatal(&format!("no error pattern specified in {:?}",
- self.testpaths.file.display()));
+ if self.props.must_compile_successfully {
+ return
+ } else {
+ self.fatal(&format!("no error pattern specified in {:?}",
+ self.testpaths.file.display()));
+ }
}
let mut next_err_idx = 0;
let mut next_err_pat = self.props.error_patterns[next_err_idx].trim();
fn check_no_compiler_crash(&self, proc_res: &ProcRes) {
for line in proc_res.stderr.lines() {
- if line.starts_with("error: internal compiler error:") {
+ if line.contains("error: internal compiler error") {
self.fatal_proc_rec("compiler encountered internal error", proc_res);
}
}
("darwin", "macos"),
("dragonfly", "dragonfly"),
("freebsd", "freebsd"),
+ ("haiku", "haiku"),
("ios", "ios"),
("linux", "linux"),
("mingw32", "windows"),
#[cfg(unix)]
pub fn check(path: &Path, bad: &mut bool) {
use std::fs;
+ use std::io::Read;
use std::process::{Command, Stdio};
use std::os::unix::prelude::*;
+ if let Ok(mut file) = fs::File::open("/proc/version") {
+ let mut contents = String::new();
+ file.read_to_string(&mut contents).unwrap();
+ // Probably running under the Windows Subsystem for Linux, where all
+ // files are reported as executable, so skip this check.
+ if contents.contains("Microsoft") {
+ return;
+ }
+ }
+
super::walk(path,
&mut |path| super::filter_dirs(path) || path.ends_with("src/etc"),
&mut |file| {