-Subproject commit 5f3b9c4c6a7be1f177d6024cb83d150b6479148a
+Subproject commit c995e9eb5acf3976ae8674a0dc6d9e958053d9fd
dependencies = [
"build_helper 0.1.0",
"core 0.0.0",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.0.0",
]
name = "arena"
version = "0.0.0"
+[[package]]
+name = "atty"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "bitflags"
version = "0.5.0"
[[package]]
name = "bitflags"
-version = "0.7.0"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
"build_helper 0.1.0",
"cmake 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
name = "build-manifest"
version = "0.1.0"
dependencies = [
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "clap"
-version = "2.20.5"
+version = "2.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bitflags 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
dependencies = [
"build_helper 0.1.0",
"core 0.0.0",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.0.0"
dependencies = [
"build_helper 0.1.0",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "gcc"
-version = "0.3.43"
+version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
"pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
version = "0.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.21.1 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"handlebars 0.25.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "rls-data"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rls-span 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rls-span"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "rustbook"
version = "0.1.0"
dependencies = [
- "clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.21.1 (registry+https://github.com/rust-lang/crates.io-index)",
"mdbook 0.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-serialize"
-version = "0.3.22"
+version = "0.3.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
version = "0.0.0"
dependencies = [
"build_helper 0.1.0",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_bitflags 0.0.0",
]
version = "0.0.0"
dependencies = [
"log 0.0.0",
+ "rls-data 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
- "serialize 0.0.0",
+ "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
dependencies = [
"arena 0.0.0",
"build_helper 0.1.0",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.0.0",
"rustc 0.0.0",
"rustc_back 0.0.0",
"collections 0.0.0",
"compiler_builtins 0.0.0",
"core 0.0.0",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.0.0",
"panic_abort 0.0.0",
"panic_unwind 0.0.0",
version = "0.1.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "vec_map"
-version = "0.6.0"
+version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
[metadata]
"checksum aho-corasick 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0638fd549427caa90c499814196d1b9e3725eb4d15d7339d6de073a680ed0ca2"
"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
+"checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159"
"checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23"
-"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
-"checksum clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7db281b0520e97fbd15cd615dcd8f8bcad0c26f5f7d5effe705f090f39e9a758"
+"checksum bitflags 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "826e1ab483fc81a8143faa7203c4a3c02888ebd1a782e37e41fa34753ba9a162"
+"checksum clap 2.21.1 (registry+https://github.com/rust-lang/crates.io-index)" = "74a80f603221c9cd9aa27a28f52af452850051598537bb6b359c38a7d61e5cda"
"checksum cmake 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "e1acc68a3f714627af38f9f5d09706a28584ba60dfe2cca68f40bf779f941b25"
"checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83"
"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
-"checksum gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)" = "c07c758b972368e703a562686adb39125707cc1ef3399da8c019fc6c2498a75d"
+"checksum gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)" = "a32cd40070d7611ab76343dcb3204b2bb28c8a9450989a83a3d590248142f439"
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
"checksum handlebars 0.25.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b2249f6f0dc5a3bb2b3b1a8f797dfccbc4b053344d773d654ad565e51427d335"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
"checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01"
"checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457"
-"checksum rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "237546c689f20bb44980270c73c3b9edd0891c1be49cc1274406134a66d3957b"
+"checksum rls-data 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "af1dfff00189fd7b78edb9af131b0de703676c04fa8126aed77fd2c586775a4d"
+"checksum rls-span 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8656f7b850ac85fb204ef94318c641bbb15a32766e12f9a589a23e4c0fbc38db"
+"checksum rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "684ce48436d6465300c9ea783b6b14c4361d6b8dcbb1375b486a69cc19e2dfb0"
"checksum serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)" = "a702319c807c016e51f672e5c77d6f0b46afddd744b5e437d6b8436b888b458f"
"checksum serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)" = "dbc45439552eb8fb86907a2c41c1fd0ef97458efb87ff7f878db466eb581824e"
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
-"checksum vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cac5efe5cb0fa14ec2f84f83c701c562ee63f6dcc680861b21d65c682adfb05f"
+"checksum vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8cdc8b93bd0198ed872357fb2e667f7125646b1762f16d60b2c96350d361897"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
cmd.arg("-Cprefer-dynamic");
}
+ // Pass the `rustbuild` feature flag to crates which rustbuild is
+ // building. See the comment in bootstrap/lib.rs where this env var is
+ // set for more details.
+ if env::var_os("RUSTBUILD_UNSTABLE").is_some() {
+ cmd.arg("--cfg").arg("rustbuild");
+ }
+
// Help the libc crate compile by assisting it in finding the MUSL
// native libraries.
if let Some(s) = env::var_os("MUSL_ROOT") {
use Build;
pub fn clean(build: &Build) {
- rm_rf(build, "tmp".as_ref());
- rm_rf(build, &build.out.join("tmp"));
- rm_rf(build, &build.out.join("dist"));
+ rm_rf("tmp".as_ref());
+ rm_rf(&build.out.join("tmp"));
+ rm_rf(&build.out.join("dist"));
for host in build.config.host.iter() {
let entries = match build.out.join(host).read_dir() {
continue
}
let path = t!(entry.path().canonicalize());
- rm_rf(build, &path);
+ rm_rf(&path);
}
}
}
-fn rm_rf(build: &Build, path: &Path) {
+fn rm_rf(path: &Path) {
if !path.exists() {
return
}
let file = t!(file).path();
if file.is_dir() {
- rm_rf(build, &file);
+ rm_rf(&file);
} else {
// On windows we can't remove a readonly file, and git will
// often clone files as readonly. As a result, we have some
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mod generating output in a different directory.
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, PartialEq, Eq)]
pub enum Mode {
/// This cargo is going to build the standard library, placing output in the
/// "stageN-std" directory.
// For other crates, however, we know that we've already got a standard
// library up and running, so we can use the normal compiler to compile
// build scripts in that situation.
- if let Mode::Libstd = mode {
+ if mode == Mode::Libstd {
cargo.env("RUSTC_SNAPSHOT", &self.rustc)
.env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
} else {
.env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler));
}
+ // There are two invariants we must maintain:
+ // * stable crates cannot depend on unstable crates (general Rust rule),
+ // * crates that end up in the sysroot must be unstable (rustbuild rule).
+ //
+ // In order to enforce the latter, we pass the env var
+ // `RUSTBUILD_UNSTABLE` down the line for any crates which will end up
+ // in the sysroot. We read this in bootstrap/bin/rustc.rs and if it is
+ // set, then we pass the `rustbuild` feature to rustc when building the
+ // crate.
+ //
+ // In turn, crates that can be used here should recognise the `rustbuild`
+ // feature and opt-in to `rustc_private`.
+ //
+ // We can't always pass `rustbuild` because crates which are outside of
+ // the compiler, libs, and tests are stable and we don't want to make
+ // their deps unstable (since this would break the first invariant
+ // above).
+ if mode != Mode::Tool {
+ cargo.env("RUSTBUILD_UNSTABLE", "1");
+ }
+
// Ignore incremental modes except for stage0, since we're
// not guaranteeing correctness across builds if the compiler
// is changing under your feet.
--enable-extended \
--enable-sanitizers
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
+
+# This is the only builder which will create source tarballs
ENV DIST_SRC 1
+
+# When we build cargo in this container, we don't want it to use the system
+# libcurl, instead it should compile its own.
+ENV LIBCURL_NO_PKG_CONFIG 1
- [repr_simd](repr-simd.md)
- [rustc_attrs](rustc-attrs.md)
- [rustc_diagnostic_macros](rustc-diagnostic-macros.md)
+- [rvalue_static_promotion](rvalue-static-promotion.md)
- [sanitizer_runtime](sanitizer-runtime.md)
- [simd](simd.md)
- [simd_ffi](simd-ffi.md)
--- /dev/null
+# `rvalue_static_promotion`
+
+The tracking issue for this feature is: [#38865]
+
+[#38865]: https://github.com/rust-lang/rust/issues/38865
+
+------------------------
+
+The `rvalue_static_promotion` feature allows directly creating `'static` references to
+constant `rvalue`s, which in particular allows for more concise code in the common case
+in which a `'static` reference is all that's needed.
+
+
+## Examples
+
+```rust
+#![feature(rvalue_static_promotion)]
+
+fn main() {
+ let DEFAULT_VALUE: &'static u32 = &42;
+ assert_eq!(*DEFAULT_VALUE, 42);
+}
+```
/// # Examples
///
/// ```
- /// #![feature(rc_raw)]
- ///
/// use std::sync::Arc;
///
/// let x = Arc::new(10);
/// let x_ptr = Arc::into_raw(x);
/// assert_eq!(unsafe { *x_ptr }, 10);
/// ```
- #[unstable(feature = "rc_raw", issue = "37197")]
- pub fn into_raw(this: Self) -> *mut T {
- let ptr = unsafe { &mut (**this.ptr).data as *mut _ };
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub fn into_raw(this: Self) -> *const T {
+ let ptr = unsafe { &(**this.ptr).data as *const _ };
mem::forget(this);
ptr
}
/// # Examples
///
/// ```
- /// #![feature(rc_raw)]
- ///
/// use std::sync::Arc;
///
/// let x = Arc::new(10);
///
/// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
/// ```
- #[unstable(feature = "rc_raw", issue = "37197")]
- pub unsafe fn from_raw(ptr: *mut T) -> Self {
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
// To find the corresponding pointer to the `ArcInner` we need to subtract the offset of the
// `data` field from the pointer.
- Arc { ptr: Shared::new((ptr as *mut u8).offset(-offset_of!(ArcInner<T>, data)) as *mut _) }
+ let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
+ Arc {
+ ptr: Shared::new(ptr as *const _),
+ }
}
}
// Non-inlined part of `drop`.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
- let ptr = *self.ptr;
+ let ptr = self.ptr.as_mut_ptr();
// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
}
#[inline]
- #[unstable(feature = "ptr_eq",
- reason = "newly added",
- issue = "36497")]
+ #[stable(feature = "ptr_eq", since = "1.17.0")]
/// Returns true if the two `Arc`s point to the same value (not
/// just values that compare as equal).
///
/// # Examples
///
/// ```
- /// #![feature(ptr_eq)]
- ///
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
unsafe {
- let inner = &mut **this.ptr;
+ let inner = &mut *this.ptr.as_mut_ptr();
&mut inner.data
}
}
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
unsafe {
- let inner = &mut **this.ptr;
+ let inner = &mut *this.ptr.as_mut_ptr();
Some(&mut inner.data)
}
} else {
/// # Examples
///
/// ```
- /// #![feature(rc_raw)]
- ///
/// use std::rc::Rc;
///
/// let x = Rc::new(10);
/// let x_ptr = Rc::into_raw(x);
/// assert_eq!(unsafe { *x_ptr }, 10);
/// ```
- #[unstable(feature = "rc_raw", issue = "37197")]
- pub fn into_raw(this: Self) -> *mut T {
- let ptr = unsafe { &mut (**this.ptr).value as *mut _ };
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub fn into_raw(this: Self) -> *const T {
+ let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
mem::forget(this);
ptr
}
/// # Examples
///
/// ```
- /// #![feature(rc_raw)]
- ///
/// use std::rc::Rc;
///
/// let x = Rc::new(10);
///
/// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
/// ```
- #[unstable(feature = "rc_raw", issue = "37197")]
- pub unsafe fn from_raw(ptr: *mut T) -> Self {
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
// To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
// `value` field from the pointer.
- Rc { ptr: Shared::new((ptr as *mut u8).offset(-offset_of!(RcBox<T>, value)) as *mut _) }
+ Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
}
}
#[stable(feature = "rc_unique", since = "1.4.0")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) {
- let inner = unsafe { &mut **this.ptr };
+ let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
Some(&mut inner.value)
} else {
None
}
#[inline]
- #[unstable(feature = "ptr_eq",
- reason = "newly added",
- issue = "36497")]
+ #[stable(feature = "ptr_eq", since = "1.17.0")]
/// Returns true if the two `Rc`s point to the same value (not
/// just values that compare as equal).
///
/// # Examples
///
/// ```
- /// #![feature(ptr_eq)]
- ///
/// use std::rc::Rc;
///
/// let five = Rc::new(5);
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value.
- let inner = unsafe { &mut **this.ptr };
+ let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
&mut inner.value
}
}
/// ```
fn drop(&mut self) {
unsafe {
- let ptr = *self.ptr;
+ let ptr = self.ptr.as_mut_ptr();
self.dec_strong();
if self.strong() == 0 {
}
/// An iterator over a sub-range of BTreeMap's entries.
+#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, K: 'a, V: 'a> {
front: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
back: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
}
/// A mutable iterator over a sub-range of BTreeMap's entries.
+#[stable(feature = "btree_range", since = "1.17.0")]
pub struct RangeMut<'a, K: 'a, V: 'a> {
front: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
back: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
/// Basic usage:
///
/// ```
- /// #![feature(btree_range, collections_bound)]
- ///
/// use std::collections::BTreeMap;
/// use std::collections::Bound::Included;
///
/// }
/// assert_eq!(Some((&5, &"b")), map.range(4..).next());
/// ```
- #[unstable(feature = "btree_range",
- reason = "matches collection reform specification, waiting for dust to settle",
- issue = "27787")]
+ #[stable(feature = "btree_range", since = "1.17.0")]
pub fn range<T: ?Sized, R>(&self, range: R) -> Range<K, V>
where T: Ord, K: Borrow<T>, R: RangeArgument<T>
{
/// Basic usage:
///
/// ```
- /// #![feature(btree_range)]
- ///
/// use std::collections::BTreeMap;
///
/// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"].iter()
/// println!("{} => {}", name, balance);
/// }
/// ```
- #[unstable(feature = "btree_range",
- reason = "matches collection reform specification, waiting for dust to settle",
- issue = "27787")]
+ #[stable(feature = "btree_range", since = "1.17.0")]
pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<K, V>
where T: Ord, K: Borrow<T>, R: RangeArgument<T>
{
/// [`BTreeSet`]: struct.BTreeSet.html
/// [`range`]: struct.BTreeSet.html#method.range
#[derive(Debug)]
+#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, T: 'a> {
iter: ::btree_map::Range<'a, T, ()>,
}
/// # Examples
///
/// ```
- /// #![feature(btree_range, collections_bound)]
- ///
/// use std::collections::BTreeSet;
/// use std::collections::Bound::Included;
///
/// }
/// assert_eq!(Some(&5), set.range(4..).next());
/// ```
- #[unstable(feature = "btree_range",
- reason = "matches collection reform specification, waiting for dust to settle",
- issue = "27787")]
+ #[stable(feature = "btree_range", since = "1.17.0")]
pub fn range<K: ?Sized, R>(&self, range: R) -> Range<T>
where K: Ord, T: Borrow<K>, R: RangeArgument<K>
{
//! like `{:08}` would yield `00000001` for the integer `1`, while the
//! same format would yield `-0000001` for the integer `-1`. Notice that
//! the negative version has one fewer zero than the positive version.
+//! Note that padding zeroes are always placed after the sign (if any)
+//! and before the digits. When used together with the `#` flag, a similar
+//! rule applies: padding zeroes are inserted after the prefix but before
+//! the digits.
//!
//! ## Width
//!
}
/// An endpoint of a range of keys.
-#[unstable(feature = "collections_bound", issue = "27787")]
+#[stable(feature = "collections_bound", since = "1.17.0")]
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum Bound<T> {
/// An inclusive bound.
+ #[stable(feature = "collections_bound", since = "1.17.0")]
Included(T),
/// An exclusive bound.
+ #[stable(feature = "collections_bound", since = "1.17.0")]
Excluded(T),
/// An infinite endpoint. Indicates that there is no bound in this direction.
+ #[stable(feature = "collections_bound", since = "1.17.0")]
Unbounded,
}
match self.head {
None => self.tail = node,
- Some(head) => (**head).prev = node,
+ Some(head) => (*head.as_mut_ptr()).prev = node,
}
self.head = node;
#[inline]
fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
self.head.map(|node| unsafe {
- let node = Box::from_raw(*node);
+ let node = Box::from_raw(node.as_mut_ptr());
self.head = node.next;
match self.head {
None => self.tail = None,
- Some(head) => (**head).prev = None,
+ Some(head) => (*head.as_mut_ptr()).prev = None,
}
self.len -= 1;
match self.tail {
None => self.head = node,
- Some(tail) => (**tail).next = node,
+ Some(tail) => (*tail.as_mut_ptr()).next = node,
}
self.tail = node;
#[inline]
fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
self.tail.map(|node| unsafe {
- let node = Box::from_raw(*node);
+ let node = Box::from_raw(node.as_mut_ptr());
self.tail = node.prev;
match self.tail {
None => self.head = None,
- Some(tail) => (**tail).next = None,
+ Some(tail) => (*tail.as_mut_ptr()).next = None,
}
self.len -= 1;
Some(tail) => {
if let Some(other_head) = other.head.take() {
unsafe {
- (**tail).next = Some(other_head);
- (**other_head).prev = Some(tail);
+ (*tail.as_mut_ptr()).next = Some(other_head);
+ (*other_head.as_mut_ptr()).prev = Some(tail);
}
self.tail = other.tail.take();
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front_mut(&mut self) -> Option<&mut T> {
- self.head.map(|node| unsafe { &mut (**node).element })
+ self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
}
/// Provides a reference to the back element, or `None` if the list is
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back_mut(&mut self) -> Option<&mut T> {
- self.tail.map(|node| unsafe { &mut (**node).element })
+ self.tail.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
}
/// Adds an element first in the list.
let second_part_head;
unsafe {
- second_part_head = (**split_node.unwrap()).next.take();
+ second_part_head = (*split_node.unwrap().as_mut_ptr()).next.take();
if let Some(head) = second_part_head {
- (**head).prev = None;
+ (*head.as_mut_ptr()).prev = None;
}
}
None
} else {
self.head.map(|node| unsafe {
- let node = &mut **node;
+ let node = &mut *node.as_mut_ptr();
self.len -= 1;
self.head = node.next;
&mut node.element
None
} else {
self.tail.map(|node| unsafe {
- let node = &mut **node;
+ let node = &mut *node.as_mut_ptr();
self.len -= 1;
self.tail = node.prev;
&mut node.element
element: element,
})));
- (**prev).next = node;
- (**head).prev = node;
+ (*prev.as_mut_ptr()).next = node;
+ (*head.as_mut_ptr()).prev = node;
self.list.len += 1;
},
if self.len == 0 {
None
} else {
- self.head.map(|node| unsafe { &mut (**node).element })
+ self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
}
}
}
/// ```
/// #![feature(collections)]
/// #![feature(collections_range)]
- /// #![feature(collections_bound)]
///
/// extern crate collections;
///
/// ```
/// #![feature(collections)]
/// #![feature(collections_range)]
- /// #![feature(collections_bound)]
///
/// extern crate collections;
///
core_slice::SliceExt::first_mut(self)
}
- /// Returns the first and all the rest of the elements of a slice.
+ /// Returns the first and all the rest of the elements of a slice, or `None` if it is empty.
///
/// # Examples
///
core_slice::SliceExt::split_first(self)
}
- /// Returns the first and all the rest of the elements of a slice.
+ /// Returns the first and all the rest of the elements of a slice, or `None` if it is empty.
///
/// # Examples
///
core_slice::SliceExt::split_first_mut(self)
}
- /// Returns the last and all the rest of the elements of a slice.
+ /// Returns the last and all the rest of the elements of a slice, or `None` if it is empty.
///
/// # Examples
///
}
- /// Returns the last and all the rest of the elements of a slice.
+ /// Returns the last and all the rest of the elements of a slice, or `None` if it is empty.
///
/// # Examples
///
/// The caller must ensure that the slice outlives the pointer this
/// function returns, or else it will end up pointing to garbage.
///
- /// Modifying the slice may cause its buffer to be reallocated, which
- /// would also make any pointers to it invalid.
+ /// Modifying the container referenced by this slice may cause its buffer
+ /// to be reallocated, which would also make any pointers to it invalid.
///
/// # Examples
///
/// The caller must ensure that the slice outlives the pointer this
/// function returns, or else it will end up pointing to garbage.
///
- /// Modifying the slice may cause its buffer to be reallocated, which
- /// would also make any pointers to it invalid.
+ /// Modifying the container referenced by this slice may cause its buffer
+ /// to be reallocated, which would also make any pointers to it invalid.
///
/// # Examples
///
for _x in self.by_ref() {}
// RawVec handles deallocation
- let _ = unsafe { RawVec::from_raw_parts(*self.buf, self.cap) };
+ let _ = unsafe { RawVec::from_raw_parts(self.buf.as_mut_ptr(), self.cap) };
}
}
if self.tail_len > 0 {
unsafe {
- let source_vec = &mut **self.vec;
+ let source_vec = &mut *self.vec.as_mut_ptr();
// memmove back untouched tail, update to new length
let start = source_vec.len();
let tail = self.tail_start;
fn drop(&mut self) {
for _ in self.by_ref() {}
- let source_deque = unsafe { &mut **self.deque };
+ let source_deque = unsafe { &mut *self.deque.as_mut_ptr() };
// T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
//
#![feature(binary_heap_extras)]
#![feature(binary_heap_peek_mut_pop)]
#![feature(box_syntax)]
-#![feature(btree_range)]
#![feature(inclusive_range_syntax)]
#![feature(collection_placement)]
#![feature(collections)]
-#![feature(collections_bound)]
#![feature(const_fn)]
#![feature(exact_size_is_empty)]
#![feature(pattern)]
macro_rules! ashl {
($a:expr, $b:expr, $ty:ty) => {{
let (a, b) = ($a, $b);
- let bits = (::core::mem::size_of::<$ty>() * 8) as $ty;
- let half_bits = bits >> 1;
+ let bits = ::core::mem::size_of::<$ty>().wrapping_mul(8) as $ty;
+ let half_bits = bits.wrapping_shr(1);
if b & half_bits != 0 {
<$ty>::from_parts(0, a.low().wrapping_shl(
b.wrapping_sub(half_bits) as u32))
macro_rules! ashr {
($a: expr, $b: expr, $ty:ty) => {{
let (a, b) = ($a, $b);
- let bits = (::core::mem::size_of::<$ty>() * 8) as $ty;
- let half_bits = bits >> 1;
+ let bits = ::core::mem::size_of::<$ty>().wrapping_mul(8) as $ty;
+ let half_bits = bits.wrapping_shr(1);
if b & half_bits != 0 {
<$ty>::from_parts(a.high().wrapping_shr(b.wrapping_sub(half_bits) as u32)
as <$ty as LargeInt>::LowHalf,
macro_rules! lshr {
($a: expr, $b: expr, $ty:ty) => {{
let (a, b) = ($a, $b);
- let bits = (::core::mem::size_of::<$ty>() * 8) as $ty;
- let half_bits = bits >> 1;
+ let bits = ::core::mem::size_of::<$ty>().wrapping_mul(8) as $ty;
+ let half_bits = bits.wrapping_shr(1);
if b & half_bits != 0 {
<$ty>::from_parts(a.high().wrapping_shr(b.wrapping_sub(half_bits) as u32), 0)
} else if b == 0 {
macro_rules! mul {
($a:expr, $b:expr, $ty: ty, $tyh: ty) => {{
let (a, b) = ($a, $b);
- let half_bits = ((::core::mem::size_of::<$tyh>() * 8) / 2) as u32;
+ let half_bits = ::core::mem::size_of::<$tyh>().wrapping_mul(4) as u32;
let lower_mask = (!0u64).wrapping_shr(half_bits);
let mut low = (a.low() & lower_mask).wrapping_mul(b.low() & lower_mask);
let mut t = low.wrapping_shr(half_bits);
let mantissa_fraction = repr & <$fromty as FloatStuff>::MANTISSA_MASK;
let mantissa = mantissa_fraction | <$fromty as FloatStuff>::MANTISSA_LEAD_BIT;
if sign == -1.0 || exponent < 0 { return 0 as u128; }
- if exponent > ::core::mem::size_of::<$outty>() as i32 * 8 {
+ if exponent > ::core::mem::size_of::<$outty>().wrapping_mul(8) as i32 {
return !(0 as u128);
}
(if exponent < (<$fromty as FloatStuff>::MANTISSA_BITS) as i32 {
let mantissa = mantissa_fraction | <$fromty as FloatStuff>::MANTISSA_LEAD_BIT;
if exponent < 0 { return 0 as i128; }
- if exponent > ::core::mem::size_of::<$outty>() as i32 * 8 {
+ if exponent > ::core::mem::size_of::<$outty>().wrapping_mul(8) as i32 {
let ret = if sign > 0.0 { <$outty>::max_value() } else { <$outty>::min_value() };
return ret
}
/// # Examples
///
/// ```
- /// #![feature(move_cell)]
/// use std::cell::Cell;
///
/// let c1 = Cell::new(5i32);
/// assert_eq!(5, c2.get());
/// ```
#[inline]
- #[unstable(feature = "move_cell", issue = "39264")]
+ #[stable(feature = "move_cell", since = "1.17.0")]
pub fn swap(&self, other: &Self) {
if ptr::eq(self, other) {
return;
/// # Examples
///
/// ```
- /// #![feature(move_cell)]
/// use std::cell::Cell;
///
/// let c = Cell::new(5);
///
/// assert_eq!(5, old);
/// ```
- #[unstable(feature = "move_cell", issue = "39264")]
+ #[stable(feature = "move_cell", since = "1.17.0")]
pub fn replace(&self, val: T) -> T {
mem::replace(unsafe { &mut *self.value.get() }, val)
}
/// # Examples
///
/// ```
- /// #![feature(move_cell)]
/// use std::cell::Cell;
///
/// let c = Cell::new(5);
///
/// assert_eq!(five, 5);
/// ```
- #[unstable(feature = "move_cell", issue = "39264")]
+ #[stable(feature = "move_cell", since = "1.17.0")]
pub fn into_inner(self) -> T {
unsafe { self.value.into_inner() }
}
/// # Examples
///
/// ```
- /// #![feature(move_cell)]
/// use std::cell::Cell;
///
/// let c = Cell::new(5);
/// assert_eq!(five, 5);
/// assert_eq!(c.into_inner(), 0);
/// ```
- #[unstable(feature = "move_cell", issue = "39264")]
+ #[stable(feature = "move_cell", since = "1.17.0")]
pub fn take(&self) -> T {
self.replace(Default::default())
}
#[unstable(feature = "try_from", issue = "33417")]
impl TryFrom<u32> for char {
- type Err = CharTryFromError;
+ type Error = CharTryFromError;
#[inline]
- fn try_from(i: u32) -> Result<Self, Self::Err> {
+ fn try_from(i: u32) -> Result<Self, Self::Error> {
if (i > MAX as u32) || (i >= 0xD800 && i <= 0xDFFF) {
Err(CharTryFromError(()))
} else {
/// # Examples
///
/// ```
- /// #![feature(ordering_chaining)]
- ///
/// use std::cmp::Ordering;
///
/// let result = Ordering::Equal.then(Ordering::Less);
/// assert_eq!(result, Ordering::Less);
/// ```
#[inline]
- #[unstable(feature = "ordering_chaining", issue = "37053")]
+ #[stable(feature = "ordering_chaining", since = "1.17.0")]
pub fn then(self, other: Ordering) -> Ordering {
match self {
Equal => other,
/// # Examples
///
/// ```
- /// #![feature(ordering_chaining)]
- ///
/// use std::cmp::Ordering;
///
/// let result = Ordering::Equal.then_with(|| Ordering::Less);
/// assert_eq!(result, Ordering::Less);
/// ```
#[inline]
- #[unstable(feature = "ordering_chaining", issue = "37053")]
+ #[stable(feature = "ordering_chaining", since = "1.17.0")]
pub fn then_with<F: FnOnce() -> Ordering>(self, f: F) -> Ordering {
match self {
Equal => f(),
#![stable(feature = "rust1", since = "1.0.0")]
+use str::FromStr;
+
/// A cheap, reference-to-reference conversion.
///
/// `AsRef` is very similar to, but different than, [`Borrow`]. See
#[unstable(feature = "try_from", issue = "33417")]
pub trait TryInto<T>: Sized {
/// The type returned in the event of a conversion error.
- type Err;
+ type Error;
/// Performs the conversion.
- fn try_into(self) -> Result<T, Self::Err>;
+ fn try_into(self) -> Result<T, Self::Error>;
}
/// Attempt to construct `Self` via a conversion.
#[unstable(feature = "try_from", issue = "33417")]
pub trait TryFrom<T>: Sized {
/// The type returned in the event of a conversion error.
- type Err;
+ type Error;
/// Performs the conversion.
- fn try_from(value: T) -> Result<Self, Self::Err>;
+ fn try_from(value: T) -> Result<Self, Self::Error>;
}
////////////////////////////////////////////////////////////////////////////////
// TryFrom implies TryInto
#[unstable(feature = "try_from", issue = "33417")]
impl<T, U> TryInto<U> for T where U: TryFrom<T> {
- type Err = U::Err;
+ type Error = U::Error;
- fn try_into(self) -> Result<U, U::Err> {
+ fn try_into(self) -> Result<U, U::Error> {
U::try_from(self)
}
}
self
}
}
+
+// FromStr implies TryFrom<&str>
+#[unstable(feature = "try_from", issue = "33417")]
+impl<'a, T> TryFrom<&'a str> for T where T: FromStr {
+ type Error = <T as FromStr>::Err;
+
+ fn try_from(s: &'a str) -> Result<T, Self::Error> {
+ FromStr::from_str(s)
+ }
+}
// is zero
Some(min) if self.sign_aware_zero_pad() => {
self.fill = '0';
+ self.align = rt::v1::Alignment::Right;
write_prefix(self)?;
self.with_padding(min - width, rt::v1::Alignment::Right, |f| {
f.buf.write_str(buf)
// for the sign-aware zero padding, we render the sign first and
// behave as if we had no sign from the beginning.
let mut formatted = formatted.clone();
- let mut align = self.align;
let old_fill = self.fill;
+ let old_align = self.align;
+ let mut align = old_align;
if self.sign_aware_zero_pad() {
// a sign always goes first
let sign = unsafe { str::from_utf8_unchecked(formatted.sign) };
width = if width < sign.len() { 0 } else { width - sign.len() };
align = rt::v1::Alignment::Right;
self.fill = '0';
+ self.align = rt::v1::Alignment::Right;
}
// remaining parts go through the ordinary padding process.
})
};
self.fill = old_fill;
+ self.align = old_align;
ret
} else {
// this is the common case and we take a shortcut
issue = "0")]
#![allow(missing_docs)]
-extern "rust-intrinsic" {
+#[cfg(not(stage0))]
+#[stable(feature = "drop_in_place", since = "1.8.0")]
+#[rustc_deprecated(reason = "no longer an intrinsic - use `ptr::drop_in_place` directly",
+ since = "1.18.0")]
+pub use ptr::drop_in_place;
+extern "rust-intrinsic" {
// NB: These intrinsics take raw pointers because they mutate aliased
// memory, which is not valid for either `&` or `&mut`.
pub fn size_of_val<T: ?Sized>(_: &T) -> usize;
pub fn min_align_of_val<T: ?Sized>(_: &T) -> usize;
+ #[cfg(stage0)]
/// Executes the destructor (if any) of the pointed-to value.
///
/// This has two use cases:
/// undefined behavior where y = 0 or x = `T::min_value()` and y = -1
pub fn unchecked_rem<T>(x: T, y: T) -> T;
+ /// Performs an unchecked left shift, resulting in undefined behavior when
+ /// y < 0 or y >= N, where N is the width of T in bits.
+ #[cfg(not(stage0))]
+ pub fn unchecked_shl<T>(x: T, y: T) -> T;
+ /// Performs an unchecked right shift, resulting in undefined behavior when
+ /// y < 0 or y >= N, where N is the width of T in bits.
+ #[cfg(not(stage0))]
+ pub fn unchecked_shr<T>(x: T, y: T) -> T;
+
/// Returns (a + b) mod 2^N, where N is the width of T in bits.
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `wrapping_add` method. For example,
// `Int` + `SignedInt` implemented for signed integers
macro_rules! int_impl {
- ($ActualT:ident, $UnsignedT:ty, $BITS:expr,
+ ($SelfT:ty, $ActualT:ident, $UnsignedT:ty, $BITS:expr,
$add_with_overflow:path,
$sub_with_overflow:path,
$mul_with_overflow:path) => {
/// ```
#[stable(feature = "num_wrapping", since = "1.2.0")]
#[inline(always)]
+ #[cfg(not(stage0))]
+ pub fn wrapping_shl(self, rhs: u32) -> Self {
+ unsafe {
+ intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT)
+ }
+ }
+
+ /// Stage 0
+ #[stable(feature = "num_wrapping", since = "1.2.0")]
+ #[inline(always)]
+ #[cfg(stage0)]
pub fn wrapping_shl(self, rhs: u32) -> Self {
self.overflowing_shl(rhs).0
}
/// ```
#[stable(feature = "num_wrapping", since = "1.2.0")]
#[inline(always)]
+ #[cfg(not(stage0))]
+ pub fn wrapping_shr(self, rhs: u32) -> Self {
+ unsafe {
+ intrinsics::unchecked_shr(self, (rhs & ($BITS - 1)) as $SelfT)
+ }
+ }
+
+ /// Stage 0
+ #[stable(feature = "num_wrapping", since = "1.2.0")]
+ #[inline(always)]
+ #[cfg(stage0)]
pub fn wrapping_shr(self, rhs: u32) -> Self {
self.overflowing_shr(rhs).0
}
/// ```
#[inline]
#[stable(feature = "wrapping", since = "1.7.0")]
+ #[cfg(not(stage0))]
+ pub fn overflowing_shl(self, rhs: u32) -> (Self, bool) {
+ (self.wrapping_shl(rhs), (rhs > ($BITS - 1)))
+ }
+
+ /// Stage 0
+ #[inline]
+ #[stable(feature = "wrapping", since = "1.7.0")]
+ #[cfg(stage0)]
pub fn overflowing_shl(self, rhs: u32) -> (Self, bool) {
(self << (rhs & ($BITS - 1)), (rhs > ($BITS - 1)))
}
/// ```
#[inline]
#[stable(feature = "wrapping", since = "1.7.0")]
+ #[cfg(not(stage0))]
+ pub fn overflowing_shr(self, rhs: u32) -> (Self, bool) {
+ (self.wrapping_shr(rhs), (rhs > ($BITS - 1)))
+ }
+
+ /// Stage 0
+ #[inline]
+ #[stable(feature = "wrapping", since = "1.7.0")]
+ #[cfg(stage0)]
pub fn overflowing_shr(self, rhs: u32) -> (Self, bool) {
(self >> (rhs & ($BITS - 1)), (rhs > ($BITS - 1)))
}
#[lang = "i8"]
impl i8 {
- int_impl! { i8, u8, 8,
+ int_impl! { i8, i8, u8, 8,
intrinsics::add_with_overflow,
intrinsics::sub_with_overflow,
intrinsics::mul_with_overflow }
#[lang = "i16"]
impl i16 {
- int_impl! { i16, u16, 16,
+ int_impl! { i16, i16, u16, 16,
intrinsics::add_with_overflow,
intrinsics::sub_with_overflow,
intrinsics::mul_with_overflow }
#[lang = "i32"]
impl i32 {
- int_impl! { i32, u32, 32,
+ int_impl! { i32, i32, u32, 32,
intrinsics::add_with_overflow,
intrinsics::sub_with_overflow,
intrinsics::mul_with_overflow }
#[lang = "i64"]
impl i64 {
- int_impl! { i64, u64, 64,
+ int_impl! { i64, i64, u64, 64,
intrinsics::add_with_overflow,
intrinsics::sub_with_overflow,
intrinsics::mul_with_overflow }
#[lang = "i128"]
impl i128 {
- int_impl! { i128, u128, 128,
+ int_impl! { i128, i128, u128, 128,
intrinsics::add_with_overflow,
intrinsics::sub_with_overflow,
intrinsics::mul_with_overflow }
#[cfg(target_pointer_width = "16")]
#[lang = "isize"]
impl isize {
- int_impl! { i16, u16, 16,
+ int_impl! { isize, i16, u16, 16,
intrinsics::add_with_overflow,
intrinsics::sub_with_overflow,
intrinsics::mul_with_overflow }
#[cfg(target_pointer_width = "32")]
#[lang = "isize"]
impl isize {
- int_impl! { i32, u32, 32,
+ int_impl! { isize, i32, u32, 32,
intrinsics::add_with_overflow,
intrinsics::sub_with_overflow,
intrinsics::mul_with_overflow }
#[cfg(target_pointer_width = "64")]
#[lang = "isize"]
impl isize {
- int_impl! { i64, u64, 64,
+ int_impl! { isize, i64, u64, 64,
intrinsics::add_with_overflow,
intrinsics::sub_with_overflow,
intrinsics::mul_with_overflow }
// `Int` + `UnsignedInt` implemented for unsigned integers
macro_rules! uint_impl {
- ($ActualT:ty, $BITS:expr,
+ ($SelfT:ty, $ActualT:ty, $BITS:expr,
$ctpop:path,
$ctlz:path,
$cttz:path,
/// ```
#[stable(feature = "num_wrapping", since = "1.2.0")]
#[inline(always)]
+ #[cfg(not(stage0))]
+ pub fn wrapping_shl(self, rhs: u32) -> Self {
+ unsafe {
+ intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT)
+ }
+ }
+
+ /// Stage 0
+ #[stable(feature = "num_wrapping", since = "1.2.0")]
+ #[inline(always)]
+ #[cfg(stage0)]
pub fn wrapping_shl(self, rhs: u32) -> Self {
self.overflowing_shl(rhs).0
}
/// ```
#[stable(feature = "num_wrapping", since = "1.2.0")]
#[inline(always)]
+ #[cfg(not(stage0))]
+ pub fn wrapping_shr(self, rhs: u32) -> Self {
+ unsafe {
+ intrinsics::unchecked_shr(self, (rhs & ($BITS - 1)) as $SelfT)
+ }
+ }
+
+ /// Stage 0
+ #[stable(feature = "num_wrapping", since = "1.2.0")]
+ #[inline(always)]
+ #[cfg(stage0)]
pub fn wrapping_shr(self, rhs: u32) -> Self {
self.overflowing_shr(rhs).0
}
/// ```
#[inline]
#[stable(feature = "wrapping", since = "1.7.0")]
+ #[cfg(not(stage0))]
+ pub fn overflowing_shl(self, rhs: u32) -> (Self, bool) {
+ (self.wrapping_shl(rhs), (rhs > ($BITS - 1)))
+ }
+
+ /// Stage 0
+ #[inline]
+ #[stable(feature = "wrapping", since = "1.7.0")]
+ #[cfg(stage0)]
pub fn overflowing_shl(self, rhs: u32) -> (Self, bool) {
(self << (rhs & ($BITS - 1)), (rhs > ($BITS - 1)))
}
/// ```
#[inline]
#[stable(feature = "wrapping", since = "1.7.0")]
+ #[cfg(not(stage0))]
+ pub fn overflowing_shr(self, rhs: u32) -> (Self, bool) {
+ (self.wrapping_shr(rhs), (rhs > ($BITS - 1)))
+
+ }
+
+ /// Stage 0
+ #[inline]
+ #[stable(feature = "wrapping", since = "1.7.0")]
+ #[cfg(stage0)]
pub fn overflowing_shr(self, rhs: u32) -> (Self, bool) {
(self >> (rhs & ($BITS - 1)), (rhs > ($BITS - 1)))
}
#[lang = "u8"]
impl u8 {
- uint_impl! { u8, 8,
+ uint_impl! { u8, u8, 8,
intrinsics::ctpop,
intrinsics::ctlz,
intrinsics::cttz,
#[lang = "u16"]
impl u16 {
- uint_impl! { u16, 16,
+ uint_impl! { u16, u16, 16,
intrinsics::ctpop,
intrinsics::ctlz,
intrinsics::cttz,
#[lang = "u32"]
impl u32 {
- uint_impl! { u32, 32,
+ uint_impl! { u32, u32, 32,
intrinsics::ctpop,
intrinsics::ctlz,
intrinsics::cttz,
#[lang = "u64"]
impl u64 {
- uint_impl! { u64, 64,
+ uint_impl! { u64, u64, 64,
intrinsics::ctpop,
intrinsics::ctlz,
intrinsics::cttz,
#[lang = "u128"]
impl u128 {
- uint_impl! { u128, 128,
+ uint_impl! { u128, u128, 128,
intrinsics::ctpop,
intrinsics::ctlz,
intrinsics::cttz,
#[cfg(target_pointer_width = "16")]
#[lang = "usize"]
impl usize {
- uint_impl! { u16, 16,
+ uint_impl! { usize, u16, 16,
intrinsics::ctpop,
intrinsics::ctlz,
intrinsics::cttz,
#[cfg(target_pointer_width = "32")]
#[lang = "usize"]
impl usize {
- uint_impl! { u32, 32,
+ uint_impl! { usize, u32, 32,
intrinsics::ctpop,
intrinsics::ctlz,
intrinsics::cttz,
#[cfg(target_pointer_width = "64")]
#[lang = "usize"]
impl usize {
- uint_impl! { u64, 64,
+ uint_impl! { usize, u64, 64,
intrinsics::ctpop,
intrinsics::ctlz,
intrinsics::cttz,
($storage:ty, $target:ty, $($source:ty),*) => {$(
#[unstable(feature = "try_from", issue = "33417")]
impl TryFrom<$source> for $target {
- type Err = TryFromIntError;
+ type Error = TryFromIntError;
fn try_from(u: $source) -> Result<$target, TryFromIntError> {
let min = <$target as FromStrRadixHelper>::min_value() as $storage;
($unsigned:ty, $($signed:ty),*) => {$(
#[unstable(feature = "try_from", issue = "33417")]
impl TryFrom<$unsigned> for $signed {
- type Err = TryFromIntError;
+ type Error = TryFromIntError;
fn try_from(u: $unsigned) -> Result<$signed, TryFromIntError> {
let max = <$signed as FromStrRadixHelper>::max_value() as u128;
#[unstable(feature = "try_from", issue = "33417")]
impl TryFrom<$signed> for $unsigned {
- type Err = TryFromIntError;
+ type Error = TryFromIntError;
fn try_from(u: $signed) -> Result<$unsigned, TryFromIntError> {
let max = <$unsigned as FromStrRadixHelper>::max_value() as u128;
#[stable(feature = "rust1", since = "1.0.0")]
pub use intrinsics::write_bytes;
+#[cfg(stage0)]
#[stable(feature = "drop_in_place", since = "1.8.0")]
pub use intrinsics::drop_in_place;
+#[cfg(not(stage0))]
+/// Executes the destructor (if any) of the pointed-to value.
+///
+/// This has two use cases:
+///
+/// * It is *required* to use `drop_in_place` to drop unsized types like
+/// trait objects, because they can't be read out onto the stack and
+/// dropped normally.
+///
+/// * It is friendlier to the optimizer to do this over `ptr::read` when
+/// dropping manually allocated memory (e.g. when writing Box/Rc/Vec),
+/// as the compiler doesn't need to prove that it's sound to elide the
+/// copy.
+///
+/// # Undefined Behavior
+///
+/// This has all the same safety problems as `ptr::read` with respect to
+/// invalid pointers, types, and double drops.
+#[stable(feature = "drop_in_place", since = "1.8.0")]
+#[lang="drop_in_place"]
+#[inline]
+#[allow(unconditional_recursion)]
+pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
+ // Code here does not matter - this is replaced by the
+ // real drop glue by the compiler.
+ drop_in_place(to_drop);
+}
+
/// Creates a null raw pointer.
///
/// # Examples
/// Basic usage:
///
/// ```
-/// #![feature(ptr_unaligned)]
-///
/// let x = 12;
/// let y = &x as *const i32;
///
/// }
/// ```
#[inline(always)]
-#[unstable(feature = "ptr_unaligned", issue = "37955")]
+#[stable(feature = "ptr_unaligned", since = "1.17.0")]
pub unsafe fn read_unaligned<T>(src: *const T) -> T {
let mut tmp: T = mem::uninitialized();
copy_nonoverlapping(src as *const u8,
/// Basic usage:
///
/// ```
-/// #![feature(ptr_unaligned)]
-///
/// let mut x = 0;
/// let y = &mut x as *mut i32;
/// let z = 12;
/// }
/// ```
#[inline]
-#[unstable(feature = "ptr_unaligned", issue = "37955")]
+#[stable(feature = "ptr_unaligned", since = "1.17.0")]
pub unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
copy_nonoverlapping(&src as *const T as *const u8,
dst as *mut u8,
/// # Examples
///
/// ```
-/// #![feature(ptr_eq)]
/// use std::ptr;
///
/// let five = 5;
/// assert!(ptr::eq(five_ref, same_five_ref));
/// assert!(!ptr::eq(five_ref, other_five_ref));
/// ```
-#[unstable(feature = "ptr_eq", reason = "newly added", issue = "36497")]
+#[stable(feature = "ptr_eq", since = "1.17.0")]
#[inline]
pub fn eq<T: ?Sized>(a: *const T, b: *const T) -> bool {
a == b
/// # Safety
///
/// `ptr` must be non-null.
- pub unsafe fn new(ptr: *mut T) -> Self {
+ pub unsafe fn new(ptr: *const T) -> Self {
Shared { pointer: NonZero::new(ptr), _marker: PhantomData }
}
}
+#[unstable(feature = "shared", issue = "27730")]
+impl<T: ?Sized> Shared<T> {
+ /// Acquires the underlying pointer as a `*mut` pointer.
+ pub unsafe fn as_mut_ptr(&self) -> *mut T {
+ **self as _
+ }
+}
+
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Clone for Shared<T> {
fn clone(&self) -> Self {
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Deref for Shared<T> {
- type Target = *mut T;
+ type Target = *const T;
#[inline]
- fn deref(&self) -> &*mut T {
+ fn deref(&self) -> &*const T {
unsafe { mem::transmute(&*self.pointer) }
}
}
/// Basic usage:
///
/// ```{.should_panic}
- /// # #![feature(result_expect_err)]
/// let x: Result<u32, &str> = Ok(10);
/// x.expect_err("Testing expect_err"); // panics with `Testing expect_err: 10`
/// ```
#[inline]
- #[unstable(feature = "result_expect_err", issue = "39041")]
+ #[stable(feature = "result_expect_err", since = "1.17.0")]
pub fn expect_err(self, msg: &str) -> E {
match self {
Ok(t) => unwrap_failed(msg, t),
use self::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
use char;
+use convert::TryFrom;
use fmt;
use iter::{Map, Cloned, FusedIterator};
use mem;
#[stable(feature = "core", since = "1.6.0")]
fn is_empty(&self) -> bool;
#[stable(feature = "core", since = "1.6.0")]
- fn parse<T: FromStr>(&self) -> Result<T, T::Err>;
+ fn parse<'a, T: TryFrom<&'a str>>(&'a self) -> Result<T, T::Error>;
}
// truncate `&str` to length at most equal to `max`
fn is_empty(&self) -> bool { self.len() == 0 }
#[inline]
- fn parse<T: FromStr>(&self) -> Result<T, T::Err> { FromStr::from_str(self) }
+ fn parse<'a, T>(&'a self) -> Result<T, T::Error> where T: TryFrom<&'a str> {
+ T::try_from(self)
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
#![feature(nonzero)]
#![feature(rand)]
#![feature(raw)]
-#![feature(result_expect_err)]
#![feature(sip_hash_13)]
#![feature(slice_patterns)]
#![feature(step_by)]
#![feature(try_from)]
#![feature(unicode)]
#![feature(unique)]
-#![feature(ordering_chaining)]
-#![feature(ptr_unaligned)]
-#![feature(move_cell)]
#![feature(fmt_internals)]
extern crate core;
//! A library for procedural macro writers.
//!
//! ## Usage
-//! This crate provides the `qquote!` macro for syntax creation.
+//! This crate provides the `quote!` macro for syntax creation.
//!
-//! The `qquote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;`
+//! The `quote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;`
//! at the crate root. This is a temporary solution until we have better hygiene.
//!
//! ## Quasiquotation
//!
//! The quasiquoter creates output that, when run, constructs the tokenstream specified as
-//! input. For example, `qquote!(5 + 5)` will produce a program, that, when run, will
-//! input. For example, `quote!(5 + 5)` will produce a program that, when run, will
//! construct the TokenStream `5 | + | 5`.
//!
//! ### Unquoting
//!
-//! Unquoting is currently done as `unquote`, and works by taking the single next
-//! TokenTree in the TokenStream as the unquoted term. Ergonomically, `unquote(foo)` works
-//! fine, but `unquote foo` is also supported.
+//! Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
+//! To quote `$` itself, use `$$`.
//!
-//! A simple example might be:
+//! A simple example is:
//!
//!```
//!fn double(tmp: TokenStream) -> TokenStream {
-//! qquote!(unquote(tmp) * 2)
+//! quote!($tmp * 2)
//!}
//!```
//!
-//! ### Large Example: Implementing Scheme's `cond`
+//! ### Large example: Scheme's `cond`
//!
-//! Below is the full implementation of Scheme's `cond` operator.
+//! Below is an example implementation of Scheme's `cond`.
//!
//! ```
-//! fn cond_rec(input: TokenStream) -> TokenStream {
-//! if input.is_empty() { return quote!(); }
-//!
-//! let next = input.slice(0..1);
-//! let rest = input.slice_from(1..);
-//!
-//! let clause : TokenStream = match next.maybe_delimited() {
-//! Some(ts) => ts,
-//! _ => panic!("Invalid input"),
-//! };
-//!
-//! // clause is ([test]) [rhs]
-//! if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) }
-//!
-//! let test: TokenStream = clause.slice(0..1);
-//! let rhs: TokenStream = clause.slice_from(1..);
-//!
-//! if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() {
-//! quote!({unquote(rhs)})
-//! } else {
-//! quote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
-//! }
+//! fn cond(input: TokenStream) -> TokenStream {
+//! let mut conds = Vec::new();
+//! let mut input = input.trees().peekable();
+//! while let Some(tree) = input.next() {
+//! let mut cond = match tree {
+//! TokenTree::Delimited(_, ref delimited) => delimited.stream(),
+//! _ => panic!("Invalid input"),
+//! };
+//! let mut trees = cond.trees();
+//! let test = trees.next();
+//! let rhs = trees.collect::<TokenStream>();
+//! if rhs.is_empty() {
+//! panic!("Invalid macro usage in cond: {}", cond);
+//! }
+//! let is_else = match test {
+//! Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true,
+//! _ => false,
+//! };
+//! conds.push(if is_else || input.peek().is_none() {
+//! quote!({ $rhs })
+//! } else {
+//! let test = test.unwrap();
+//! quote!(if $test { $rhs } else)
+//! });
+//! }
+//!
+//! conds.into_iter().collect()
//! }
//! ```
-//!
-
#![crate_name = "proc_macro_plugin"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![feature(plugin_registrar)]
extern crate syntax;
extern crate syntax_pos;
-mod qquote;
-use qquote::qquote;
+mod quote;
+use quote::quote;
use rustc_plugin::Registry;
use syntax::ext::base::SyntaxExtension;
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
- reg.register_syntax_extension(Symbol::intern("qquote"),
- SyntaxExtension::ProcMacro(Box::new(qquote)));
+ reg.register_syntax_extension(Symbol::intern("quote"),
+ SyntaxExtension::ProcMacro(Box::new(quote)));
}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! # Quasiquoter
-//! This file contains the implementation internals of the quasiquoter provided by `qquote!`.
-
-use syntax::ast::Ident;
-use syntax::parse::token::{self, Token, Lit};
-use syntax::symbol::Symbol;
-use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream};
-use syntax_pos::DUMMY_SP;
-
-use std::iter;
-
-pub fn qquote<'cx>(stream: TokenStream) -> TokenStream {
- stream.quote()
-}
-
-trait Quote {
- fn quote(&self) -> TokenStream;
-}
-
-macro_rules! quote_tok {
- (,) => { Token::Comma };
- (.) => { Token::Dot };
- (:) => { Token::Colon };
- (::) => { Token::ModSep };
- (!) => { Token::Not };
- (<) => { Token::Lt };
- (>) => { Token::Gt };
- (_) => { Token::Underscore };
- ($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) };
-}
-
-macro_rules! quote_tree {
- ((unquote $($t:tt)*)) => { $($t)* };
- ((quote $($t:tt)*)) => { ($($t)*).quote() };
- (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) };
- ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) };
- ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) };
- ($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) };
-}
-
-fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
- TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
-}
-
-macro_rules! quote {
- () => { TokenStream::empty() };
- ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::<TokenStream>() };
-}
-
-impl<T: Quote> Quote for Option<T> {
- fn quote(&self) -> TokenStream {
- match *self {
- Some(ref t) => quote!(::std::option::Option::Some((quote t))),
- None => quote!(::std::option::Option::None),
- }
- }
-}
-
-impl Quote for TokenStream {
- fn quote(&self) -> TokenStream {
- if self.is_empty() {
- return quote!(::syntax::tokenstream::TokenStream::empty());
- }
-
- struct Quote(iter::Peekable<tokenstream::Cursor>);
-
- impl Iterator for Quote {
- type Item = TokenStream;
-
- fn next(&mut self) -> Option<TokenStream> {
- let is_unquote = match self.0.peek() {
- Some(&TokenTree::Token(_, Token::Ident(ident))) if ident.name == "unquote" => {
- self.0.next();
- true
- }
- _ => false,
- };
-
- self.0.next().map(|tree| {
- let quoted_tree = if is_unquote { tree.into() } else { tree.quote() };
- quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),)
- })
- }
- }
-
- let quoted = Quote(self.trees().peekable()).collect::<TokenStream>();
- quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>())
- }
-}
-
-impl Quote for TokenTree {
- fn quote(&self) -> TokenStream {
- match *self {
- TokenTree::Token(_, ref token) => quote! {
- ::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP,
- (quote token))
- },
- TokenTree::Delimited(_, ref delimited) => quote! {
- ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
- (quote delimited))
- },
- }
- }
-}
-
-impl Quote for Delimited {
- fn quote(&self) -> TokenStream {
- quote!(::syntax::tokenstream::Delimited {
- delim: (quote self.delim),
- tts: (quote self.stream()).into(),
- })
- }
-}
-
-impl<'a> Quote for &'a str {
- fn quote(&self) -> TokenStream {
- TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None))
- .into()
- }
-}
-
-impl Quote for Ident {
- fn quote(&self) -> TokenStream {
- // FIXME(jseyfried) quote hygiene
- quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str())))
- }
-}
-
-impl Quote for Symbol {
- fn quote(&self) -> TokenStream {
- quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str())))
- }
-}
-
-impl Quote for Token {
- fn quote(&self) -> TokenStream {
- macro_rules! gen_match {
- ($($i:ident),*; $($t:tt)*) => {
- match *self {
- $( Token::$i => quote!(::syntax::parse::token::$i), )*
- $( $t )*
- }
- }
- }
-
- gen_match! {
- Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot,
- Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question,
- Underscore;
-
- Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))),
- Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))),
- Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))),
- Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))),
- Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))),
- Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))),
- Token::Literal(lit, sfx) => quote! {
- ::syntax::parse::token::Literal((quote lit), (quote sfx))
- },
- _ => panic!("Unhandled case!"),
- }
- }
-}
-
-impl Quote for token::BinOpToken {
- fn quote(&self) -> TokenStream {
- macro_rules! gen_match {
- ($($i:ident),*) => {
- match *self {
- $( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )*
- }
- }
- }
-
- gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr)
- }
-}
-
-impl Quote for Lit {
- fn quote(&self) -> TokenStream {
- macro_rules! gen_match {
- ($($i:ident),*) => {
- match *self {
- $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
- _ => panic!("Unsupported literal"),
- }
- }
- }
-
- gen_match!(Byte, Char, Float, Str_, Integer, ByteStr)
- }
-}
-
-impl Quote for token::DelimToken {
- fn quote(&self) -> TokenStream {
- macro_rules! gen_match {
- ($($i:ident),*) => {
- match *self {
- $(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })*
- }
- }
- }
-
- gen_match!(Paren, Bracket, Brace, NoDelim)
- }
-}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
+
+use syntax::ast::Ident;
+use syntax::parse::token::{self, Token, Lit};
+use syntax::symbol::Symbol;
+use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream};
+use syntax_pos::DUMMY_SP;
+
+use std::iter;
+
+pub fn quote<'cx>(stream: TokenStream) -> TokenStream {
+ stream.quote()
+}
+
+trait Quote {
+ fn quote(&self) -> TokenStream;
+}
+
+macro_rules! quote_tok {
+ (,) => { Token::Comma };
+ (.) => { Token::Dot };
+ (:) => { Token::Colon };
+ (::) => { Token::ModSep };
+ (!) => { Token::Not };
+ (<) => { Token::Lt };
+ (>) => { Token::Gt };
+ (_) => { Token::Underscore };
+ ($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) };
+}
+
+macro_rules! quote_tree {
+ ((unquote $($t:tt)*)) => { $($t)* };
+ ((quote $($t:tt)*)) => { ($($t)*).quote() };
+ (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) };
+ ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) };
+ ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) };
+ ($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) };
+}
+
+fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
+ TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
+}
+
+macro_rules! quote {
+ () => { TokenStream::empty() };
+ ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::<TokenStream>() };
+}
+
+impl<T: Quote> Quote for Option<T> {
+ fn quote(&self) -> TokenStream {
+ match *self {
+ Some(ref t) => quote!(::std::option::Option::Some((quote t))),
+ None => quote!(::std::option::Option::None),
+ }
+ }
+}
+
+impl Quote for TokenStream {
+ fn quote(&self) -> TokenStream {
+ if self.is_empty() {
+ return quote!(::syntax::tokenstream::TokenStream::empty());
+ }
+
+ struct Quoter(iter::Peekable<tokenstream::Cursor>);
+
+ impl Iterator for Quoter {
+ type Item = TokenStream;
+
+ fn next(&mut self) -> Option<TokenStream> {
+ let quoted_tree = if let Some(&TokenTree::Token(_, Token::Dollar)) = self.0.peek() {
+ self.0.next();
+ match self.0.next() {
+ Some(tree @ TokenTree::Token(_, Token::Ident(..))) => Some(tree.into()),
+ Some(tree @ TokenTree::Token(_, Token::Dollar)) => Some(tree.quote()),
+ // FIXME(jseyfried): improve these diagnostics
+ Some(..) => panic!("`$` must be followed by an ident or `$` in `quote!`"),
+ None => panic!("unexpected trailing `$` in `quote!`"),
+ }
+ } else {
+ self.0.next().as_ref().map(Quote::quote)
+ };
+
+ quoted_tree.map(|quoted_tree| {
+ quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),)
+ })
+ }
+ }
+
+ let quoted = Quoter(self.trees().peekable()).collect::<TokenStream>();
+ quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>())
+ }
+}
+
+impl Quote for TokenTree {
+ fn quote(&self) -> TokenStream {
+ match *self {
+ TokenTree::Token(_, ref token) => quote! {
+ ::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP,
+ (quote token))
+ },
+ TokenTree::Delimited(_, ref delimited) => quote! {
+ ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
+ (quote delimited))
+ },
+ }
+ }
+}
+
+impl Quote for Delimited {
+ fn quote(&self) -> TokenStream {
+ quote!(::syntax::tokenstream::Delimited {
+ delim: (quote self.delim),
+ tts: (quote self.stream()).into(),
+ })
+ }
+}
+
+impl<'a> Quote for &'a str {
+ fn quote(&self) -> TokenStream {
+ TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None))
+ .into()
+ }
+}
+
+impl Quote for Ident {
+ fn quote(&self) -> TokenStream {
+ // FIXME(jseyfried) quote hygiene
+ quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str())))
+ }
+}
+
+impl Quote for Symbol {
+ fn quote(&self) -> TokenStream {
+ quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str())))
+ }
+}
+
+impl Quote for Token {
+ fn quote(&self) -> TokenStream {
+ macro_rules! gen_match {
+ ($($i:ident),*; $($t:tt)*) => {
+ match *self {
+ $( Token::$i => quote!(::syntax::parse::token::$i), )*
+ $( $t )*
+ }
+ }
+ }
+
+ gen_match! {
+ Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot,
+ Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question,
+ Underscore;
+
+ Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))),
+ Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))),
+ Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))),
+ Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))),
+ Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))),
+ Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))),
+ Token::Literal(lit, sfx) => quote! {
+ ::syntax::parse::token::Literal((quote lit), (quote sfx))
+ },
+ _ => panic!("Unhandled case!"),
+ }
+ }
+}
+
+impl Quote for token::BinOpToken {
+ fn quote(&self) -> TokenStream {
+ macro_rules! gen_match {
+ ($($i:ident),*) => {
+ match *self {
+ $( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )*
+ }
+ }
+ }
+
+ gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr)
+ }
+}
+
+impl Quote for Lit {
+ fn quote(&self) -> TokenStream {
+ macro_rules! gen_match {
+ ($($i:ident),*) => {
+ match *self {
+ $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
+ _ => panic!("Unsupported literal"),
+ }
+ }
+ }
+
+ gen_match!(Byte, Char, Float, Str_, Integer, ByteStr)
+ }
+}
+
+impl Quote for token::DelimToken {
+ fn quote(&self) -> TokenStream {
+ macro_rules! gen_match {
+ ($($i:ident),*) => {
+ match *self {
+ $(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })*
+ }
+ }
+ }
+
+ gen_match!(Paren, Bracket, Brace, NoDelim)
+ }
+}
// things read/modify that MIR.
MirKrate,
Mir(D),
+ MirShim(Vec<D>),
BorrowCheckKrate,
BorrowCheck(D),
IntrinsicCheck(ref d) => op(d).map(IntrinsicCheck),
MatchCheck(ref d) => op(d).map(MatchCheck),
Mir(ref d) => op(d).map(Mir),
+ MirShim(ref def_ids) => {
+ let def_ids: Option<Vec<E>> = def_ids.iter().map(op).collect();
+ def_ids.map(MirShim)
+ }
BorrowCheck(ref d) => op(d).map(BorrowCheck),
RvalueCheck(ref d) => op(d).map(RvalueCheck),
StabilityCheck(ref d) => op(d).map(StabilityCheck),
}
fn check_attribute(&self, attr: &ast::Attribute, target: Target) {
- let name: &str = &attr.name().as_str();
- match name {
- "inline" => self.check_inline(attr, target),
- "repr" => self.check_repr(attr, target),
- _ => (),
+ if let Some(name) = attr.name() {
+ match &*name.as_str() {
+ "inline" => self.check_inline(attr, target),
+ "repr" => self.check_repr(attr, target),
+ _ => (),
+ }
}
}
}
let attrs = self.lower_attrs(&i.attrs);
let mut vis = self.lower_visibility(&i.vis);
if let ItemKind::MacroDef(ref tts) = i.node {
- if i.attrs.iter().any(|attr| attr.name() == "macro_export") {
+ if i.attrs.iter().any(|attr| attr.path == "macro_export") {
self.exported_macros.push(hir::MacroDef {
name: name, attrs: attrs, id: i.id, span: i.span, body: tts.clone().into(),
});
pub fn gather_attr(attr: &ast::Attribute) -> Vec<Result<(ast::Name, Level, Span), Span>> {
let mut out = vec![];
- let level = match Level::from_str(&attr.name().as_str()) {
+ let level = match attr.name().and_then(|name| Level::from_str(&name.as_str())) {
None => return out,
Some(lvl) => lvl,
};
+ let meta = unwrap_or!(attr.meta(), return out);
attr::mark_used(attr);
- let meta = &attr.value;
let metas = if let Some(metas) = meta.meta_item_list() {
metas
} else {
ExchangeMallocFnLangItem, "exchange_malloc", exchange_malloc_fn;
BoxFreeFnLangItem, "box_free", box_free_fn;
- StrDupUniqFnLangItem, "strdup_uniq", strdup_uniq_fn;
+ DropInPlaceFnLangItem, "drop_in_place", drop_in_place_fn;
StartFnLangItem, "start", start_fn;
ContravariantLifetimeItem, "contravariant_lifetime", contravariant_lifetime;
InvariantLifetimeItem, "invariant_lifetime", invariant_lifetime;
- NoCopyItem, "no_copy_bound", no_copy_bound;
-
NonZeroItem, "non_zero", non_zero;
DebugTraitLangItem, "debug_trait", debug_trait;
pub type cmt<'tcx> = Rc<cmt_<'tcx>>;
impl<'tcx> cmt_<'tcx> {
+ pub fn get_def(&self) -> Option<ast::NodeId> {
+ match self.cat {
+ Categorization::Deref(ref cmt, ..) |
+ Categorization::Interior(ref cmt, _) |
+ Categorization::Downcast(ref cmt, _) => {
+ if let Categorization::Local(nid) = cmt.cat {
+ Some(nid)
+ } else {
+ None
+ }
+ }
+ _ => None
+ }
+ }
+
pub fn get_field(&self, name: ast::Name) -> Option<DefId> {
match self.cat {
Categorization::Deref(ref cmt, ..) |
let promotable = self.tcx().rvalue_promotable_to_static.borrow().get(&id).cloned()
.unwrap_or(false);
- // Only promote `[T; 0]` before an RFC for rvalue promotions
- // is accepted.
+ // When the corresponding feature isn't toggled, only promote `[T; 0]`.
let promotable = match expr_ty.sty {
ty::TyArray(_, 0) => true,
- _ => promotable & false
+ _ => promotable && self.tcx().sess.features.borrow().rvalue_static_promotion,
};
// Compute maximum lifetime of this rvalue. This is 'static if
} else {
// Emit errors for non-staged-api crates.
for attr in attrs {
- let tag = attr.name();
+ let tag = unwrap_or!(attr.name(), continue);
if tag == "unstable" || tag == "stable" || tag == "rustc_deprecated" {
attr::mark_used(attr);
self.tcx.sess.span_err(attr.span(), "stability attributes may not be used \
let mut is_staged_api = false;
for attr in &krate.attrs {
- if attr.name() == "stable" || attr.name() == "unstable" {
+ if attr.path == "stable" || attr.path == "unstable" {
is_staged_api = true;
break
}
use rustc_data_structures::control_flow_graph::ControlFlowGraph;
use hir::def::CtorKind;
use hir::def_id::DefId;
-use ty::subst::Substs;
+use ty::subst::{Subst, Substs};
use ty::{self, AdtDef, ClosureSubsts, Region, Ty};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use util::ppaux;
}
}
+impl<'tcx> Operand<'tcx> {
+ pub fn item<'a>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId,
+ substs: &'tcx Substs<'tcx>,
+ span: Span)
+ -> Self
+ {
+ Operand::Constant(Constant {
+ span: span,
+ ty: tcx.item_type(def_id).subst(tcx, substs),
+ literal: Literal::Item { def_id, substs }
+ })
+ }
+
+}
+
///////////////////////////////////////////////////////////////////////////
/// Rvalues
.filter(|a| a.check_name("rustc_on_unimplemented"))
.next()
{
- let err_sp = item.meta().span.substitute_dummy(span);
+ let err_sp = item.span.substitute_dummy(span);
let trait_str = self.tcx.item_path_str(trait_ref.def_id);
if let Some(istring) = item.value_str() {
let istring = &*istring.as_str();
pub use self::select::{MethodMatchResult, MethodMatched, MethodAmbiguous, MethodDidNotMatch};
pub use self::select::{MethodMatchedData}; // intentionally don't export variants
pub use self::specialize::{OverlapError, specialization_graph, specializes, translate_substs};
-pub use self::specialize::{SpecializesCache, find_method};
+pub use self::specialize::{SpecializesCache, find_associated_item};
pub use self::util::elaborate_predicates;
pub use self::util::supertraits;
pub use self::util::Supertraits;
use ty::{self, TyCtxt, TypeFoldable};
use syntax_pos::DUMMY_SP;
-use syntax::ast;
-
pub mod specialization_graph;
/// Information pertinent to an overlapping impl error.
}
/// Given a selected impl described by `impl_data`, returns the
-/// definition and substitions for the method with the name `name`,
-/// and trait method substitutions `substs`, in that impl, a less
-/// specialized impl, or the trait default, whichever applies.
-pub fn find_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- name: ast::Name,
- substs: &'tcx Substs<'tcx>,
- impl_data: &super::VtableImplData<'tcx, ()>)
- -> (DefId, &'tcx Substs<'tcx>)
-{
+/// definition and substitutions for the method with the name `name`,
+/// the kind `kind`, and trait method substitutions `substs`, in
+/// that impl, a less specialized impl, or the trait default,
+/// whichever applies.
+pub fn find_associated_item<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ item: &ty::AssociatedItem,
+ substs: &'tcx Substs<'tcx>,
+ impl_data: &super::VtableImplData<'tcx, ()>,
+) -> (DefId, &'tcx Substs<'tcx>) {
assert!(!substs.needs_infer());
let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();
let trait_def = tcx.lookup_trait_def(trait_def_id);
let ancestors = trait_def.ancestors(impl_data.impl_def_id);
- match ancestors.defs(tcx, name, ty::AssociatedKind::Method).next() {
+ match ancestors.defs(tcx, item.name, item.kind).next() {
Some(node_item) => {
let substs = tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
let substs = substs.rebase_onto(tcx, trait_def_id, impl_data.substs);
(node_item.item.def_id, substs)
}
None => {
- bug!("method {:?} not found in {:?}", name, impl_data.impl_def_id)
+ bug!("{:?} not found in {:?}", item, impl_data.impl_def_id)
}
}
}
}
}
+impl<'a, T, R> InternIteratorElement<T, R> for &'a T
+ where T: Clone + 'a
+{
+ type Output = R;
+ fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
+ f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
+ }
+}
+
impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
type Output = Result<R, E>;
fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use dep_graph::DepNode;
+use hir::def_id::DefId;
+use ty::{self, Ty, TypeFoldable, Substs};
+use util::ppaux;
+
+use std::borrow::Cow;
+use std::fmt;
+use syntax::ast;
+
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub struct Instance<'tcx> {
+ pub def: InstanceDef<'tcx>,
+ pub substs: &'tcx Substs<'tcx>,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub enum InstanceDef<'tcx> {
+ Item(DefId),
+ Intrinsic(DefId),
+ // <fn() as FnTrait>::call_*
+ // def-id is FnTrait::call_*
+ FnPtrShim(DefId, Ty<'tcx>),
+ // <Trait as Trait>::fn
+ Virtual(DefId, usize),
+ // <[mut closure] as FnOnce>::call_once
+ ClosureOnceShim { call_once: DefId },
+ // drop_in_place::<T>; None for empty drop glue.
+ DropGlue(DefId, Option<Ty<'tcx>>),
+}
+
+impl<'tcx> InstanceDef<'tcx> {
+ #[inline]
+ pub fn def_id(&self) -> DefId {
+ match *self {
+ InstanceDef::Item(def_id) |
+ InstanceDef::FnPtrShim(def_id, _) |
+ InstanceDef::Virtual(def_id, _) |
+ InstanceDef::Intrinsic(def_id, ) |
+ InstanceDef::ClosureOnceShim { call_once: def_id }
+ => def_id,
+ InstanceDef::DropGlue(def_id, _) => def_id
+ }
+ }
+
+ #[inline]
+ pub fn def_ty<'a>(&self, tcx: ty::TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
+ tcx.item_type(self.def_id())
+ }
+
+ #[inline]
+ pub fn attrs<'a>(&self, tcx: ty::TyCtxt<'a, 'tcx, 'tcx>) -> Cow<'tcx, [ast::Attribute]> {
+ tcx.get_attrs(self.def_id())
+ }
+
+ pub(crate) fn dep_node(&self) -> DepNode<DefId> {
+ // HACK: def-id binning, project-style; someone replace this with
+ // real on-demand.
+ let ty = match self {
+ &InstanceDef::FnPtrShim(_, ty) => Some(ty),
+ &InstanceDef::DropGlue(_, ty) => ty,
+ _ => None
+ }.into_iter();
+
+ DepNode::MirShim(
+ Some(self.def_id()).into_iter().chain(
+ ty.flat_map(|t| t.walk()).flat_map(|t| match t.sty {
+ ty::TyAdt(adt_def, _) => Some(adt_def.did),
+ ty::TyProjection(ref proj) => Some(proj.trait_ref.def_id),
+ _ => None,
+ })
+ ).collect()
+ )
+ }
+}
+
+impl<'tcx> fmt::Display for Instance<'tcx> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ ppaux::parameterized(f, self.substs, self.def_id(), &[])?;
+ match self.def {
+ InstanceDef::Item(_) => Ok(()),
+ InstanceDef::Intrinsic(_) => {
+ write!(f, " - intrinsic")
+ }
+ InstanceDef::Virtual(_, num) => {
+ write!(f, " - shim(#{})", num)
+ }
+ InstanceDef::FnPtrShim(_, ty) => {
+ write!(f, " - shim({:?})", ty)
+ }
+ InstanceDef::ClosureOnceShim { .. } => {
+ write!(f, " - shim")
+ }
+ InstanceDef::DropGlue(_, ty) => {
+ write!(f, " - shim({:?})", ty)
+ }
+ }
+ }
+}
+
+impl<'a, 'b, 'tcx> Instance<'tcx> {
+ pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>)
+ -> Instance<'tcx> {
+ assert!(substs.is_normalized_for_trans() && !substs.has_escaping_regions(),
+ "substs of instance {:?} not normalized for trans: {:?}",
+ def_id, substs);
+ Instance { def: InstanceDef::Item(def_id), substs: substs }
+ }
+
+ pub fn mono(tcx: ty::TyCtxt<'a, 'tcx, 'b>, def_id: DefId) -> Instance<'tcx> {
+ Instance::new(def_id, tcx.global_tcx().empty_substs_for_def_id(def_id))
+ }
+
+ #[inline]
+ pub fn def_id(&self) -> DefId {
+ self.def.def_id()
+ }
+}
// except according to those terms.
use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
-use hir::def_id::{CrateNum, DefId};
+use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use middle::const_val::ConstVal;
use mir;
use ty::{self, Ty, TyCtxt};
fn default_span(&self, tcx: TyCtxt) -> Span;
}
+impl<'tcx> Key for ty::InstanceDef<'tcx> {
+ fn map_crate(&self) -> CrateNum {
+ LOCAL_CRATE
+ }
+
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ tcx.def_span(self.def_id())
+ }
+}
+
impl Key for CrateNum {
fn map_crate(&self) -> CrateNum {
*self
}
}
-pub struct CycleError<'a> {
+pub struct CycleError<'a, 'tcx: 'a> {
span: Span,
- cycle: RefMut<'a, [(Span, Query)]>,
+ cycle: RefMut<'a, [(Span, Query<'tcx>)]>,
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
err.emit();
}
- fn cycle_check<F, R>(self, span: Span, query: Query, compute: F)
- -> Result<R, CycleError<'a>>
+ fn cycle_check<F, R>(self, span: Span, query: Query<'gcx>, compute: F)
+ -> Result<R, CycleError<'a, 'gcx>>
where F: FnOnce() -> R
{
{
}
}
+impl<'tcx> QueryDescription for queries::mir_shims<'tcx> {
+ fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String {
+ format!("generating MIR shim for `{}`",
+ tcx.item_path_str(def.def_id()))
+ }
+}
+
macro_rules! define_maps {
(<$tcx:tt>
$($(#[$attr:meta])*
pub $name:ident: $node:ident($K:ty) -> $V:ty),*) => {
pub struct Maps<$tcx> {
providers: IndexVec<CrateNum, Providers<$tcx>>,
- query_stack: RefCell<Vec<(Span, Query)>>,
+ query_stack: RefCell<Vec<(Span, Query<$tcx>)>>,
$($(#[$attr])* pub $name: RefCell<DepTrackingMap<queries::$name<$tcx>>>),*
}
#[allow(bad_style)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
- pub enum Query {
+ pub enum Query<$tcx> {
$($(#[$attr])* $name($K)),*
}
- impl Query {
+ impl<$tcx> Query<$tcx> {
pub fn describe(&self, tcx: TyCtxt) -> String {
match *self {
$(Query::$name(key) => queries::$name::describe(tcx, key)),*
mut span: Span,
key: $K,
f: F)
- -> Result<R, CycleError<'a>>
+ -> Result<R, CycleError<'a, $tcx>>
where F: FnOnce(&$V) -> R
{
if let Some(result) = tcx.maps.$name.borrow().get(&key) {
}
pub fn try_get(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K)
- -> Result<$V, CycleError<'a>> {
+ -> Result<$V, CycleError<'a, $tcx>> {
Self::try_get_with(tcx, span, key, Clone::clone)
}
/// Results of evaluating monomorphic constants embedded in
/// other items, such as enum variant explicit discriminants.
- pub monomorphic_const_eval: MonomorphicConstEval(DefId) -> Result<ConstVal<'tcx>, ()>
+ pub monomorphic_const_eval: MonomorphicConstEval(DefId) -> Result<ConstVal<'tcx>, ()>,
+
+ pub mir_shims: mir_shim(ty::InstanceDef<'tcx>) -> &'tcx RefCell<mir::Mir<'tcx>>
}
fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
fn coherent_inherent_impls_dep_node(_: CrateNum) -> DepNode<DefId> {
DepNode::Coherence
}
+
+fn mir_shim(instance: ty::InstanceDef) -> DepNode<DefId> {
+ instance.dep_node()
+}
pub use self::context::{TyCtxt, GlobalArenas, tls};
pub use self::context::{Lift, TypeckTables};
+pub use self::instance::{Instance, InstanceDef};
+
pub use self::trait_def::{TraitDef, TraitFlags};
pub use self::maps::queries;
mod contents;
mod context;
mod flags;
+mod instance;
mod structural_impls;
mod sty;
def_id,
ROOT_CODE_EXTENT)
}
- _ => {
+ Some(hir_map::NodeStructCtor(..)) |
+ Some(hir_map::NodeVariant(..)) => {
+ let def_id = tcx.hir.local_def_id(id);
+ tcx.construct_parameter_environment(tcx.hir.span(id),
+ def_id,
+ ROOT_CODE_EXTENT)
+ }
+ it => {
bug!("ParameterEnvironment::from_item(): \
- `{}` is not an item",
- tcx.hir.node_to_string(id))
+ `{}` = {:?} is unsupported",
+ tcx.hir.node_to_string(id), it)
}
}
}
queries::mir::get(self, DUMMY_SP, did).borrow()
}
+ /// Return the possibly-auto-generated MIR of a (DefId, Subst) pair.
+ pub fn instance_mir(self, instance: ty::InstanceDef<'gcx>)
+ -> Ref<'gcx, Mir<'gcx>>
+ {
+ match instance {
+ ty::InstanceDef::Item(did) if true => self.item_mir(did),
+ _ => queries::mir_shims::get(self, DUMMY_SP, instance).borrow(),
+ }
+ }
+
/// Given the DefId of an item, returns its MIR, borrowed immutably.
/// Returns None if there is no MIR for the DefId
pub fn maybe_item_mir(self, did: DefId) -> Option<Ref<'gcx, Mir<'gcx>>> {
}
def_id
}
+
+ /// Given the def-id of some item that has no type parameters, make
+ /// a suitable "empty substs" for it.
+ pub fn empty_substs_for_def_id(self, item_def_id: DefId) -> &'tcx ty::Substs<'tcx> {
+ ty::Substs::for_item(self, item_def_id,
+ |_, _| self.mk_region(ty::ReErased),
+ |_, _| {
+ bug!("empty_substs_for_def_id: {:?} has type parameters", item_def_id)
+ })
+ }
}
pub struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a, W> {
}
}
}
-
-// Like std::fs::create_dir_all, except handles concurrent calls among multiple
-// threads or processes.
-pub fn create_dir_racy(path: &Path) -> io::Result<()> {
- match fs::create_dir(path) {
- Ok(()) => return Ok(()),
- Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return Ok(()),
- Err(ref e) if e.kind() == io::ErrorKind::NotFound => (),
- Err(e) => return Err(e),
- }
- match path.parent() {
- Some(p) => try!(create_dir_racy(p)),
- None => return Err(io::Error::new(io::ErrorKind::Other, "failed to create whole tree")),
- }
- match fs::create_dir(path) {
- Ok(()) => Ok(()),
- Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()),
- Err(e) => Err(e),
- }
-}
use rustc_data_structures::bitslice::bits_to_string;
use rustc_data_structures::indexed_set::{IdxSet};
use rustc_data_structures::indexed_vec::Idx;
+use rustc_mir::util as mir_util;
use dot;
use dot::IntoCow;
}
Ok(())
}
- ::rustc_mir::graphviz::write_node_label(
+ mir_util::write_graphviz_node_label(
*n, self.mbcx.mir(), &mut v, 4,
|w| {
let flow = self.mbcx.flow_state();
use rustc_data_structures::bitslice::{BitwiseOperator};
use rustc_data_structures::indexed_set::{IdxSet};
use rustc_data_structures::indexed_vec::Idx;
+use rustc_mir::util::elaborate_drops::DropFlagState;
use super::super::gather_moves::{HasMoveData, MoveData, MoveOutIndex, MovePathIndex};
use super::super::MoveDataParamEnv;
-use super::super::DropFlagState;
use super::super::drop_flag_effects_for_function_entry;
use super::super::drop_flag_effects_for_location;
use super::super::on_lookup_result_bits;
use super::dataflow::{DataflowResults};
use super::{drop_flag_effects_for_location, on_all_children_bits};
use super::on_lookup_result_bits;
-use super::{DropFlagState, MoveDataParamEnv};
-use super::patch::MirPatch;
-use rustc::ty::{self, Ty, TyCtxt};
-use rustc::ty::subst::{Kind, Subst, Substs};
-use rustc::ty::util::IntTypeExt;
+use super::MoveDataParamEnv;
+use rustc::ty::{self, TyCtxt};
use rustc::mir::*;
use rustc::mir::transform::{Pass, MirPass, MirSource};
use rustc::middle::const_val::ConstVal;
-use rustc::middle::lang_items;
use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::indexed_set::IdxSetBuf;
use rustc_data_structures::indexed_vec::Idx;
+use rustc_mir::util::patch::MirPatch;
+use rustc_mir::util::elaborate_drops::{DropFlagState, elaborate_drop};
+use rustc_mir::util::elaborate_drops::{DropElaborator, DropStyle, DropFlagMode};
use syntax_pos::Span;
use std::fmt;
-use std::iter;
use std::u32;
pub struct ElaborateDrops;
}
}
-impl fmt::Debug for InitializationData {
+struct Elaborator<'a, 'b: 'a, 'tcx: 'b> {
+ init_data: &'a InitializationData,
+ ctxt: &'a mut ElaborateDropsCtxt<'b, 'tcx>,
+}
+
+impl<'a, 'b, 'tcx> fmt::Debug for Elaborator<'a, 'b, 'tcx> {
fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
Ok(())
}
}
+impl<'a, 'b, 'tcx> DropElaborator<'a, 'tcx> for Elaborator<'a, 'b, 'tcx> {
+ type Path = MovePathIndex;
+
+ fn patch(&mut self) -> &mut MirPatch<'tcx> {
+ &mut self.ctxt.patch
+ }
+
+ fn mir(&self) -> &'a Mir<'tcx> {
+ self.ctxt.mir
+ }
+
+ fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx> {
+ self.ctxt.tcx
+ }
+
+ fn param_env(&self) -> &'a ty::ParameterEnvironment<'tcx> {
+ self.ctxt.param_env()
+ }
+
+ fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
+ let ((maybe_live, maybe_dead), multipart) = match mode {
+ DropFlagMode::Shallow => (self.init_data.state(path), false),
+ DropFlagMode::Deep => {
+ let mut some_live = false;
+ let mut some_dead = false;
+ let mut children_count = 0;
+ on_all_children_bits(
+ self.tcx(), self.mir(), self.ctxt.move_data(),
+ path, |child| {
+ if self.ctxt.path_needs_drop(child) {
+ let (live, dead) = self.init_data.state(child);
+ debug!("elaborate_drop: state({:?}) = {:?}",
+ child, (live, dead));
+ some_live |= live;
+ some_dead |= dead;
+ children_count += 1;
+ }
+ });
+ ((some_live, some_dead), children_count != 1)
+ }
+ };
+ match (maybe_live, maybe_dead, multipart) {
+ (false, _, _) => DropStyle::Dead,
+ (true, false, _) => DropStyle::Static,
+ (true, true, false) => DropStyle::Conditional,
+ (true, true, true) => DropStyle::Open,
+ }
+ }
+
+ fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
+ match mode {
+ DropFlagMode::Shallow => {
+ self.ctxt.set_drop_flag(loc, path, DropFlagState::Absent);
+ }
+ DropFlagMode::Deep => {
+ on_all_children_bits(
+ self.tcx(), self.mir(), self.ctxt.move_data(), path,
+ |child| self.ctxt.set_drop_flag(loc, child, DropFlagState::Absent)
+ );
+ }
+ }
+ }
+
+ fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path> {
+ super::move_path_children_matching(self.ctxt.move_data(), path, |p| {
+ match p {
+ &Projection {
+ elem: ProjectionElem::Field(idx, _), ..
+ } => idx == field,
+ _ => false
+ }
+ })
+ }
+
+ fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
+ super::move_path_children_matching(self.ctxt.move_data(), path, |p| {
+ match p {
+ &Projection { elem: ProjectionElem::Deref, .. } => true,
+ _ => false
+ }
+ })
+ }
+
+ fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path> {
+ super::move_path_children_matching(self.ctxt.move_data(), path, |p| {
+ match p {
+ &Projection {
+ elem: ProjectionElem::Downcast(_, idx), ..
+ } => idx == variant,
+ _ => false
+ }
+ })
+ }
+
+ fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
+ self.ctxt.drop_flag(path).map(Operand::Consume)
+ }
+}
+
struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
patch: MirPatch<'tcx>,
}
-#[derive(Copy, Clone, Debug)]
-struct DropCtxt<'a, 'tcx: 'a> {
- source_info: SourceInfo,
- is_cleanup: bool,
-
- init_data: &'a InitializationData,
-
- lvalue: &'a Lvalue<'tcx>,
- path: MovePathIndex,
- succ: BasicBlock,
- unwind: Option<BasicBlock>
-}
-
impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
fn move_data(&self) -> &'b MoveData<'tcx> { &self.env.move_data }
fn param_env(&self) -> &'b ty::ParameterEnvironment<'tcx> {
let init_data = self.initialization_data_at(loc);
match self.move_data().rev_lookup.find(location) {
LookupResult::Exact(path) => {
- self.elaborate_drop(&DropCtxt {
- source_info: terminator.source_info,
- is_cleanup: data.is_cleanup,
- init_data: &init_data,
- lvalue: location,
- path: path,
- succ: target,
- unwind: if data.is_cleanup {
+ elaborate_drop(
+ &mut Elaborator {
+ init_data: &init_data,
+ ctxt: self
+ },
+ terminator.source_info,
+ data.is_cleanup,
+ location,
+ path,
+ target,
+ if data.is_cleanup {
None
} else {
Some(Option::unwrap_or(unwind, resume_block))
- }
- }, bb);
+ },
+ bb)
}
LookupResult::Parent(..) => {
span_bug!(terminator.source_info.span,
debug!("elaborate_drop_and_replace({:?}) - tracked {:?}", terminator, path);
let init_data = self.initialization_data_at(loc);
- self.elaborate_drop(&DropCtxt {
- source_info: terminator.source_info,
- is_cleanup: data.is_cleanup,
- init_data: &init_data,
- lvalue: location,
- path: path,
- succ: target,
- unwind: Some(unwind)
- }, bb);
+ elaborate_drop(
+ &mut Elaborator {
+ init_data: &init_data,
+ ctxt: self
+ },
+ terminator.source_info,
+ data.is_cleanup,
+ location,
+ path,
+ target,
+ Some(unwind),
+ bb);
on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
self.set_drop_flag(Location { block: target, statement_index: 0 },
child, DropFlagState::Present);
}
}
- /// This elaborates a single drop instruction, located at `bb`, and
- /// patches over it.
- ///
- /// The elaborated drop checks the drop flags to only drop what
- /// is initialized.
- ///
- /// In addition, the relevant drop flags also need to be cleared
- /// to avoid double-drops. However, in the middle of a complex
- /// drop, one must avoid clearing some of the flags before they
- /// are read, as that would cause a memory leak.
- ///
- /// In particular, when dropping an ADT, multiple fields may be
- /// joined together under the `rest` subpath. They are all controlled
- /// by the primary drop flag, but only the last rest-field dropped
- /// should clear it (and it must also not clear anything else).
- ///
- /// FIXME: I think we should just control the flags externally
- /// and then we do not need this machinery.
- fn elaborate_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, bb: BasicBlock) {
- debug!("elaborate_drop({:?})", c);
-
- let mut some_live = false;
- let mut some_dead = false;
- let mut children_count = 0;
- on_all_children_bits(
- self.tcx, self.mir, self.move_data(),
- c.path, |child| {
- if self.path_needs_drop(child) {
- let (live, dead) = c.init_data.state(child);
- debug!("elaborate_drop: state({:?}) = {:?}",
- child, (live, dead));
- some_live |= live;
- some_dead |= dead;
- children_count += 1;
- }
- });
-
- debug!("elaborate_drop({:?}): live - {:?}", c,
- (some_live, some_dead));
- match (some_live, some_dead) {
- (false, false) | (false, true) => {
- // dead drop - patch it out
- self.patch.patch_terminator(bb, TerminatorKind::Goto {
- target: c.succ
- });
- }
- (true, false) => {
- // static drop - just set the flag
- self.patch.patch_terminator(bb, TerminatorKind::Drop {
- location: c.lvalue.clone(),
- target: c.succ,
- unwind: c.unwind
- });
- self.drop_flags_for_drop(c, bb);
- }
- (true, true) => {
- // dynamic drop
- let drop_bb = if children_count == 1 || self.must_complete_drop(c) {
- self.conditional_drop(c)
- } else {
- self.open_drop(c)
- };
- self.patch.patch_terminator(bb, TerminatorKind::Goto {
- target: drop_bb
- });
- }
- }
- }
-
- /// Return the lvalue and move path for each field of `variant`,
- /// (the move path is `None` if the field is a rest field).
- fn move_paths_for_fields(&self,
- base_lv: &Lvalue<'tcx>,
- variant_path: MovePathIndex,
- variant: &'tcx ty::VariantDef,
- substs: &'tcx Substs<'tcx>)
- -> Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>
- {
- variant.fields.iter().enumerate().map(|(i, f)| {
- let subpath =
- super::move_path_children_matching(self.move_data(), variant_path, |p| {
- match p {
- &Projection {
- elem: ProjectionElem::Field(idx, _), ..
- } => idx.index() == i,
- _ => false
- }
- });
-
- let field_ty =
- self.tcx.normalize_associated_type_in_env(
- &f.ty(self.tcx, substs),
- self.param_env()
- );
- (base_lv.clone().field(Field::new(i), field_ty), subpath)
- }).collect()
- }
-
- /// Create one-half of the drop ladder for a list of fields, and return
- /// the list of steps in it in reverse order.
- ///
- /// `unwind_ladder` is such a list of steps in reverse order,
- /// which is called instead of the next step if the drop unwinds
- /// (the first field is never reached). If it is `None`, all
- /// unwind targets are left blank.
- fn drop_halfladder<'a>(&mut self,
- c: &DropCtxt<'a, 'tcx>,
- unwind_ladder: Option<Vec<BasicBlock>>,
- succ: BasicBlock,
- fields: &[(Lvalue<'tcx>, Option<MovePathIndex>)],
- is_cleanup: bool)
- -> Vec<BasicBlock>
- {
- let mut unwind_succ = if is_cleanup {
- None
- } else {
- c.unwind
- };
-
- let mut succ = self.new_block(
- c, c.is_cleanup, TerminatorKind::Goto { target: succ }
- );
-
- // Always clear the "master" drop flag at the bottom of the
- // ladder. This is needed because the "master" drop flag
- // protects the ADT's discriminant, which is invalidated
- // after the ADT is dropped.
- self.set_drop_flag(
- Location { block: succ, statement_index: 0 },
- c.path,
- DropFlagState::Absent
- );
-
- fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
- succ = if let Some(path) = path {
- debug!("drop_ladder: for std field {} ({:?})", i, lv);
-
- self.elaborated_drop_block(&DropCtxt {
- source_info: c.source_info,
- is_cleanup: is_cleanup,
- init_data: c.init_data,
- lvalue: lv,
- path: path,
- succ: succ,
- unwind: unwind_succ,
- })
- } else {
- debug!("drop_ladder: for rest field {} ({:?})", i, lv);
-
- self.complete_drop(&DropCtxt {
- source_info: c.source_info,
- is_cleanup: is_cleanup,
- init_data: c.init_data,
- lvalue: lv,
- path: c.path,
- succ: succ,
- unwind: unwind_succ,
- }, false)
- };
-
- unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
- succ
- }).collect()
- }
-
- /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
- ///
- /// For example, with 3 fields, the drop ladder is
- ///
- /// .d0:
- /// ELAB(drop location.0 [target=.d1, unwind=.c1])
- /// .d1:
- /// ELAB(drop location.1 [target=.d2, unwind=.c2])
- /// .d2:
- /// ELAB(drop location.2 [target=`c.succ`, unwind=`c.unwind`])
- /// .c1:
- /// ELAB(drop location.1 [target=.c2])
- /// .c2:
- /// ELAB(drop location.2 [target=`c.unwind])
- fn drop_ladder<'a>(&mut self,
- c: &DropCtxt<'a, 'tcx>,
- fields: Vec<(Lvalue<'tcx>, Option<MovePathIndex>)>)
- -> BasicBlock
- {
- debug!("drop_ladder({:?}, {:?})", c, fields);
-
- let mut fields = fields;
- fields.retain(|&(ref lvalue, _)| {
- let ty = lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
- self.tcx.type_needs_drop_given_env(ty, self.param_env())
- });
-
- debug!("drop_ladder - fields needing drop: {:?}", fields);
-
- let unwind_ladder = if c.is_cleanup {
- None
- } else {
- Some(self.drop_halfladder(c, None, c.unwind.unwrap(), &fields, true))
- };
-
- self.drop_halfladder(c, unwind_ladder, c.succ, &fields, c.is_cleanup)
- .last().cloned().unwrap_or(c.succ)
- }
-
- fn open_drop_for_tuple<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, tys: &[Ty<'tcx>])
- -> BasicBlock
- {
- debug!("open_drop_for_tuple({:?}, {:?})", c, tys);
-
- let fields = tys.iter().enumerate().map(|(i, &ty)| {
- (c.lvalue.clone().field(Field::new(i), ty),
- super::move_path_children_matching(
- self.move_data(), c.path, |proj| match proj {
- &Projection {
- elem: ProjectionElem::Field(f, _), ..
- } => f.index() == i,
- _ => false
- }
- ))
- }).collect();
-
- self.drop_ladder(c, fields)
- }
-
- fn open_drop_for_box<'a>(&mut self, c: &DropCtxt<'a, 'tcx>, ty: Ty<'tcx>)
- -> BasicBlock
- {
- debug!("open_drop_for_box({:?}, {:?})", c, ty);
-
- let interior_path = super::move_path_children_matching(
- self.move_data(), c.path, |proj| match proj {
- &Projection { elem: ProjectionElem::Deref, .. } => true,
- _ => false
- }).unwrap();
-
- let interior = c.lvalue.clone().deref();
- let inner_c = DropCtxt {
- lvalue: &interior,
- unwind: c.unwind.map(|u| {
- self.box_free_block(c, ty, u, true)
- }),
- succ: self.box_free_block(c, ty, c.succ, c.is_cleanup),
- path: interior_path,
- ..*c
- };
-
- self.elaborated_drop_block(&inner_c)
- }
-
- fn open_drop_for_adt<'a>(&mut self, c: &DropCtxt<'a, 'tcx>,
- adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
- -> BasicBlock {
- debug!("open_drop_for_adt({:?}, {:?}, {:?})", c, adt, substs);
-
- match adt.variants.len() {
- 1 => {
- let fields = self.move_paths_for_fields(
- c.lvalue,
- c.path,
- &adt.variants[0],
- substs
- );
- self.drop_ladder(c, fields)
- }
- _ => {
- let mut values = Vec::with_capacity(adt.variants.len());
- let mut blocks = Vec::with_capacity(adt.variants.len());
- let mut otherwise = None;
- for (variant_index, discr) in adt.discriminants(self.tcx).enumerate() {
- let subpath = super::move_path_children_matching(
- self.move_data(), c.path, |proj| match proj {
- &Projection {
- elem: ProjectionElem::Downcast(_, idx), ..
- } => idx == variant_index,
- _ => false
- });
- if let Some(variant_path) = subpath {
- let base_lv = c.lvalue.clone().elem(
- ProjectionElem::Downcast(adt, variant_index)
- );
- let fields = self.move_paths_for_fields(
- &base_lv,
- variant_path,
- &adt.variants[variant_index],
- substs);
- values.push(discr);
- blocks.push(self.drop_ladder(c, fields));
- } else {
- // variant not found - drop the entire enum
- if let None = otherwise {
- otherwise = Some(self.complete_drop(c, true));
- }
- }
- }
- if let Some(block) = otherwise {
- blocks.push(block);
- } else {
- values.pop();
- }
- // If there are multiple variants, then if something
- // is present within the enum the discriminant, tracked
- // by the rest path, must be initialized.
- //
- // Additionally, we do not want to switch on the
- // discriminant after it is free-ed, because that
- // way lies only trouble.
- let discr_ty = adt.repr.discr_type().to_ty(self.tcx);
- let discr = Lvalue::Local(self.patch.new_temp(discr_ty));
- let switch_block = self.patch.new_block(BasicBlockData {
- statements: vec![
- Statement {
- source_info: c.source_info,
- kind: StatementKind::Assign(discr.clone(),
- Rvalue::Discriminant(c.lvalue.clone()))
- }
- ],
- terminator: Some(Terminator {
- source_info: c.source_info,
- kind: TerminatorKind::SwitchInt {
- discr: Operand::Consume(discr),
- switch_ty: discr_ty,
- values: From::from(values),
- targets: blocks,
- }
- }),
- is_cleanup: c.is_cleanup,
- });
- self.drop_flag_test_block(c, switch_block)
- }
- }
- }
-
- /// The slow-path - create an "open", elaborated drop for a type
- /// which is moved-out-of only partially, and patch `bb` to a jump
- /// to it. This must not be called on ADTs with a destructor,
- /// as these can't be moved-out-of, except for `Box<T>`, which is
- /// special-cased.
- ///
- /// This creates a "drop ladder" that drops the needed fields of the
- /// ADT, both in the success case or if one of the destructors fail.
- fn open_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
- let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
- match ty.sty {
- ty::TyClosure(def_id, substs) => {
- let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx).collect();
- self.open_drop_for_tuple(c, &tys)
- }
- ty::TyTuple(tys, _) => {
- self.open_drop_for_tuple(c, tys)
- }
- ty::TyAdt(def, _) if def.is_box() => {
- self.open_drop_for_box(c, ty.boxed_ty())
- }
- ty::TyAdt(def, substs) => {
- self.open_drop_for_adt(c, def, substs)
- }
- _ => bug!("open drop from non-ADT `{:?}`", ty)
- }
- }
-
- /// Return a basic block that drop an lvalue using the context
- /// and path in `c`. If `update_drop_flag` is true, also
- /// clear `c`.
- ///
- /// if FLAG(c.path)
- /// if(update_drop_flag) FLAG(c.path) = false
- /// drop(c.lv)
- fn complete_drop<'a>(
- &mut self,
- c: &DropCtxt<'a, 'tcx>,
- update_drop_flag: bool)
- -> BasicBlock
- {
- debug!("complete_drop({:?},{:?})", c, update_drop_flag);
-
- let drop_block = self.drop_block(c);
- if update_drop_flag {
- self.set_drop_flag(
- Location { block: drop_block, statement_index: 0 },
- c.path,
- DropFlagState::Absent
- );
- }
-
- self.drop_flag_test_block(c, drop_block)
- }
-
- /// Create a simple conditional drop.
- ///
- /// if FLAG(c.lv)
- /// FLAGS(c.lv) = false
- /// drop(c.lv)
- fn conditional_drop<'a>(&mut self, c: &DropCtxt<'a, 'tcx>)
- -> BasicBlock
- {
- debug!("conditional_drop({:?})", c);
- let drop_bb = self.drop_block(c);
- self.drop_flags_for_drop(c, drop_bb);
-
- self.drop_flag_test_block(c, drop_bb)
- }
-
- fn new_block<'a>(&mut self,
- c: &DropCtxt<'a, 'tcx>,
- is_cleanup: bool,
- k: TerminatorKind<'tcx>)
- -> BasicBlock
- {
- self.patch.new_block(BasicBlockData {
- statements: vec![],
- terminator: Some(Terminator {
- source_info: c.source_info, kind: k
- }),
- is_cleanup: is_cleanup
- })
- }
-
- fn elaborated_drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
- debug!("elaborated_drop_block({:?})", c);
- let blk = self.drop_block(c);
- self.elaborate_drop(c, blk);
- blk
- }
-
- fn drop_flag_test_block<'a>(&mut self,
- c: &DropCtxt<'a, 'tcx>,
- on_set: BasicBlock)
- -> BasicBlock {
- self.drop_flag_test_block_with_succ(c, c.is_cleanup, on_set, c.succ)
- }
-
- fn drop_flag_test_block_with_succ<'a>(&mut self,
- c: &DropCtxt<'a, 'tcx>,
- is_cleanup: bool,
- on_set: BasicBlock,
- on_unset: BasicBlock)
- -> BasicBlock
- {
- let (maybe_live, maybe_dead) = c.init_data.state(c.path);
- debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
- c, is_cleanup, on_set, (maybe_live, maybe_dead));
-
- match (maybe_live, maybe_dead) {
- (false, _) => on_unset,
- (true, false) => on_set,
- (true, true) => {
- let flag = self.drop_flag(c.path).unwrap();
- let term = TerminatorKind::if_(self.tcx, Operand::Consume(flag), on_set, on_unset);
- self.new_block(c, is_cleanup, term)
- }
- }
- }
-
- fn drop_block<'a>(&mut self, c: &DropCtxt<'a, 'tcx>) -> BasicBlock {
- self.new_block(c, c.is_cleanup, TerminatorKind::Drop {
- location: c.lvalue.clone(),
- target: c.succ,
- unwind: c.unwind
- })
- }
-
- fn box_free_block<'a>(
- &mut self,
- c: &DropCtxt<'a, 'tcx>,
- ty: Ty<'tcx>,
- target: BasicBlock,
- is_cleanup: bool
- ) -> BasicBlock {
- let block = self.unelaborated_free_block(c, ty, target, is_cleanup);
- self.drop_flag_test_block_with_succ(c, is_cleanup, block, target)
- }
-
- fn unelaborated_free_block<'a>(
- &mut self,
- c: &DropCtxt<'a, 'tcx>,
- ty: Ty<'tcx>,
- target: BasicBlock,
- is_cleanup: bool
- ) -> BasicBlock {
- let mut statements = vec![];
- if let Some(&flag) = self.drop_flags.get(&c.path) {
- statements.push(Statement {
- source_info: c.source_info,
- kind: StatementKind::Assign(
- Lvalue::Local(flag),
- self.constant_bool(c.source_info.span, false)
- )
- });
- }
-
- let tcx = self.tcx;
- let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil()));
- let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
- let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
- let fty = tcx.item_type(free_func).subst(tcx, substs);
-
- self.patch.new_block(BasicBlockData {
- statements: statements,
- terminator: Some(Terminator {
- source_info: c.source_info, kind: TerminatorKind::Call {
- func: Operand::Constant(Constant {
- span: c.source_info.span,
- ty: fty,
- literal: Literal::Item {
- def_id: free_func,
- substs: substs
- }
- }),
- args: vec![Operand::Consume(c.lvalue.clone())],
- destination: Some((unit_temp, target)),
- cleanup: None
- }
- }),
- is_cleanup: is_cleanup
- })
- }
-
- fn must_complete_drop<'a>(&self, c: &DropCtxt<'a, 'tcx>) -> bool {
- // if we have a destuctor, we must *not* split the drop.
-
- // dataflow can create unneeded children in some cases
- // - be sure to ignore them.
-
- let ty = c.lvalue.ty(self.mir, self.tcx).to_ty(self.tcx);
-
- match ty.sty {
- ty::TyAdt(def, _) => {
- if def.has_dtor(self.tcx) && !def.is_box() {
- self.tcx.sess.span_warn(
- c.source_info.span,
- &format!("dataflow bug??? moving out of type with dtor {:?}",
- c));
- true
- } else {
- false
- }
- }
- _ => false
- }
- }
-
fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
Rvalue::Use(Operand::Constant(Constant {
span: span,
}
}
}
-
- fn drop_flags_for_drop<'a>(&mut self,
- c: &DropCtxt<'a, 'tcx>,
- bb: BasicBlock)
- {
- let loc = self.patch.terminator_loc(self.mir, bb);
- on_all_children_bits(
- self.tcx, self.mir, self.move_data(), c.path,
- |child| self.set_drop_flag(loc, child, DropFlagState::Absent)
- );
- }
}
}
Rvalue::Ref(..) |
Rvalue::Discriminant(..) |
- Rvalue::Len(..) => {}
+ Rvalue::Len(..) |
Rvalue::Box(..) => {
// This returns an rvalue with uninitialized contents. We can't
// move out of it here because it is an rvalue - assignments always
use rustc::mir::{self, BasicBlock, BasicBlockData, Mir, Statement, Terminator, Location};
use rustc::session::Session;
use rustc::ty::{self, TyCtxt};
+use rustc_mir::util::elaborate_drops::DropFlagState;
mod abs_domain;
pub mod elaborate_drops;
mod dataflow;
mod gather_moves;
-mod patch;
// mod graphviz;
use self::dataflow::{BitDenotation};
}
}
-#[derive(Debug, PartialEq, Eq, Copy, Clone)]
-enum DropFlagState {
- Present, // i.e. initialized
- Absent, // i.e. deinitialized or "moved"
-}
-
-impl DropFlagState {
- fn value(self) -> bool {
- match self {
- DropFlagState::Present => true,
- DropFlagState::Absent => false
- }
- }
-}
-
fn move_path_children_matching<'tcx, F>(move_data: &MoveData<'tcx>,
path: MovePathIndex,
mut cond: F)
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::ty::Ty;
-use rustc::mir::*;
-use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-
-/// This struct represents a patch to MIR, which can add
-/// new statements and basic blocks and patch over block
-/// terminators.
-pub struct MirPatch<'tcx> {
- patch_map: IndexVec<BasicBlock, Option<TerminatorKind<'tcx>>>,
- new_blocks: Vec<BasicBlockData<'tcx>>,
- new_statements: Vec<(Location, StatementKind<'tcx>)>,
- new_locals: Vec<LocalDecl<'tcx>>,
- resume_block: BasicBlock,
- next_local: usize,
-}
-
-impl<'tcx> MirPatch<'tcx> {
- pub fn new(mir: &Mir<'tcx>) -> Self {
- let mut result = MirPatch {
- patch_map: IndexVec::from_elem(None, mir.basic_blocks()),
- new_blocks: vec![],
- new_statements: vec![],
- new_locals: vec![],
- next_local: mir.local_decls.len(),
- resume_block: START_BLOCK
- };
-
- // make sure the MIR we create has a resume block. It is
- // completely legal to convert jumps to the resume block
- // to jumps to None, but we occasionally have to add
- // instructions just before that.
-
- let mut resume_block = None;
- let mut resume_stmt_block = None;
- for (bb, block) in mir.basic_blocks().iter_enumerated() {
- if let TerminatorKind::Resume = block.terminator().kind {
- if block.statements.len() > 0 {
- resume_stmt_block = Some(bb);
- } else {
- resume_block = Some(bb);
- }
- break
- }
- }
- let resume_block = resume_block.unwrap_or_else(|| {
- result.new_block(BasicBlockData {
- statements: vec![],
- terminator: Some(Terminator {
- source_info: SourceInfo {
- span: mir.span,
- scope: ARGUMENT_VISIBILITY_SCOPE
- },
- kind: TerminatorKind::Resume
- }),
- is_cleanup: true
- })});
- result.resume_block = resume_block;
- if let Some(resume_stmt_block) = resume_stmt_block {
- result.patch_terminator(resume_stmt_block, TerminatorKind::Goto {
- target: resume_block
- });
- }
- result
- }
-
- pub fn resume_block(&self) -> BasicBlock {
- self.resume_block
- }
-
- pub fn is_patched(&self, bb: BasicBlock) -> bool {
- self.patch_map[bb].is_some()
- }
-
- pub fn terminator_loc(&self, mir: &Mir<'tcx>, bb: BasicBlock) -> Location {
- let offset = match bb.index().checked_sub(mir.basic_blocks().len()) {
- Some(index) => self.new_blocks[index].statements.len(),
- None => mir[bb].statements.len()
- };
- Location {
- block: bb,
- statement_index: offset
- }
- }
-
- pub fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
- let index = self.next_local;
- self.next_local += 1;
- self.new_locals.push(LocalDecl::new_temp(ty));
- Local::new(index as usize)
- }
-
- pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock {
- let block = BasicBlock::new(self.patch_map.len());
- debug!("MirPatch: new_block: {:?}: {:?}", block, data);
- self.new_blocks.push(data);
- self.patch_map.push(None);
- block
- }
-
- pub fn patch_terminator(&mut self, block: BasicBlock, new: TerminatorKind<'tcx>) {
- assert!(self.patch_map[block].is_none());
- debug!("MirPatch: patch_terminator({:?}, {:?})", block, new);
- self.patch_map[block] = Some(new);
- }
-
- pub fn add_statement(&mut self, loc: Location, stmt: StatementKind<'tcx>) {
- debug!("MirPatch: add_statement({:?}, {:?})", loc, stmt);
- self.new_statements.push((loc, stmt));
- }
-
- pub fn add_assign(&mut self, loc: Location, lv: Lvalue<'tcx>, rv: Rvalue<'tcx>) {
- self.add_statement(loc, StatementKind::Assign(lv, rv));
- }
-
- pub fn apply(self, mir: &mut Mir<'tcx>) {
- debug!("MirPatch: {:?} new temps, starting from index {}: {:?}",
- self.new_locals.len(), mir.local_decls.len(), self.new_locals);
- debug!("MirPatch: {} new blocks, starting from index {}",
- self.new_blocks.len(), mir.basic_blocks().len());
- mir.basic_blocks_mut().extend(self.new_blocks);
- mir.local_decls.extend(self.new_locals);
- for (src, patch) in self.patch_map.into_iter_enumerated() {
- if let Some(patch) = patch {
- debug!("MirPatch: patching block {:?}", src);
- mir[src].terminator_mut().kind = patch;
- }
- }
-
- let mut new_statements = self.new_statements;
- new_statements.sort_by(|u,v| u.0.cmp(&v.0));
-
- let mut delta = 0;
- let mut last_bb = START_BLOCK;
- for (mut loc, stmt) in new_statements {
- if loc.block != last_bb {
- delta = 0;
- last_bb = loc.block;
- }
- debug!("MirPatch: adding statement {:?} at loc {:?}+{}",
- stmt, loc, delta);
- loc.statement_index += delta;
- let source_info = Self::source_info_for_index(
- &mir[loc.block], loc
- );
- mir[loc.block].statements.insert(
- loc.statement_index, Statement {
- source_info: source_info,
- kind: stmt
- });
- delta += 1;
- }
- }
-
- pub fn source_info_for_index(data: &BasicBlockData, loc: Location) -> SourceInfo {
- match data.statements.get(loc.statement_index) {
- Some(stmt) => stmt.source_info,
- None => data.terminator().source_info
- }
- }
-
- pub fn source_info_for_location(&self, mir: &Mir, loc: Location) -> SourceInfo {
- let data = match loc.block.index().checked_sub(mir.basic_blocks().len()) {
- Some(new) => &self.new_blocks[new],
- None => &mir[loc.block]
- };
- Self::source_info_for_index(data, loc)
- }
-}
pub fn bckerr_to_diag(&self, err: &BckError<'tcx>) -> DiagnosticBuilder<'a> {
let span = err.span.clone();
let mut immutable_field = None;
+ let mut local_def = None;
let msg = &match err.code {
err_mutbl => {
}
None
});
+ local_def = err.cmt.get_def()
+ .and_then(|nid| {
+ if !self.tcx.hir.is_argument(nid) {
+ Some(self.tcx.hir.span(nid))
+ } else {
+ None
+ }
+ });
format!("cannot borrow {} as mutable", descr)
}
if let Some((span, msg)) = immutable_field {
db.span_label(span, &msg);
}
+ if let Some(let_span) = local_def {
+ if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(let_span) {
+ db.span_label(let_span, &format!("consider changing this to `mut {}`", snippet));
+ }
+ }
db
}
} else {
db.span_label(*error_span, &format!("cannot borrow mutably"));
}
+ } else if let Categorization::Interior(ref cmt, _) = err.cmt.cat {
+ if let mc::MutabilityCategory::McImmutable = cmt.mutbl {
+ db.span_label(*error_span,
+ &"cannot mutably borrow immutable field");
+ }
}
}
}
if self.tail_len > 0 {
unsafe {
- let source_array_vec = &mut **self.array_vec;
+ let source_array_vec = &mut *self.array_vec.as_mut_ptr();
// memmove back untouched tail, update to new length
let start = source_array_vec.len();
let tail = self.tail_start;
ManuallyDrop::new()
}
}
-
#![feature(shared)]
#![feature(collections_range)]
-#![feature(collections_bound)]
#![cfg_attr(stage0,feature(field_init_shorthand))]
#![feature(nonzero)]
#![feature(rustc_private)]
use rustc_borrowck as borrowck;
use rustc_borrowck::graphviz as borrowck_dot;
-use rustc_mir::pretty::write_mir_pretty;
-use rustc_mir::graphviz::write_mir_graphviz;
+use rustc_mir::util::{write_mir_pretty, write_mir_graphviz};
use syntax::ast::{self, BlockCheckMode};
use syntax::fold::{self, Folder};
use syntax::ast::{self, Name, NodeId};
use syntax::attr;
use syntax::parse::token;
-use syntax::symbol::{Symbol, InternedString};
+use syntax::symbol::InternedString;
use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
use syntax::tokenstream;
use rustc::hir;
use rustc::hir::*;
use rustc::hir::def::Def;
use rustc::hir::def_id::DefId;
-use rustc::hir::intravisit as visit;
+use rustc::hir::intravisit::{self as visit, Visitor};
use rustc::ty::TyCtxt;
-use rustc_data_structures::fnv;
use std::hash::{Hash, Hasher};
use super::def_path_hash::DefPathHashes;
});
}
-impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> {
+impl<'a, 'hash, 'tcx> Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> visit::NestedVisitorMap<'this, 'tcx> {
if self.hash_bodies {
visit::NestedVisitorMap::OnlyBodies(&self.tcx.hir)
}
}
- fn hash_meta_item(&mut self, meta_item: &ast::MetaItem) {
- debug!("hash_meta_item: st={:?}", self.st);
-
- // ignoring span information, it doesn't matter here
- self.hash_discriminant(&meta_item.node);
- meta_item.name.as_str().len().hash(self.st);
- meta_item.name.as_str().hash(self.st);
-
- match meta_item.node {
- ast::MetaItemKind::Word => {}
- ast::MetaItemKind::NameValue(ref lit) => saw_lit(lit).hash(self.st),
- ast::MetaItemKind::List(ref items) => {
- // Sort subitems so the hash does not depend on their order
- let indices = self.indices_sorted_by(&items, |p| {
- (p.name().map(Symbol::as_str), fnv::hash(&p.literal().map(saw_lit)))
- });
- items.len().hash(self.st);
- for (index, &item_index) in indices.iter().enumerate() {
- index.hash(self.st);
- let nested_meta_item: &ast::NestedMetaItemKind = &items[item_index].node;
- self.hash_discriminant(nested_meta_item);
- match *nested_meta_item {
- ast::NestedMetaItemKind::MetaItem(ref meta_item) => {
- self.hash_meta_item(meta_item);
- }
- ast::NestedMetaItemKind::Literal(ref lit) => {
- saw_lit(lit).hash(self.st);
- }
- }
- }
- }
- }
- }
-
pub fn hash_attributes(&mut self, attributes: &[ast::Attribute]) {
debug!("hash_attributes: st={:?}", self.st);
let indices = self.indices_sorted_by(attributes, |attr| attr.name());
for i in indices {
let attr = &attributes[i];
- if !attr.is_sugared_doc &&
- !IGNORED_ATTRIBUTES.contains(&&*attr.value.name().as_str()) {
+ match attr.name() {
+ Some(name) if IGNORED_ATTRIBUTES.contains(&&*name.as_str()) => continue,
+ _ => {}
+ };
+ if !attr.is_sugared_doc {
SawAttribute(attr.style).hash(self.st);
- self.hash_meta_item(&attr.value);
+ for segment in &attr.path.segments {
+ SawIdent(segment.identifier.name.as_str()).hash(self.st);
+ }
+ for tt in attr.tokens.trees() {
+ self.hash_token_tree(&tt);
+ }
}
}
}
impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode<DefId> {
- for item in attr.meta_item_list().unwrap_or(&[]) {
+ for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
if item.check_name(LABEL) {
- let value = expect_associated_value(self.tcx, item);
+ let value = expect_associated_value(self.tcx, &item);
match DepNode::from_label_string(&value.as_str(), def_id) {
Ok(def_id) => return def_id,
Err(()) => {
debug!("check_config(attr={:?})", attr);
let config = &tcx.sess.parse_sess.config;
debug!("check_config: config={:?}", config);
- for item in attr.meta_item_list().unwrap_or(&[]) {
+ for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
if item.check_name(CFG) {
- let value = expect_associated_value(tcx, item);
+ let value = expect_associated_value(tcx, &item);
debug!("check_config: searching for cfg {:?}", value);
return config.contains(&(value, None));
}
}
fn create_dir(sess: &Session, path: &Path, dir_tag: &str) -> Result<(),()> {
- match fs_util::create_dir_racy(path) {
+ match std_fs::create_dir_all(path) {
Ok(()) => {
debug!("{} directory created successfully", dir_tag);
Ok(())
}
}
- let has_doc = attrs.iter().any(|a| a.is_value_str() && a.name() == "doc");
+ let has_doc = attrs.iter().any(|a| a.is_value_str() && a.check_name("doc"));
if !has_doc {
cx.span_lint(MISSING_DOCS,
sp,
impl EarlyLintPass for DeprecatedAttr {
fn check_attribute(&mut self, cx: &EarlyContext, attr: &ast::Attribute) {
- let name = attr.name();
+ let name = unwrap_or!(attr.name(), return);
for &&(n, _, ref g) in &self.depr_attrs {
if name == n {
if let &AttributeGate::Gated(Stability::Deprecated(link),
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnstableFeatures {
fn check_attribute(&mut self, ctx: &LateContext, attr: &ast::Attribute) {
- if attr.meta().check_name("feature") {
- if let Some(items) = attr.meta().meta_item_list() {
+ if attr.check_name("feature") {
+ if let Some(items) = attr.meta_item_list() {
for item in items {
ctx.span_lint(UNSTABLE_FEATURES, item.span(), "unstable feature");
}
#![feature(slice_patterns)]
#![feature(staged_api)]
+#[macro_use]
extern crate syntax;
#[macro_use]
extern crate rustc;
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAttributes {
fn check_attribute(&mut self, cx: &LateContext, attr: &ast::Attribute) {
debug!("checking attribute: {:?}", attr);
+ let name = unwrap_or!(attr.name(), return);
// Note that check_name() marks the attribute as used if it matches.
for &(ref name, ty, _) in BUILTIN_ATTRIBUTES {
cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute");
// Is it a builtin attribute that must be used at the crate level?
let known_crate = BUILTIN_ATTRIBUTES.iter()
- .find(|&&(name, ty, _)| attr.name() == name && ty == AttributeType::CrateLevel)
+ .find(|&&(builtin, ty, _)| name == builtin && ty == AttributeType::CrateLevel)
.is_some();
// Has a plugin registered this attribute as one which must be used at
// the crate level?
let plugin_crate = plugin_attributes.iter()
- .find(|&&(ref x, t)| attr.name() == &**x && AttributeType::CrateLevel == t)
+ .find(|&&(ref x, t)| name == &**x && AttributeType::CrateLevel == t)
.is_some();
if known_crate || plugin_crate {
let msg = match attr.style {
impl<'a> CrateLoader<'a> {
pub fn preprocess(&mut self, krate: &ast::Crate) {
- for attr in krate.attrs.iter().filter(|m| m.name() == "link_args") {
- if let Some(linkarg) = attr.value_str() {
- self.cstore.add_used_link_args(&linkarg.as_str());
+ for attr in &krate.attrs {
+ if attr.path == "link_args" {
+ if let Some(linkarg) = attr.value_str() {
+ self.cstore.add_used_link_args(&linkarg.as_str());
+ }
}
}
}
}
pub fn is_staged_api(&self) -> bool {
- self.get_item_attrs(CRATE_DEF_INDEX)
- .iter()
- .any(|attr| attr.name() == "stable" || attr.name() == "unstable")
+ for attr in self.get_item_attrs(CRATE_DEF_INDEX) {
+ if attr.path == "stable" || attr.path == "unstable" {
+ return true;
+ }
+ }
+ false
}
pub fn is_allocator(&self) -> bool {
predicates: Some(self.encode_predicates(def_id)),
ast: None,
- mir: None,
+ mir: self.encode_mir(def_id),
}
}
predicates: Some(self.encode_predicates(def_id)),
ast: None,
- mir: None,
+ mir: self.encode_mir(def_id),
}
}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Def-use analysis.
-
-use rustc::mir::{Local, Location, Lvalue, Mir};
-use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor};
-use rustc_data_structures::indexed_vec::IndexVec;
-use std::marker::PhantomData;
-use std::mem;
-
-pub struct DefUseAnalysis<'tcx> {
- info: IndexVec<Local, Info<'tcx>>,
-}
-
-#[derive(Clone)]
-pub struct Info<'tcx> {
- pub defs_and_uses: Vec<Use<'tcx>>,
-}
-
-#[derive(Clone)]
-pub struct Use<'tcx> {
- pub context: LvalueContext<'tcx>,
- pub location: Location,
-}
-
-impl<'tcx> DefUseAnalysis<'tcx> {
- pub fn new(mir: &Mir<'tcx>) -> DefUseAnalysis<'tcx> {
- DefUseAnalysis {
- info: IndexVec::from_elem_n(Info::new(), mir.local_decls.len()),
- }
- }
-
- pub fn analyze(&mut self, mir: &Mir<'tcx>) {
- let mut finder = DefUseFinder {
- info: mem::replace(&mut self.info, IndexVec::new()),
- };
- finder.visit_mir(mir);
- self.info = finder.info
- }
-
- pub fn local_info(&self, local: Local) -> &Info<'tcx> {
- &self.info[local]
- }
-
- pub fn local_info_mut(&mut self, local: Local) -> &mut Info<'tcx> {
- &mut self.info[local]
- }
-
- fn mutate_defs_and_uses<F>(&self, local: Local, mir: &mut Mir<'tcx>, mut callback: F)
- where F: for<'a> FnMut(&'a mut Lvalue<'tcx>,
- LvalueContext<'tcx>,
- Location) {
- for lvalue_use in &self.info[local].defs_and_uses {
- MutateUseVisitor::new(local,
- &mut callback,
- mir).visit_location(mir, lvalue_use.location)
- }
- }
-
- /// FIXME(pcwalton): This should update the def-use chains.
- pub fn replace_all_defs_and_uses_with(&self,
- local: Local,
- mir: &mut Mir<'tcx>,
- new_lvalue: Lvalue<'tcx>) {
- self.mutate_defs_and_uses(local, mir, |lvalue, _, _| *lvalue = new_lvalue.clone())
- }
-}
-
-struct DefUseFinder<'tcx> {
- info: IndexVec<Local, Info<'tcx>>,
-}
-
-impl<'tcx> DefUseFinder<'tcx> {
- fn lvalue_mut_info(&mut self, lvalue: &Lvalue<'tcx>) -> Option<&mut Info<'tcx>> {
- let info = &mut self.info;
-
- if let Lvalue::Local(local) = *lvalue {
- Some(&mut info[local])
- } else {
- None
- }
- }
-}
-
-impl<'tcx> Visitor<'tcx> for DefUseFinder<'tcx> {
- fn visit_lvalue(&mut self,
- lvalue: &Lvalue<'tcx>,
- context: LvalueContext<'tcx>,
- location: Location) {
- if let Some(ref mut info) = self.lvalue_mut_info(lvalue) {
- info.defs_and_uses.push(Use {
- context: context,
- location: location,
- })
- }
- self.super_lvalue(lvalue, context, location)
- }
-}
-
-impl<'tcx> Info<'tcx> {
- fn new() -> Info<'tcx> {
- Info {
- defs_and_uses: vec![],
- }
- }
-
- pub fn def_count(&self) -> usize {
- self.defs_and_uses.iter().filter(|lvalue_use| lvalue_use.context.is_mutating_use()).count()
- }
-
- pub fn def_count_not_including_drop(&self) -> usize {
- self.defs_and_uses.iter().filter(|lvalue_use| {
- lvalue_use.context.is_mutating_use() && !lvalue_use.context.is_drop()
- }).count()
- }
-
- pub fn use_count(&self) -> usize {
- self.defs_and_uses.iter().filter(|lvalue_use| {
- lvalue_use.context.is_nonmutating_use()
- }).count()
- }
-}
-
-struct MutateUseVisitor<'tcx, F> {
- query: Local,
- callback: F,
- phantom: PhantomData<&'tcx ()>,
-}
-
-impl<'tcx, F> MutateUseVisitor<'tcx, F> {
- fn new(query: Local, callback: F, _: &Mir<'tcx>)
- -> MutateUseVisitor<'tcx, F>
- where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) {
- MutateUseVisitor {
- query: query,
- callback: callback,
- phantom: PhantomData,
- }
- }
-}
-
-impl<'tcx, F> MutVisitor<'tcx> for MutateUseVisitor<'tcx, F>
- where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) {
- fn visit_lvalue(&mut self,
- lvalue: &mut Lvalue<'tcx>,
- context: LvalueContext<'tcx>,
- location: Location) {
- if let Lvalue::Local(local) = *lvalue {
- if local == self.query {
- (self.callback)(lvalue, context, location)
- }
- }
- self.super_lvalue(lvalue, context, location)
- }
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use dot;
-use rustc::hir::def_id::DefId;
-use rustc::mir::*;
-use rustc::ty::TyCtxt;
-use std::fmt::Debug;
-use std::io::{self, Write};
-use syntax::ast::NodeId;
-
-use rustc_data_structures::indexed_vec::Idx;
-
-/// Write a graphviz DOT graph of a list of MIRs.
-pub fn write_mir_graphviz<'a, 'b, 'tcx, W, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
- iter: I,
- w: &mut W)
- -> io::Result<()>
- where W: Write, I: Iterator<Item=DefId>
-{
- for def_id in iter {
- let nodeid = tcx.hir.as_local_node_id(def_id).unwrap();
- let mir = &tcx.item_mir(def_id);
-
- writeln!(w, "digraph Mir_{} {{", nodeid)?;
-
- // Global graph properties
- writeln!(w, r#" graph [fontname="monospace"];"#)?;
- writeln!(w, r#" node [fontname="monospace"];"#)?;
- writeln!(w, r#" edge [fontname="monospace"];"#)?;
-
- // Graph label
- write_graph_label(tcx, nodeid, mir, w)?;
-
- // Nodes
- for (block, _) in mir.basic_blocks().iter_enumerated() {
- write_node(block, mir, w)?;
- }
-
- // Edges
- for (source, _) in mir.basic_blocks().iter_enumerated() {
- write_edges(source, mir, w)?;
- }
- writeln!(w, "}}")?
- }
- Ok(())
-}
-
-/// Write a graphviz HTML-styled label for the given basic block, with
-/// all necessary escaping already performed. (This is suitable for
-/// emitting directly, as is done in this module, or for use with the
-/// LabelText::HtmlStr from libgraphviz.)
-///
-/// `init` and `fini` are callbacks for emitting additional rows of
-/// data (using HTML enclosed with `<tr>` in the emitted text).
-pub fn write_node_label<W: Write, INIT, FINI>(block: BasicBlock,
- mir: &Mir,
- w: &mut W,
- num_cols: u32,
- init: INIT,
- fini: FINI) -> io::Result<()>
- where INIT: Fn(&mut W) -> io::Result<()>,
- FINI: Fn(&mut W) -> io::Result<()>
-{
- let data = &mir[block];
-
- write!(w, r#"<table border="0" cellborder="1" cellspacing="0">"#)?;
-
- // Basic block number at the top.
- write!(w, r#"<tr><td {attrs} colspan="{colspan}">{blk}</td></tr>"#,
- attrs=r#"bgcolor="gray" align="center""#,
- colspan=num_cols,
- blk=block.index())?;
-
- init(w)?;
-
- // List of statements in the middle.
- if !data.statements.is_empty() {
- write!(w, r#"<tr><td align="left" balign="left">"#)?;
- for statement in &data.statements {
- write!(w, "{}<br/>", escape(statement))?;
- }
- write!(w, "</td></tr>")?;
- }
-
- // Terminator head at the bottom, not including the list of successor blocks. Those will be
- // displayed as labels on the edges between blocks.
- let mut terminator_head = String::new();
- data.terminator().kind.fmt_head(&mut terminator_head).unwrap();
- write!(w, r#"<tr><td align="left">{}</td></tr>"#, dot::escape_html(&terminator_head))?;
-
- fini(w)?;
-
- // Close the table
- writeln!(w, "</table>")
-}
-
-/// Write a graphviz DOT node for the given basic block.
-fn write_node<W: Write>(block: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {
- // Start a new node with the label to follow, in one of DOT's pseudo-HTML tables.
- write!(w, r#" {} [shape="none", label=<"#, node(block))?;
- write_node_label(block, mir, w, 1, |_| Ok(()), |_| Ok(()))?;
- // Close the node label and the node itself.
- writeln!(w, ">];")
-}
-
-/// Write graphviz DOT edges with labels between the given basic block and all of its successors.
-fn write_edges<W: Write>(source: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {
- let terminator = mir[source].terminator();
- let labels = terminator.kind.fmt_successor_labels();
-
- for (&target, label) in terminator.successors().iter().zip(labels) {
- writeln!(w, r#" {} -> {} [label="{}"];"#, node(source), node(target), label)?;
- }
-
- Ok(())
-}
-
-/// Write the graphviz DOT label for the overall graph. This is essentially a block of text that
-/// will appear below the graph, showing the type of the `fn` this MIR represents and the types of
-/// all the variables and temporaries.
-fn write_graph_label<'a, 'tcx, W: Write>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- nid: NodeId,
- mir: &Mir,
- w: &mut W)
- -> io::Result<()> {
- write!(w, " label=<fn {}(", dot::escape_html(&tcx.node_path_str(nid)))?;
-
- // fn argument types.
- for (i, arg) in mir.args_iter().enumerate() {
- if i > 0 {
- write!(w, ", ")?;
- }
- write!(w, "{:?}: {}", Lvalue::Local(arg), escape(&mir.local_decls[arg].ty))?;
- }
-
- write!(w, ") -> {}", escape(mir.return_ty))?;
- write!(w, r#"<br align="left"/>"#)?;
-
- for local in mir.vars_and_temps_iter() {
- let decl = &mir.local_decls[local];
-
- write!(w, "let ")?;
- if decl.mutability == Mutability::Mut {
- write!(w, "mut ")?;
- }
-
- if let Some(name) = decl.name {
- write!(w, r#"{:?}: {}; // {}<br align="left"/>"#,
- Lvalue::Local(local), escape(&decl.ty), name)?;
- } else {
- write!(w, r#"let mut {:?}: {};<br align="left"/>"#,
- Lvalue::Local(local), escape(&decl.ty))?;
- }
- }
-
- writeln!(w, ">;")
-}
-
-fn node(block: BasicBlock) -> String {
- format!("bb{}", block.index())
-}
-
-fn escape<T: Debug>(t: &T) -> String {
- dot::escape_html(&format!("{:?}", t))
-}
#![feature(associated_consts)]
#![feature(box_patterns)]
+#![feature(box_syntax)]
+#![cfg_attr(stage0, feature(field_init_shorthand))]
#![feature(i128_type)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
pub mod build;
pub mod callgraph;
-pub mod def_use;
-pub mod graphviz;
mod hair;
+mod shim;
pub mod mir_map;
-pub mod pretty;
pub mod transform;
+pub mod util;
use rustc::ty::maps::Providers;
pub fn provide(providers: &mut Providers) {
mir_map::provide(providers);
+ shim::provide(providers);
transform::qualify_consts::provide(providers);
}
\ No newline at end of file
use rustc::mir::Mir;
use rustc::mir::transform::MirSource;
use rustc::mir::visit::MutVisitor;
-use pretty;
+use shim;
use hair::cx::Cx;
+use util as mir_util;
use rustc::traits::Reveal;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::maps::Providers;
use rustc::ty::subst::Substs;
use rustc::hir;
+use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use syntax::abi::Abi;
use syntax::ast;
use syntax_pos::Span;
tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
tcx.item_mir(body_owner_def_id);
});
+
+ // Tuple struct/variant constructors don't have a BodyId, so we need
+ // to build them separately.
+ struct GatherCtors<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>
+ }
+ impl<'a, 'tcx> Visitor<'tcx> for GatherCtors<'a, 'tcx> {
+ fn visit_variant_data(&mut self,
+ v: &'tcx hir::VariantData,
+ _: ast::Name,
+ _: &'tcx hir::Generics,
+ _: ast::NodeId,
+ _: Span) {
+ if let hir::VariantData::Tuple(_, node_id) = *v {
+ self.tcx.item_mir(self.tcx.hir.local_def_id(node_id));
+ }
+ intravisit::walk_struct_def(self, v)
+ }
+ fn nested_visit_map<'b>(&'b mut self) -> NestedVisitorMap<'b, 'tcx> {
+ NestedVisitorMap::None
+ }
+ }
+ tcx.visit_all_item_likes_in_krate(DepNode::Mir, &mut GatherCtors {
+ tcx: tcx
+ }.as_deep_visitor());
}
}
_ => hir::BodyId { node_id: expr.id }
}
}
+ hir::map::NodeVariant(variant) =>
+ return create_constructor_shim(tcx, id, &variant.node.data),
+ hir::map::NodeStructCtor(ctor) =>
+ return create_constructor_shim(tcx, id, ctor),
_ => unsupported()
};
mem::transmute::<Mir, Mir<'tcx>>(mir)
};
- pretty::dump_mir(tcx, "mir_map", &0, src, &mir);
+ mir_util::dump_mir(tcx, "mir_map", &0, src, &mir);
tcx.alloc_mir(mir)
})
}
}
+fn create_constructor_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ ctor_id: ast::NodeId,
+ v: &'tcx hir::VariantData)
+ -> &'tcx RefCell<Mir<'tcx>>
+{
+ let span = tcx.hir.span(ctor_id);
+ if let hir::VariantData::Tuple(ref fields, ctor_id) = *v {
+ let pe = ty::ParameterEnvironment::for_item(tcx, ctor_id);
+ tcx.infer_ctxt(pe, Reveal::UserFacing).enter(|infcx| {
+ let (mut mir, src) =
+ shim::build_adt_ctor(&infcx, ctor_id, fields, span);
+
+ // Convert the Mir to global types.
+ let tcx = infcx.tcx.global_tcx();
+ let mut globalizer = GlobalizeMir {
+ tcx: tcx,
+ span: mir.span
+ };
+ globalizer.visit_mir(&mut mir);
+ let mir = unsafe {
+ mem::transmute::<Mir, Mir<'tcx>>(mir)
+ };
+
+ mir_util::dump_mir(tcx, "mir_map", &0, src, &mir);
+
+ tcx.alloc_mir(mir)
+ })
+ } else {
+ span_bug!(span, "attempting to create MIR for non-tuple variant {:?}", v);
+ }
+}
+
///////////////////////////////////////////////////////////////////////////
// BuildMir -- walks a crate, looking for fn items and methods to build MIR from
-> Ty<'tcx> {
let closure_ty = tcx.body_tables(body_id).node_id_to_type(closure_expr_id);
- // We're just hard-coding the idea that the signature will be
- // &self or &mut self and hence will have a bound region with
- // number 0, hokey.
let region = ty::Region::ReFree(ty::FreeRegion {
scope: tcx.region_maps.item_extent(body_id.node_id),
- bound_region: ty::BoundRegion::BrAnon(0),
+ bound_region: ty::BoundRegion::BrEnv,
});
let region = tcx.mk_region(region);
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::hir;
-use rustc::hir::def_id::DefId;
-use rustc::mir::*;
-use rustc::mir::transform::MirSource;
-use rustc::ty::TyCtxt;
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::indexed_vec::{Idx};
-use std::fmt::Display;
-use std::fs;
-use std::io::{self, Write};
-use std::path::{PathBuf, Path};
-
-const INDENT: &'static str = " ";
-/// Alignment for lining up comments following MIR statements
-const ALIGN: usize = 40;
-
-/// If the session is properly configured, dumps a human-readable
-/// representation of the mir into:
-///
-/// ```text
-/// rustc.node<node_id>.<pass_name>.<disambiguator>
-/// ```
-///
-/// Output from this function is controlled by passing `-Z dump-mir=<filter>`,
-/// where `<filter>` takes the following forms:
-///
-/// - `all` -- dump MIR for all fns, all passes, all everything
-/// - `substring1&substring2,...` -- `&`-separated list of substrings
-/// that can appear in the pass-name or the `item_path_str` for the given
-/// node-id. If any one of the substrings match, the data is dumped out.
-pub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pass_name: &str,
- disambiguator: &Display,
- src: MirSource,
- mir: &Mir<'tcx>) {
- let filters = match tcx.sess.opts.debugging_opts.dump_mir {
- None => return,
- Some(ref filters) => filters,
- };
- let node_id = src.item_id();
- let node_path = tcx.item_path_str(tcx.hir.local_def_id(node_id));
- let is_matched =
- filters.split("&")
- .any(|filter| {
- filter == "all" ||
- pass_name.contains(filter) ||
- node_path.contains(filter)
- });
- if !is_matched {
- return;
- }
-
- let promotion_id = match src {
- MirSource::Promoted(_, id) => format!("-{:?}", id),
- _ => String::new()
- };
-
- let mut file_path = PathBuf::new();
- if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir {
- let p = Path::new(file_dir);
- file_path.push(p);
- };
- let file_name = format!("rustc.node{}{}.{}.{}.mir",
- node_id, promotion_id, pass_name, disambiguator);
- file_path.push(&file_name);
- let _ = fs::File::create(&file_path).and_then(|mut file| {
- writeln!(file, "// MIR for `{}`", node_path)?;
- writeln!(file, "// node_id = {}", node_id)?;
- writeln!(file, "// pass_name = {}", pass_name)?;
- writeln!(file, "// disambiguator = {}", disambiguator)?;
- writeln!(file, "")?;
- write_mir_fn(tcx, src, mir, &mut file)?;
- Ok(())
- });
-}
-
-/// Write out a human-readable textual representation for the given MIR.
-pub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
- iter: I,
- w: &mut Write)
- -> io::Result<()>
- where I: Iterator<Item=DefId>, 'tcx: 'a
-{
- let mut first = true;
- for def_id in iter.filter(DefId::is_local) {
- let mir = &tcx.item_mir(def_id);
-
- if first {
- first = false;
- } else {
- // Put empty lines between all items
- writeln!(w, "")?;
- }
-
- let id = tcx.hir.as_local_node_id(def_id).unwrap();
- let src = MirSource::from_node(tcx, id);
- write_mir_fn(tcx, src, mir, w)?;
-
- for (i, mir) in mir.promoted.iter_enumerated() {
- writeln!(w, "")?;
- write_mir_fn(tcx, MirSource::Promoted(id, i), mir, w)?;
- }
- }
- Ok(())
-}
-
-pub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- src: MirSource,
- mir: &Mir<'tcx>,
- w: &mut Write)
- -> io::Result<()> {
- write_mir_intro(tcx, src, mir, w)?;
- for block in mir.basic_blocks().indices() {
- write_basic_block(tcx, block, mir, w)?;
- if block.index() + 1 != mir.basic_blocks().len() {
- writeln!(w, "")?;
- }
- }
-
- writeln!(w, "}}")?;
- Ok(())
-}
-
-/// Write out a human-readable textual representation for the given basic block.
-fn write_basic_block(tcx: TyCtxt,
- block: BasicBlock,
- mir: &Mir,
- w: &mut Write)
- -> io::Result<()> {
- let data = &mir[block];
-
- // Basic block label at the top.
- writeln!(w, "{}{:?}: {{", INDENT, block)?;
-
- // List of statements in the middle.
- let mut current_location = Location { block: block, statement_index: 0 };
- for statement in &data.statements {
- let indented_mir = format!("{0}{0}{1:?};", INDENT, statement);
- writeln!(w, "{0:1$} // {2}",
- indented_mir,
- ALIGN,
- comment(tcx, statement.source_info))?;
-
- current_location.statement_index += 1;
- }
-
- // Terminator at the bottom.
- let indented_terminator = format!("{0}{0}{1:?};", INDENT, data.terminator().kind);
- writeln!(w, "{0:1$} // {2}",
- indented_terminator,
- ALIGN,
- comment(tcx, data.terminator().source_info))?;
-
- writeln!(w, "{}}}", INDENT)
-}
-
-fn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {
- format!("scope {} at {}", scope.index(), tcx.sess.codemap().span_to_string(span))
-}
-
-/// Prints user-defined variables in a scope tree.
-///
-/// Returns the total number of variables printed.
-fn write_scope_tree(tcx: TyCtxt,
- mir: &Mir,
- scope_tree: &FxHashMap<VisibilityScope, Vec<VisibilityScope>>,
- w: &mut Write,
- parent: VisibilityScope,
- depth: usize)
- -> io::Result<()> {
- let indent = depth * INDENT.len();
-
- let children = match scope_tree.get(&parent) {
- Some(childs) => childs,
- None => return Ok(()),
- };
-
- for &child in children {
- let data = &mir.visibility_scopes[child];
- assert_eq!(data.parent_scope, Some(parent));
- writeln!(w, "{0:1$}scope {2} {{", "", indent, child.index())?;
-
- // User variable types (including the user's name in a comment).
- for local in mir.vars_iter() {
- let var = &mir.local_decls[local];
- let (name, source_info) = if var.source_info.unwrap().scope == child {
- (var.name.unwrap(), var.source_info.unwrap())
- } else {
- // Not a variable or not declared in this scope.
- continue;
- };
-
- let mut_str = if var.mutability == Mutability::Mut {
- "mut "
- } else {
- ""
- };
-
- let indent = indent + INDENT.len();
- let indented_var = format!("{0:1$}let {2}{3:?}: {4};",
- INDENT,
- indent,
- mut_str,
- local,
- var.ty);
- writeln!(w, "{0:1$} // \"{2}\" in {3}",
- indented_var,
- ALIGN,
- name,
- comment(tcx, source_info))?;
- }
-
- write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?;
-
- writeln!(w, "{0:1$}}}", "", depth * INDENT.len())?;
- }
-
- Ok(())
-}
-
-/// Write out a human-readable textual representation of the MIR's `fn` type and the types of its
-/// local variables (both user-defined bindings and compiler temporaries).
-fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- src: MirSource,
- mir: &Mir,
- w: &mut Write)
- -> io::Result<()> {
- write_mir_sig(tcx, src, mir, w)?;
- writeln!(w, " {{")?;
-
- // construct a scope tree and write it out
- let mut scope_tree: FxHashMap<VisibilityScope, Vec<VisibilityScope>> = FxHashMap();
- for (index, scope_data) in mir.visibility_scopes.iter().enumerate() {
- if let Some(parent) = scope_data.parent_scope {
- scope_tree.entry(parent)
- .or_insert(vec![])
- .push(VisibilityScope::new(index));
- } else {
- // Only the argument scope has no parent, because it's the root.
- assert_eq!(index, ARGUMENT_VISIBILITY_SCOPE.index());
- }
- }
-
- // Print return pointer
- let indented_retptr = format!("{}let mut {:?}: {};",
- INDENT,
- RETURN_POINTER,
- mir.return_ty);
- writeln!(w, "{0:1$} // return pointer",
- indented_retptr,
- ALIGN)?;
-
- write_scope_tree(tcx, mir, &scope_tree, w, ARGUMENT_VISIBILITY_SCOPE, 1)?;
-
- write_temp_decls(mir, w)?;
-
- // Add an empty line before the first block is printed.
- writeln!(w, "")?;
-
- Ok(())
-}
-
-fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)
- -> io::Result<()>
-{
- match src {
- MirSource::Fn(_) => write!(w, "fn")?,
- MirSource::Const(_) => write!(w, "const")?,
- MirSource::Static(_, hir::MutImmutable) => write!(w, "static")?,
- MirSource::Static(_, hir::MutMutable) => write!(w, "static mut")?,
- MirSource::Promoted(_, i) => write!(w, "{:?} in", i)?
- }
-
- write!(w, " {}", tcx.node_path_str(src.item_id()))?;
-
- if let MirSource::Fn(_) = src {
- write!(w, "(")?;
-
- // fn argument types.
- for (i, arg) in mir.args_iter().enumerate() {
- if i != 0 {
- write!(w, ", ")?;
- }
- write!(w, "{:?}: {}", Lvalue::Local(arg), mir.local_decls[arg].ty)?;
- }
-
- write!(w, ") -> {}", mir.return_ty)
- } else {
- assert_eq!(mir.arg_count, 0);
- write!(w, ": {} =", mir.return_ty)
- }
-}
-
-fn write_temp_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {
- // Compiler-introduced temporary types.
- for temp in mir.temps_iter() {
- writeln!(w, "{}let mut {:?}: {};", INDENT, temp, mir.local_decls[temp].ty)?;
- }
-
- Ok(())
-}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir;
+use rustc::hir::def_id::DefId;
+use rustc::infer;
+use rustc::middle::region::ROOT_CODE_EXTENT;
+use rustc::mir::*;
+use rustc::mir::transform::MirSource;
+use rustc::ty::{self, Ty};
+use rustc::ty::subst::{Kind, Subst};
+use rustc::ty::maps::Providers;
+
+use rustc_data_structures::indexed_vec::{IndexVec, Idx};
+
+use syntax::abi::Abi;
+use syntax::ast;
+use syntax_pos::Span;
+
+use std::cell::RefCell;
+use std::fmt;
+use std::iter;
+use std::mem;
+
+use transform::{add_call_guards, no_landing_pads, simplify};
+use util::elaborate_drops::{self, DropElaborator, DropStyle, DropFlagMode};
+use util::patch::MirPatch;
+
+pub fn provide(providers: &mut Providers) {
+ providers.mir_shims = make_shim;
+}
+
+fn make_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
+ instance: ty::InstanceDef<'tcx>)
+ -> &'tcx RefCell<Mir<'tcx>>
+{
+ debug!("make_shim({:?})", instance);
+ let did = instance.def_id();
+ let span = tcx.def_span(did);
+ let param_env =
+ tcx.construct_parameter_environment(span, did, ROOT_CODE_EXTENT);
+
+ let mut result = match instance {
+ ty::InstanceDef::Item(..) =>
+ bug!("item {:?} passed to make_shim", instance),
+ ty::InstanceDef::FnPtrShim(def_id, ty) => {
+ let trait_ = tcx.trait_of_item(def_id).unwrap();
+ let adjustment = match tcx.lang_items.fn_trait_kind(trait_) {
+ Some(ty::ClosureKind::FnOnce) => Adjustment::Identity,
+ Some(ty::ClosureKind::FnMut) |
+ Some(ty::ClosureKind::Fn) => Adjustment::Deref,
+ None => bug!("fn pointer {:?} is not an fn", ty)
+ };
+ // HACK: we need the "real" argument types for the MIR,
+ // but because our substs are (Self, Args), where Args
+ // is a tuple, we must include the *concrete* argument
+ // types in the MIR. They will be substituted again with
+ // the param-substs, but because they are concrete, this
+ // will not do any harm.
+ let sig = tcx.erase_late_bound_regions(&ty.fn_sig());
+ let arg_tys = sig.inputs();
+
+ build_call_shim(
+ tcx,
+ ¶m_env,
+ def_id,
+ adjustment,
+ CallKind::Indirect,
+ Some(arg_tys)
+ )
+ }
+ ty::InstanceDef::Virtual(def_id, _) => {
+ // We are translating a call back to our def-id, which
+ // trans::mir knows to turn to an actual virtual call.
+ build_call_shim(
+ tcx,
+ ¶m_env,
+ def_id,
+ Adjustment::Identity,
+ CallKind::Direct(def_id),
+ None
+ )
+ }
+ ty::InstanceDef::ClosureOnceShim { call_once } => {
+ let fn_mut = tcx.lang_items.fn_mut_trait().unwrap();
+ let call_mut = tcx.global_tcx()
+ .associated_items(fn_mut)
+ .find(|it| it.kind == ty::AssociatedKind::Method)
+ .unwrap().def_id;
+
+ build_call_shim(
+ tcx,
+ ¶m_env,
+ call_once,
+ Adjustment::RefMut,
+ CallKind::Direct(call_mut),
+ None
+ )
+ }
+ ty::InstanceDef::DropGlue(def_id, ty) => {
+ build_drop_shim(tcx, ¶m_env, def_id, ty)
+ }
+ ty::InstanceDef::Intrinsic(_) => {
+ bug!("creating shims from intrinsics ({:?}) is unsupported", instance)
+ }
+ };
+ debug!("make_shim({:?}) = untransformed {:?}", instance, result);
+ no_landing_pads::no_landing_pads(tcx, &mut result);
+ simplify::simplify_cfg(&mut result);
+ add_call_guards::add_call_guards(&mut result);
+ debug!("make_shim({:?}) = {:?}", instance, result);
+
+ let result = tcx.alloc_mir(result);
+ // Perma-borrow MIR from shims to prevent mutation.
+ mem::forget(result.borrow());
+ result
+}
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+enum Adjustment {
+ Identity,
+ Deref,
+ RefMut,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+enum CallKind {
+ Indirect,
+ Direct(DefId),
+}
+
+fn temp_decl(mutability: Mutability, ty: Ty) -> LocalDecl {
+ LocalDecl { mutability, ty, name: None, source_info: None }
+}
+
+fn local_decls_for_sig<'tcx>(sig: &ty::FnSig<'tcx>)
+ -> IndexVec<Local, LocalDecl<'tcx>>
+{
+ iter::once(temp_decl(Mutability::Mut, sig.output()))
+ .chain(sig.inputs().iter().map(
+ |ity| temp_decl(Mutability::Not, ity)))
+ .collect()
+}
+
+fn build_drop_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: &ty::ParameterEnvironment<'tcx>,
+ def_id: DefId,
+ ty: Option<Ty<'tcx>>)
+ -> Mir<'tcx>
+{
+ debug!("build_drop_shim(def_id={:?}, ty={:?})", def_id, ty);
+
+ let substs = if let Some(ty) = ty {
+ tcx.mk_substs(iter::once(Kind::from(ty)))
+ } else {
+ param_env.free_substs
+ };
+ let fn_ty = tcx.item_type(def_id).subst(tcx, substs);
+ let sig = tcx.erase_late_bound_regions(&fn_ty.fn_sig());
+ let span = tcx.def_span(def_id);
+
+ let source_info = SourceInfo { span, scope: ARGUMENT_VISIBILITY_SCOPE };
+
+ let return_block = BasicBlock::new(1);
+ let mut blocks = IndexVec::new();
+ let block = |blocks: &mut IndexVec<_, _>, kind| {
+ blocks.push(BasicBlockData {
+ statements: vec![],
+ terminator: Some(Terminator { source_info, kind }),
+ is_cleanup: false
+ })
+ };
+ block(&mut blocks, TerminatorKind::Goto { target: return_block });
+ block(&mut blocks, TerminatorKind::Return);
+
+ let mut mir = Mir::new(
+ blocks,
+ IndexVec::from_elem_n(
+ VisibilityScopeData { span: span, parent_scope: None }, 1
+ ),
+ IndexVec::new(),
+ sig.output(),
+ local_decls_for_sig(&sig),
+ sig.inputs().len(),
+ vec![],
+ span
+ );
+
+ if let Some(..) = ty {
+ let patch = {
+ let mut elaborator = DropShimElaborator {
+ mir: &mir,
+ patch: MirPatch::new(&mir),
+ tcx, param_env
+ };
+ let dropee = Lvalue::Projection(
+ box Projection {
+ base: Lvalue::Local(Local::new(1+0)),
+ elem: ProjectionElem::Deref
+ }
+ );
+ let resume_block = elaborator.patch.resume_block();
+ elaborate_drops::elaborate_drop(
+ &mut elaborator,
+ source_info,
+ false,
+ &dropee,
+ (),
+ return_block,
+ Some(resume_block),
+ START_BLOCK
+ );
+ elaborator.patch
+ };
+ patch.apply(&mut mir);
+ }
+
+ mir
+}
+
+pub struct DropShimElaborator<'a, 'tcx: 'a> {
+ mir: &'a Mir<'tcx>,
+ patch: MirPatch<'tcx>,
+ tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: &'a ty::ParameterEnvironment<'tcx>,
+}
+
+impl<'a, 'tcx> fmt::Debug for DropShimElaborator<'a, 'tcx> {
+ fn fmt(&self, _f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+ Ok(())
+ }
+}
+
+impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> {
+ type Path = ();
+
+ fn patch(&mut self) -> &mut MirPatch<'tcx> { &mut self.patch }
+ fn mir(&self) -> &'a Mir<'tcx> { self.mir }
+ fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx> { self.tcx }
+ fn param_env(&self) -> &'a ty::ParameterEnvironment<'tcx> { self.param_env }
+
+ fn drop_style(&self, _path: Self::Path, mode: DropFlagMode) -> DropStyle {
+ if let DropFlagMode::Shallow = mode {
+ DropStyle::Static
+ } else {
+ DropStyle::Open
+ }
+ }
+
+ fn get_drop_flag(&mut self, _path: Self::Path) -> Option<Operand<'tcx>> {
+ None
+ }
+
+ fn clear_drop_flag(&mut self, _location: Location, _path: Self::Path, _mode: DropFlagMode) {
+ }
+
+ fn field_subpath(&self, _path: Self::Path, _field: Field) -> Option<Self::Path> {
+ None
+ }
+ fn deref_subpath(&self, _path: Self::Path) -> Option<Self::Path> {
+ None
+ }
+ fn downcast_subpath(&self, _path: Self::Path, _variant: usize) -> Option<Self::Path> {
+ Some(())
+ }
+}
+
+/// Build a "call" shim for `def_id`. The shim calls the
+/// function specified by `call_kind`, first adjusting its first
+/// argument according to `rcvr_adjustment`.
+///
+/// If `untuple_args` is a vec of types, the second argument of the
+/// function will be untupled as these types.
+fn build_call_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: &ty::ParameterEnvironment<'tcx>,
+ def_id: DefId,
+ rcvr_adjustment: Adjustment,
+ call_kind: CallKind,
+ untuple_args: Option<&[Ty<'tcx>]>)
+ -> Mir<'tcx>
+{
+ debug!("build_call_shim(def_id={:?}, rcvr_adjustment={:?}, \
+ call_kind={:?}, untuple_args={:?})",
+ def_id, rcvr_adjustment, call_kind, untuple_args);
+
+ let fn_ty = tcx.item_type(def_id).subst(tcx, param_env.free_substs);
+ let sig = tcx.erase_late_bound_regions(&fn_ty.fn_sig());
+ let span = tcx.def_span(def_id);
+
+ debug!("build_call_shim: sig={:?}", sig);
+
+ let mut local_decls = local_decls_for_sig(&sig);
+ let source_info = SourceInfo { span, scope: ARGUMENT_VISIBILITY_SCOPE };
+
+ let rcvr_arg = Local::new(1+0);
+ let rcvr_l = Lvalue::Local(rcvr_arg);
+ let mut statements = vec![];
+
+ let rcvr = match rcvr_adjustment {
+ Adjustment::Identity => Operand::Consume(rcvr_l),
+ Adjustment::Deref => Operand::Consume(Lvalue::Projection(
+ box Projection { base: rcvr_l, elem: ProjectionElem::Deref }
+ )),
+ Adjustment::RefMut => {
+ // let rcvr = &mut rcvr;
+ let re_erased = tcx.mk_region(ty::ReErased);
+ let ref_rcvr = local_decls.push(temp_decl(
+ Mutability::Not,
+ tcx.mk_ref(re_erased, ty::TypeAndMut {
+ ty: sig.inputs()[0],
+ mutbl: hir::Mutability::MutMutable
+ })
+ ));
+ statements.push(Statement {
+ source_info: source_info,
+ kind: StatementKind::Assign(
+ Lvalue::Local(ref_rcvr),
+ Rvalue::Ref(re_erased, BorrowKind::Mut, rcvr_l)
+ )
+ });
+ Operand::Consume(Lvalue::Local(ref_rcvr))
+ }
+ };
+
+ let (callee, mut args) = match call_kind {
+ CallKind::Indirect => (rcvr, vec![]),
+ CallKind::Direct(def_id) => (
+ Operand::Constant(Constant {
+ span: span,
+ ty: tcx.item_type(def_id).subst(tcx, param_env.free_substs),
+ literal: Literal::Item { def_id, substs: param_env.free_substs },
+ }),
+ vec![rcvr]
+ )
+ };
+
+ if let Some(untuple_args) = untuple_args {
+ args.extend(untuple_args.iter().enumerate().map(|(i, ity)| {
+ let arg_lv = Lvalue::Local(Local::new(1+1));
+ Operand::Consume(Lvalue::Projection(box Projection {
+ base: arg_lv,
+ elem: ProjectionElem::Field(Field::new(i), *ity)
+ }))
+ }));
+ } else {
+ args.extend((1..sig.inputs().len()).map(|i| {
+ Operand::Consume(Lvalue::Local(Local::new(1+i)))
+ }));
+ }
+
+ let mut blocks = IndexVec::new();
+ let block = |blocks: &mut IndexVec<_, _>, statements, kind, is_cleanup| {
+ blocks.push(BasicBlockData {
+ statements,
+ terminator: Some(Terminator { source_info, kind }),
+ is_cleanup
+ })
+ };
+
+ // BB #0
+ block(&mut blocks, statements, TerminatorKind::Call {
+ func: callee,
+ args: args,
+ destination: Some((Lvalue::Local(RETURN_POINTER),
+ BasicBlock::new(1))),
+ cleanup: if let Adjustment::RefMut = rcvr_adjustment {
+ Some(BasicBlock::new(3))
+ } else {
+ None
+ }
+ }, false);
+
+ if let Adjustment::RefMut = rcvr_adjustment {
+ // BB #1 - drop for Self
+ block(&mut blocks, vec![], TerminatorKind::Drop {
+ location: Lvalue::Local(rcvr_arg),
+ target: BasicBlock::new(2),
+ unwind: None
+ }, false);
+ }
+ // BB #1/#2 - return
+ block(&mut blocks, vec![], TerminatorKind::Return, false);
+ if let Adjustment::RefMut = rcvr_adjustment {
+ // BB #3 - drop if closure panics
+ block(&mut blocks, vec![], TerminatorKind::Drop {
+ location: Lvalue::Local(rcvr_arg),
+ target: BasicBlock::new(4),
+ unwind: None
+ }, true);
+
+ // BB #4 - resume
+ block(&mut blocks, vec![], TerminatorKind::Resume, true);
+ }
+
+ let mut mir = Mir::new(
+ blocks,
+ IndexVec::from_elem_n(
+ VisibilityScopeData { span: span, parent_scope: None }, 1
+ ),
+ IndexVec::new(),
+ sig.output(),
+ local_decls,
+ sig.inputs().len(),
+ vec![],
+ span
+ );
+ if let Abi::RustCall = sig.abi {
+ mir.spread_arg = Some(Local::new(sig.inputs().len()));
+ }
+ mir
+}
+
+pub fn build_adt_ctor<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
+ ctor_id: ast::NodeId,
+ fields: &[hir::StructField],
+ span: Span)
+ -> (Mir<'tcx>, MirSource)
+{
+ let tcx = infcx.tcx;
+ let def_id = tcx.hir.local_def_id(ctor_id);
+ let sig = match tcx.item_type(def_id).sty {
+ ty::TyFnDef(_, _, fty) => tcx.no_late_bound_regions(&fty)
+ .expect("LBR in ADT constructor signature"),
+ _ => bug!("unexpected type for ctor {:?}", def_id)
+ };
+ let sig = tcx.erase_regions(&sig);
+
+ let (adt_def, substs) = match sig.output().sty {
+ ty::TyAdt(adt_def, substs) => (adt_def, substs),
+ _ => bug!("unexpected type for ADT ctor {:?}", sig.output())
+ };
+
+ debug!("build_ctor: def_id={:?} sig={:?} fields={:?}", def_id, sig, fields);
+
+ let local_decls = local_decls_for_sig(&sig);
+
+ let source_info = SourceInfo {
+ span: span,
+ scope: ARGUMENT_VISIBILITY_SCOPE
+ };
+
+ let variant_no = if adt_def.is_enum() {
+ adt_def.variant_index_with_id(def_id)
+ } else {
+ 0
+ };
+
+ // return = ADT(arg0, arg1, ...); return
+ let start_block = BasicBlockData {
+ statements: vec![Statement {
+ source_info: source_info,
+ kind: StatementKind::Assign(
+ Lvalue::Local(RETURN_POINTER),
+ Rvalue::Aggregate(
+ AggregateKind::Adt(adt_def, variant_no, substs, None),
+ (1..sig.inputs().len()+1).map(|i| {
+ Operand::Consume(Lvalue::Local(Local::new(i)))
+ }).collect()
+ )
+ )
+ }],
+ terminator: Some(Terminator {
+ source_info: source_info,
+ kind: TerminatorKind::Return,
+ }),
+ is_cleanup: false
+ };
+
+ let mir = Mir::new(
+ IndexVec::from_elem_n(start_block, 1),
+ IndexVec::from_elem_n(
+ VisibilityScopeData { span: span, parent_scope: None }, 1
+ ),
+ IndexVec::new(),
+ sig.output(),
+ local_decls,
+ sig.inputs().len(),
+ vec![],
+ span
+ );
+ (mir, MirSource::Fn(ctor_id))
+}
impl<'tcx> MirPass<'tcx> for AddCallGuards {
fn run_pass<'a>(&mut self, _tcx: TyCtxt<'a, 'tcx, 'tcx>, _src: MirSource, mir: &mut Mir<'tcx>) {
- let pred_count: IndexVec<_, _> =
- mir.predecessors().iter().map(|ps| ps.len()).collect();
+ add_call_guards(mir);
+ }
+}
+
+pub fn add_call_guards(mir: &mut Mir) {
+ let pred_count: IndexVec<_, _> =
+ mir.predecessors().iter().map(|ps| ps.len()).collect();
- // We need a place to store the new blocks generated
- let mut new_blocks = Vec::new();
+ // We need a place to store the new blocks generated
+ let mut new_blocks = Vec::new();
- let cur_len = mir.basic_blocks().len();
+ let cur_len = mir.basic_blocks().len();
- for block in mir.basic_blocks_mut() {
- match block.terminator {
- Some(Terminator {
- kind: TerminatorKind::Call {
- destination: Some((_, ref mut destination)),
- cleanup: Some(_),
- ..
- }, source_info
- }) if pred_count[*destination] > 1 => {
- // It's a critical edge, break it
- let call_guard = BasicBlockData {
- statements: vec![],
- is_cleanup: block.is_cleanup,
- terminator: Some(Terminator {
- source_info: source_info,
- kind: TerminatorKind::Goto { target: *destination }
- })
- };
+ for block in mir.basic_blocks_mut() {
+ match block.terminator {
+ Some(Terminator {
+ kind: TerminatorKind::Call {
+ destination: Some((_, ref mut destination)),
+ cleanup: Some(_),
+ ..
+ }, source_info
+ }) if pred_count[*destination] > 1 => {
+ // It's a critical edge, break it
+ let call_guard = BasicBlockData {
+ statements: vec![],
+ is_cleanup: block.is_cleanup,
+ terminator: Some(Terminator {
+ source_info: source_info,
+ kind: TerminatorKind::Goto { target: *destination }
+ })
+ };
- // Get the index it will be when inserted into the MIR
- let idx = cur_len + new_blocks.len();
- new_blocks.push(call_guard);
- *destination = BasicBlock::new(idx);
- }
- _ => {}
+ // Get the index it will be when inserted into the MIR
+ let idx = cur_len + new_blocks.len();
+ new_blocks.push(call_guard);
+ *destination = BasicBlock::new(idx);
}
+ _ => {}
}
+ }
- debug!("Broke {} N edges", new_blocks.len());
+ debug!("Broke {} N edges", new_blocks.len());
- mir.basic_blocks_mut().extend(new_blocks);
- }
+ mir.basic_blocks_mut().extend(new_blocks);
}
impl Pass for AddCallGuards {}
//! (non-mutating) use of `SRC`. These restrictions are conservative and may be relaxed in the
//! future.
-use def_use::DefUseAnalysis;
use rustc::mir::{Constant, Local, LocalKind, Location, Lvalue, Mir, Operand, Rvalue, StatementKind};
use rustc::mir::transform::{MirPass, MirSource, Pass};
use rustc::mir::visit::MutVisitor;
use rustc::ty::TyCtxt;
+use util::def_use::DefUseAnalysis;
use transform::qualify_consts;
pub struct CopyPropagation;
use rustc::ty::TyCtxt;
use rustc::mir::*;
use rustc::mir::transform::{Pass, MirPass, MirPassHook, MirSource};
-use pretty;
+use util as mir_util;
pub struct Marker<'a>(pub &'a str);
pass: &Pass,
is_after: bool)
{
- pretty::dump_mir(
+ mir_util::dump_mir(
tcx,
&*pass.name(),
&Disambiguator {
}
}
+pub fn no_landing_pads<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &mut Mir<'tcx>) {
+ if tcx.sess.no_landing_pads() {
+ NoLandingPads.visit_mir(mir);
+ }
+}
+
impl<'tcx> MirPass<'tcx> for NoLandingPads {
fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
_: MirSource, mir: &mut Mir<'tcx>) {
- if tcx.sess.no_landing_pads() {
- self.visit_mir(mir);
- }
+ no_landing_pads(tcx, mir)
}
}
}
}
+pub fn simplify_cfg(mir: &mut Mir) {
+ CfgSimplifier::new(mir).simplify();
+ remove_dead_blocks(mir);
+
+ // FIXME: Should probably be moved into some kind of pass manager
+ mir.basic_blocks_mut().raw.shrink_to_fit();
+}
+
impl<'l, 'tcx> MirPass<'tcx> for SimplifyCfg<'l> {
fn run_pass<'a>(&mut self, _tcx: TyCtxt<'a, 'tcx, 'tcx>, _src: MirSource, mir: &mut Mir<'tcx>) {
debug!("SimplifyCfg({:?}) - simplifying {:?}", self.label, mir);
- CfgSimplifier::new(mir).simplify();
- remove_dead_blocks(mir);
-
- // FIXME: Should probably be moved into some kind of pass manager
- mir.basic_blocks_mut().raw.shrink_to_fit();
+ simplify_cfg(mir);
}
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Def-use analysis.
+
+use rustc::mir::{Local, Location, Lvalue, Mir};
+use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor};
+use rustc_data_structures::indexed_vec::IndexVec;
+use std::marker::PhantomData;
+use std::mem;
+
+pub struct DefUseAnalysis<'tcx> {
+ info: IndexVec<Local, Info<'tcx>>,
+}
+
+#[derive(Clone)]
+pub struct Info<'tcx> {
+ pub defs_and_uses: Vec<Use<'tcx>>,
+}
+
+#[derive(Clone)]
+pub struct Use<'tcx> {
+ pub context: LvalueContext<'tcx>,
+ pub location: Location,
+}
+
+impl<'tcx> DefUseAnalysis<'tcx> {
+ pub fn new(mir: &Mir<'tcx>) -> DefUseAnalysis<'tcx> {
+ DefUseAnalysis {
+ info: IndexVec::from_elem_n(Info::new(), mir.local_decls.len()),
+ }
+ }
+
+ pub fn analyze(&mut self, mir: &Mir<'tcx>) {
+ let mut finder = DefUseFinder {
+ info: mem::replace(&mut self.info, IndexVec::new()),
+ };
+ finder.visit_mir(mir);
+ self.info = finder.info
+ }
+
+ pub fn local_info(&self, local: Local) -> &Info<'tcx> {
+ &self.info[local]
+ }
+
+ pub fn local_info_mut(&mut self, local: Local) -> &mut Info<'tcx> {
+ &mut self.info[local]
+ }
+
+ fn mutate_defs_and_uses<F>(&self, local: Local, mir: &mut Mir<'tcx>, mut callback: F)
+ where F: for<'a> FnMut(&'a mut Lvalue<'tcx>,
+ LvalueContext<'tcx>,
+ Location) {
+ for lvalue_use in &self.info[local].defs_and_uses {
+ MutateUseVisitor::new(local,
+ &mut callback,
+ mir).visit_location(mir, lvalue_use.location)
+ }
+ }
+
+ /// FIXME(pcwalton): This should update the def-use chains.
+ pub fn replace_all_defs_and_uses_with(&self,
+ local: Local,
+ mir: &mut Mir<'tcx>,
+ new_lvalue: Lvalue<'tcx>) {
+ self.mutate_defs_and_uses(local, mir, |lvalue, _, _| *lvalue = new_lvalue.clone())
+ }
+}
+
+struct DefUseFinder<'tcx> {
+ info: IndexVec<Local, Info<'tcx>>,
+}
+
+impl<'tcx> DefUseFinder<'tcx> {
+ fn lvalue_mut_info(&mut self, lvalue: &Lvalue<'tcx>) -> Option<&mut Info<'tcx>> {
+ let info = &mut self.info;
+
+ if let Lvalue::Local(local) = *lvalue {
+ Some(&mut info[local])
+ } else {
+ None
+ }
+ }
+}
+
+impl<'tcx> Visitor<'tcx> for DefUseFinder<'tcx> {
+ fn visit_lvalue(&mut self,
+ lvalue: &Lvalue<'tcx>,
+ context: LvalueContext<'tcx>,
+ location: Location) {
+ if let Some(ref mut info) = self.lvalue_mut_info(lvalue) {
+ info.defs_and_uses.push(Use {
+ context: context,
+ location: location,
+ })
+ }
+ self.super_lvalue(lvalue, context, location)
+ }
+}
+
+impl<'tcx> Info<'tcx> {
+ fn new() -> Info<'tcx> {
+ Info {
+ defs_and_uses: vec![],
+ }
+ }
+
+ pub fn def_count(&self) -> usize {
+ self.defs_and_uses.iter().filter(|lvalue_use| lvalue_use.context.is_mutating_use()).count()
+ }
+
+ pub fn def_count_not_including_drop(&self) -> usize {
+ self.defs_and_uses.iter().filter(|lvalue_use| {
+ lvalue_use.context.is_mutating_use() && !lvalue_use.context.is_drop()
+ }).count()
+ }
+
+ pub fn use_count(&self) -> usize {
+ self.defs_and_uses.iter().filter(|lvalue_use| {
+ lvalue_use.context.is_nonmutating_use()
+ }).count()
+ }
+}
+
+struct MutateUseVisitor<'tcx, F> {
+ query: Local,
+ callback: F,
+ phantom: PhantomData<&'tcx ()>,
+}
+
+impl<'tcx, F> MutateUseVisitor<'tcx, F> {
+ fn new(query: Local, callback: F, _: &Mir<'tcx>)
+ -> MutateUseVisitor<'tcx, F>
+ where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) {
+ MutateUseVisitor {
+ query: query,
+ callback: callback,
+ phantom: PhantomData,
+ }
+ }
+}
+
+impl<'tcx, F> MutVisitor<'tcx> for MutateUseVisitor<'tcx, F>
+ where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) {
+ fn visit_lvalue(&mut self,
+ lvalue: &mut Lvalue<'tcx>,
+ context: LvalueContext<'tcx>,
+ location: Location) {
+ if let Lvalue::Local(local) = *lvalue {
+ if local == self.query {
+ (self.callback)(lvalue, context, location)
+ }
+ }
+ self.super_lvalue(lvalue, context, location)
+ }
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::fmt;
+use rustc::hir;
+use rustc::mir::*;
+use rustc::middle::const_val::ConstInt;
+use rustc::middle::lang_items;
+use rustc::ty::{self, Ty};
+use rustc::ty::subst::{Kind, Substs};
+use rustc::ty::util::IntTypeExt;
+use rustc_data_structures::indexed_vec::Idx;
+use util::patch::MirPatch;
+
+use std::iter;
+
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+pub enum DropFlagState {
+ Present, // i.e. initialized
+ Absent, // i.e. deinitialized or "moved"
+}
+
+impl DropFlagState {
+ pub fn value(self) -> bool {
+ match self {
+ DropFlagState::Present => true,
+ DropFlagState::Absent => false
+ }
+ }
+}
+
+#[derive(Debug)]
+pub enum DropStyle {
+ Dead,
+ Static,
+ Conditional,
+ Open,
+}
+
+#[derive(Debug)]
+pub enum DropFlagMode {
+ Shallow,
+ Deep
+}
+
+pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
+ type Path : Copy + fmt::Debug;
+
+ fn patch(&mut self) -> &mut MirPatch<'tcx>;
+ fn mir(&self) -> &'a Mir<'tcx>;
+ fn tcx(&self) -> ty::TyCtxt<'a, 'tcx, 'tcx>;
+ fn param_env(&self) -> &'a ty::ParameterEnvironment<'tcx>;
+
+ fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
+ fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
+ fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
+
+
+ fn field_subpath(&self, path: Self::Path, field: Field) -> Option<Self::Path>;
+ fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
+ fn downcast_subpath(&self, path: Self::Path, variant: usize) -> Option<Self::Path>;
+}
+
+#[derive(Debug)]
+struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
+ where D : DropElaborator<'b, 'tcx> + 'l
+{
+ elaborator: &'l mut D,
+
+ source_info: SourceInfo,
+ is_cleanup: bool,
+
+ lvalue: &'l Lvalue<'tcx>,
+ path: D::Path,
+ succ: BasicBlock,
+ unwind: Option<BasicBlock>,
+}
+
+pub fn elaborate_drop<'b, 'tcx, D>(
+ elaborator: &mut D,
+ source_info: SourceInfo,
+ is_cleanup: bool,
+ lvalue: &Lvalue<'tcx>,
+ path: D::Path,
+ succ: BasicBlock,
+ unwind: Option<BasicBlock>,
+ bb: BasicBlock)
+ where D: DropElaborator<'b, 'tcx>
+{
+ assert_eq!(unwind.is_none(), is_cleanup);
+ DropCtxt {
+ elaborator, source_info, is_cleanup, lvalue, path, succ, unwind
+ }.elaborate_drop(bb)
+}
+
+impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
+ where D: DropElaborator<'b, 'tcx>
+{
+ fn lvalue_ty(&self, lvalue: &Lvalue<'tcx>) -> Ty<'tcx> {
+ lvalue.ty(self.elaborator.mir(), self.tcx()).to_ty(self.tcx())
+ }
+
+ fn tcx(&self) -> ty::TyCtxt<'b, 'tcx, 'tcx> {
+ self.elaborator.tcx()
+ }
+
+ /// This elaborates a single drop instruction, located at `bb`, and
+ /// patches over it.
+ ///
+ /// The elaborated drop checks the drop flags to only drop what
+ /// is initialized.
+ ///
+ /// In addition, the relevant drop flags also need to be cleared
+ /// to avoid double-drops. However, in the middle of a complex
+ /// drop, one must avoid clearing some of the flags before they
+ /// are read, as that would cause a memory leak.
+ ///
+ /// In particular, when dropping an ADT, multiple fields may be
+ /// joined together under the `rest` subpath. They are all controlled
+ /// by the primary drop flag, but only the last rest-field dropped
+ /// should clear it (and it must also not clear anything else).
+ ///
+ /// FIXME: I think we should just control the flags externally
+ /// and then we do not need this machinery.
+ pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
+ debug!("elaborate_drop({:?})", self);
+ let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
+ debug!("elaborate_drop({:?}): live - {:?}", self, style);
+ match style {
+ DropStyle::Dead => {
+ self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
+ target: self.succ
+ });
+ }
+ DropStyle::Static => {
+ let loc = self.terminator_loc(bb);
+ self.elaborator.clear_drop_flag(loc, self.path, DropFlagMode::Deep);
+ self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
+ location: self.lvalue.clone(),
+ target: self.succ,
+ unwind: self.unwind
+ });
+ }
+ DropStyle::Conditional => {
+ let is_cleanup = self.is_cleanup; // FIXME(#6393)
+ let succ = self.succ;
+ let drop_bb = self.complete_drop(
+ is_cleanup, Some(DropFlagMode::Deep), succ);
+ self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
+ target: drop_bb
+ });
+ }
+ DropStyle::Open => {
+ let drop_bb = self.open_drop();
+ self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
+ target: drop_bb
+ });
+ }
+ }
+ }
+
+ /// Return the lvalue and move path for each field of `variant`,
+ /// (the move path is `None` if the field is a rest field).
+ fn move_paths_for_fields(&self,
+ base_lv: &Lvalue<'tcx>,
+ variant_path: D::Path,
+ variant: &'tcx ty::VariantDef,
+ substs: &'tcx Substs<'tcx>)
+ -> Vec<(Lvalue<'tcx>, Option<D::Path>)>
+ {
+ variant.fields.iter().enumerate().map(|(i, f)| {
+ let field = Field::new(i);
+ let subpath = self.elaborator.field_subpath(variant_path, field);
+
+ let field_ty =
+ self.tcx().normalize_associated_type_in_env(
+ &f.ty(self.tcx(), substs),
+ self.elaborator.param_env()
+ );
+ (base_lv.clone().field(field, field_ty), subpath)
+ }).collect()
+ }
+
+ fn drop_subpath(&mut self,
+ is_cleanup: bool,
+ lvalue: &Lvalue<'tcx>,
+ path: Option<D::Path>,
+ succ: BasicBlock,
+ unwind: Option<BasicBlock>)
+ -> BasicBlock
+ {
+ if let Some(path) = path {
+ debug!("drop_subpath: for std field {:?}", lvalue);
+
+ DropCtxt {
+ elaborator: self.elaborator,
+ source_info: self.source_info,
+ path, lvalue, succ, unwind, is_cleanup
+ }.elaborated_drop_block()
+ } else {
+ debug!("drop_subpath: for rest field {:?}", lvalue);
+
+ DropCtxt {
+ elaborator: self.elaborator,
+ source_info: self.source_info,
+ lvalue, succ, unwind, is_cleanup,
+ // Using `self.path` here to condition the drop on
+ // our own drop flag.
+ path: self.path
+ }.complete_drop(is_cleanup, None, succ)
+ }
+ }
+
+ /// Create one-half of the drop ladder for a list of fields, and return
+ /// the list of steps in it in reverse order.
+ ///
+ /// `unwind_ladder` is such a list of steps in reverse order,
+ /// which is called instead of the next step if the drop unwinds
+ /// (the first field is never reached). If it is `None`, all
+ /// unwind targets are left blank.
+ fn drop_halfladder<'a>(&mut self,
+ unwind_ladder: Option<&[BasicBlock]>,
+ succ: BasicBlock,
+ fields: &[(Lvalue<'tcx>, Option<D::Path>)],
+ is_cleanup: bool)
+ -> Vec<BasicBlock>
+ {
+ let mut unwind_succ = if is_cleanup {
+ None
+ } else {
+ self.unwind
+ };
+
+ let goto = TerminatorKind::Goto { target: succ };
+ let mut succ = self.new_block(is_cleanup, goto);
+
+ // Always clear the "master" drop flag at the bottom of the
+ // ladder. This is needed because the "master" drop flag
+ // protects the ADT's discriminant, which is invalidated
+ // after the ADT is dropped.
+ let succ_loc = Location { block: succ, statement_index: 0 };
+ self.elaborator.clear_drop_flag(succ_loc, self.path, DropFlagMode::Shallow);
+
+ fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
+ succ = self.drop_subpath(is_cleanup, lv, path, succ, unwind_succ);
+ unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
+ succ
+ }).collect()
+ }
+
+ /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
+ ///
+ /// For example, with 3 fields, the drop ladder is
+ ///
+ /// .d0:
+ /// ELAB(drop location.0 [target=.d1, unwind=.c1])
+ /// .d1:
+ /// ELAB(drop location.1 [target=.d2, unwind=.c2])
+ /// .d2:
+ /// ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
+ /// .c1:
+ /// ELAB(drop location.1 [target=.c2])
+ /// .c2:
+ /// ELAB(drop location.2 [target=`self.unwind`])
+ fn drop_ladder<'a>(&mut self,
+ fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>)
+ -> (BasicBlock, Option<BasicBlock>)
+ {
+ debug!("drop_ladder({:?}, {:?})", self, fields);
+
+ let mut fields = fields;
+ fields.retain(|&(ref lvalue, _)| {
+ self.tcx().type_needs_drop_given_env(
+ self.lvalue_ty(lvalue), self.elaborator.param_env())
+ });
+
+ debug!("drop_ladder - fields needing drop: {:?}", fields);
+
+ let unwind_ladder = if self.is_cleanup {
+ None
+ } else {
+ let unwind = self.unwind.unwrap(); // FIXME(#6393)
+ Some(self.drop_halfladder(None, unwind, &fields, true))
+ };
+
+ let succ = self.succ; // FIXME(#6393)
+ let is_cleanup = self.is_cleanup;
+ let normal_ladder =
+ self.drop_halfladder(unwind_ladder.as_ref().map(|x| &**x),
+ succ, &fields, is_cleanup);
+
+ (normal_ladder.last().cloned().unwrap_or(succ),
+ unwind_ladder.and_then(|l| l.last().cloned()).or(self.unwind))
+ }
+
+ fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
+ -> BasicBlock
+ {
+ debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
+
+ let fields = tys.iter().enumerate().map(|(i, &ty)| {
+ (self.lvalue.clone().field(Field::new(i), ty),
+ self.elaborator.field_subpath(self.path, Field::new(i)))
+ }).collect();
+
+ self.drop_ladder(fields).0
+ }
+
+ fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
+ {
+ debug!("open_drop_for_box({:?}, {:?})", self, ty);
+
+ let interior = self.lvalue.clone().deref();
+ let interior_path = self.elaborator.deref_subpath(self.path);
+
+ let succ = self.succ; // FIXME(#6393)
+ let is_cleanup = self.is_cleanup;
+ let succ = self.box_free_block(ty, succ, is_cleanup);
+ let unwind_succ = self.unwind.map(|u| {
+ self.box_free_block(ty, u, true)
+ });
+
+ self.drop_subpath(is_cleanup, &interior, interior_path, succ, unwind_succ)
+ }
+
+ fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
+ -> BasicBlock {
+ debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
+ if adt.variants.len() == 0 {
+ return self.elaborator.patch().new_block(BasicBlockData {
+ statements: vec![],
+ terminator: Some(Terminator {
+ source_info: self.source_info,
+ kind: TerminatorKind::Unreachable
+ }),
+ is_cleanup: self.is_cleanup
+ });
+ }
+
+ let contents_drop = if adt.is_union() {
+ (self.succ, self.unwind)
+ } else {
+ self.open_drop_for_adt_contents(adt, substs)
+ };
+
+ if adt.has_dtor(self.tcx()) {
+ self.destructor_call_block(contents_drop)
+ } else {
+ contents_drop.0
+ }
+ }
+
+ fn open_drop_for_adt_contents<'a>(&mut self, adt: &'tcx ty::AdtDef,
+ substs: &'tcx Substs<'tcx>)
+ -> (BasicBlock, Option<BasicBlock>) {
+ match adt.variants.len() {
+ 1 => {
+ let fields = self.move_paths_for_fields(
+ self.lvalue,
+ self.path,
+ &adt.variants[0],
+ substs
+ );
+ self.drop_ladder(fields)
+ }
+ _ => {
+ let is_cleanup = self.is_cleanup;
+ let succ = self.succ;
+ let unwind = self.unwind; // FIXME(#6393)
+
+ let mut values = Vec::with_capacity(adt.variants.len());
+ let mut normal_blocks = Vec::with_capacity(adt.variants.len());
+ let mut unwind_blocks = if is_cleanup {
+ None
+ } else {
+ Some(Vec::with_capacity(adt.variants.len()))
+ };
+ let mut otherwise = None;
+ let mut unwind_otherwise = None;
+ for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
+ let subpath = self.elaborator.downcast_subpath(
+ self.path, variant_index);
+ if let Some(variant_path) = subpath {
+ let base_lv = self.lvalue.clone().elem(
+ ProjectionElem::Downcast(adt, variant_index)
+ );
+ let fields = self.move_paths_for_fields(
+ &base_lv,
+ variant_path,
+ &adt.variants[variant_index],
+ substs);
+ values.push(discr);
+ if let Some(ref mut unwind_blocks) = unwind_blocks {
+ // We can't use the half-ladder from the original
+ // drop ladder, because this breaks the
+ // "funclet can't have 2 successor funclets"
+ // requirement from MSVC:
+ //
+ // switch unwind-switch
+ // / \ / \
+ // v1.0 v2.0 v2.0-unwind v1.0-unwind
+ // | | / |
+ // v1.1-unwind v2.1-unwind |
+ // ^ |
+ // \-------------------------------/
+ //
+ // Create a duplicate half-ladder to avoid that. We
+ // could technically only do this on MSVC, but I
+ // want to minimize the divergence between MSVC
+ // and non-MSVC.
+
+ let unwind = unwind.unwrap();
+ let halfladder = self.drop_halfladder(
+ None, unwind, &fields, true);
+ unwind_blocks.push(
+ halfladder.last().cloned().unwrap_or(unwind)
+ );
+ }
+ let (normal, _) = self.drop_ladder(fields);
+ normal_blocks.push(normal);
+ } else {
+ // variant not found - drop the entire enum
+ if let None = otherwise {
+ otherwise = Some(self.complete_drop(
+ is_cleanup,
+ Some(DropFlagMode::Shallow),
+ succ));
+ unwind_otherwise = unwind.map(|unwind| self.complete_drop(
+ true,
+ Some(DropFlagMode::Shallow),
+ unwind
+ ));
+ }
+ }
+ }
+ if let Some(block) = otherwise {
+ normal_blocks.push(block);
+ if let Some(ref mut unwind_blocks) = unwind_blocks {
+ unwind_blocks.push(unwind_otherwise.unwrap());
+ }
+ } else {
+ values.pop();
+ }
+
+ (self.adt_switch_block(is_cleanup, adt, normal_blocks, &values, succ),
+ unwind_blocks.map(|unwind_blocks| {
+ self.adt_switch_block(
+ is_cleanup, adt, unwind_blocks, &values, unwind.unwrap()
+ )
+ }))
+ }
+ }
+ }
+
+ fn adt_switch_block(&mut self,
+ is_cleanup: bool,
+ adt: &'tcx ty::AdtDef,
+ blocks: Vec<BasicBlock>,
+ values: &[ConstInt],
+ succ: BasicBlock)
+ -> BasicBlock {
+ // If there are multiple variants, then if something
+ // is present within the enum, the discriminant, tracked
+ // by the rest path, must be initialized.
+ //
+ // Additionally, we do not want to switch on the
+ // discriminant after it is freed, because that
+ // way lies only trouble.
+ let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
+ let discr = Lvalue::Local(self.new_temp(discr_ty));
+ let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
+ let switch_block = self.elaborator.patch().new_block(BasicBlockData {
+ statements: vec![
+ Statement {
+ source_info: self.source_info,
+ kind: StatementKind::Assign(discr.clone(), discr_rv),
+ }
+ ],
+ terminator: Some(Terminator {
+ source_info: self.source_info,
+ kind: TerminatorKind::SwitchInt {
+ discr: Operand::Consume(discr),
+ switch_ty: discr_ty,
+ values: From::from(values.to_owned()),
+ targets: blocks,
+ }
+ }),
+ is_cleanup: is_cleanup,
+ });
+ self.drop_flag_test_block(is_cleanup, switch_block, succ)
+ }
+
+ fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Option<BasicBlock>))
+ -> BasicBlock
+ {
+ debug!("destructor_call_block({:?}, {:?})", self, succ);
+ let tcx = self.tcx();
+ let drop_trait = tcx.lang_items.drop_trait().unwrap();
+ let drop_fn = tcx.associated_items(drop_trait).next().unwrap();
+ let ty = self.lvalue_ty(self.lvalue);
+ let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
+
+ let re_erased = tcx.mk_region(ty::ReErased);
+ let ref_ty = tcx.mk_ref(re_erased, ty::TypeAndMut {
+ ty: ty,
+ mutbl: hir::Mutability::MutMutable
+ });
+ let ref_lvalue = self.new_temp(ref_ty);
+ let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
+
+ self.elaborator.patch().new_block(BasicBlockData {
+ statements: vec![Statement {
+ source_info: self.source_info,
+ kind: StatementKind::Assign(
+ Lvalue::Local(ref_lvalue),
+ Rvalue::Ref(re_erased, BorrowKind::Mut, self.lvalue.clone())
+ )
+ }],
+ terminator: Some(Terminator {
+ kind: TerminatorKind::Call {
+ func: Operand::item(tcx, drop_fn.def_id, substs,
+ self.source_info.span),
+ args: vec![Operand::Consume(Lvalue::Local(ref_lvalue))],
+ destination: Some((unit_temp, succ)),
+ cleanup: unwind,
+ },
+ source_info: self.source_info
+ }),
+ is_cleanup: self.is_cleanup,
+ })
+ }
+
+ /// The slow-path - create an "open", elaborated drop for a type
+ /// which is moved-out-of only partially, and patch `bb` to a jump
+ /// to it. This must not be called on ADTs with a destructor,
+ /// as these can't be moved-out-of, except for `Box<T>`, which is
+ /// special-cased.
+ ///
+ /// This creates a "drop ladder" that drops the needed fields of the
+ /// ADT, both in the success case or if one of the destructors fail.
+ fn open_drop<'a>(&mut self) -> BasicBlock {
+ let ty = self.lvalue_ty(self.lvalue);
+ let is_cleanup = self.is_cleanup; // FIXME(#6393)
+ let succ = self.succ;
+ match ty.sty {
+ ty::TyClosure(def_id, substs) => {
+ let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
+ self.open_drop_for_tuple(&tys)
+ }
+ ty::TyTuple(tys, _) => {
+ self.open_drop_for_tuple(tys)
+ }
+ ty::TyAdt(def, _) if def.is_box() => {
+ self.open_drop_for_box(ty.boxed_ty())
+ }
+ ty::TyAdt(def, substs) => {
+ self.open_drop_for_adt(def, substs)
+ }
+ ty::TyDynamic(..) => {
+ self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
+ }
+ ty::TyArray(..) | ty::TySlice(..) => {
+ // FIXME(#34708): handle partially-dropped
+ // array/slice elements.
+ self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
+ }
+ _ => bug!("open drop from non-ADT `{:?}`", ty)
+ }
+ }
+
+ /// Return a basic block that drops an lvalue using the context
+ /// and path in `c`. If `mode` is something, also clear `c`
+ /// according to it.
+ ///
+ /// if FLAG(self.path)
+ /// if let Some(mode) = mode: FLAG(self.path)[mode] = false
+ /// drop(self.lv)
+ fn complete_drop<'a>(&mut self,
+ is_cleanup: bool,
+ drop_mode: Option<DropFlagMode>,
+ succ: BasicBlock) -> BasicBlock
+ {
+ debug!("complete_drop({:?},{:?})", self, drop_mode);
+
+ let drop_block = self.drop_block(is_cleanup, succ);
+ if let Some(mode) = drop_mode {
+ let block_start = Location { block: drop_block, statement_index: 0 };
+ self.elaborator.clear_drop_flag(block_start, self.path, mode);
+ }
+
+ self.drop_flag_test_block(is_cleanup, drop_block, succ)
+ }
+
+ fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
+ debug!("elaborated_drop_block({:?})", self);
+ let is_cleanup = self.is_cleanup; // FIXME(#6393)
+ let succ = self.succ;
+ let blk = self.drop_block(is_cleanup, succ);
+ self.elaborate_drop(blk);
+ blk
+ }
+
+ fn box_free_block<'a>(
+ &mut self,
+ ty: Ty<'tcx>,
+ target: BasicBlock,
+ is_cleanup: bool
+ ) -> BasicBlock {
+ let block = self.unelaborated_free_block(ty, target, is_cleanup);
+ self.drop_flag_test_block(is_cleanup, block, target)
+ }
+
+ fn unelaborated_free_block<'a>(
+ &mut self,
+ ty: Ty<'tcx>,
+ target: BasicBlock,
+ is_cleanup: bool
+ ) -> BasicBlock {
+ let tcx = self.tcx();
+ let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
+ let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
+ let substs = tcx.mk_substs(iter::once(Kind::from(ty)));
+
+ let call = TerminatorKind::Call {
+ func: Operand::item(tcx, free_func, substs, self.source_info.span),
+ args: vec![Operand::Consume(self.lvalue.clone())],
+ destination: Some((unit_temp, target)),
+ cleanup: None
+ }; // FIXME(#6393)
+ let free_block = self.new_block(is_cleanup, call);
+
+ let block_start = Location { block: free_block, statement_index: 0 };
+ self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
+ free_block
+ }
+
+ fn drop_block<'a>(&mut self, is_cleanup: bool, succ: BasicBlock) -> BasicBlock {
+ let block = TerminatorKind::Drop {
+ location: self.lvalue.clone(),
+ target: succ,
+ unwind: if is_cleanup { None } else { self.unwind }
+ };
+ self.new_block(is_cleanup, block)
+ }
+
+ fn drop_flag_test_block(&mut self,
+ is_cleanup: bool,
+ on_set: BasicBlock,
+ on_unset: BasicBlock)
+ -> BasicBlock
+ {
+ let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
+ debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
+ self, is_cleanup, on_set, style);
+
+ match style {
+ DropStyle::Dead => on_unset,
+ DropStyle::Static => on_set,
+ DropStyle::Conditional | DropStyle::Open => {
+ let flag = self.elaborator.get_drop_flag(self.path).unwrap();
+ let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
+ self.new_block(is_cleanup, term)
+ }
+ }
+ }
+
+ fn new_block<'a>(&mut self,
+ is_cleanup: bool,
+ k: TerminatorKind<'tcx>)
+ -> BasicBlock
+ {
+ self.elaborator.patch().new_block(BasicBlockData {
+ statements: vec![],
+ terminator: Some(Terminator {
+ source_info: self.source_info, kind: k
+ }),
+ is_cleanup: is_cleanup
+ })
+ }
+
+ fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
+ self.elaborator.patch().new_temp(ty)
+ }
+
+ fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
+ let mir = self.elaborator.mir();
+ self.elaborator.patch().terminator_loc(mir, bb)
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use dot;
+use rustc::hir::def_id::DefId;
+use rustc::mir::*;
+use rustc::ty::TyCtxt;
+use std::fmt::Debug;
+use std::io::{self, Write};
+use syntax::ast::NodeId;
+
+use rustc_data_structures::indexed_vec::Idx;
+
+/// Write a graphviz DOT graph of a list of MIRs.
+pub fn write_mir_graphviz<'a, 'b, 'tcx, W, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
+ iter: I,
+ w: &mut W)
+ -> io::Result<()>
+ where W: Write, I: Iterator<Item=DefId>
+{
+ for def_id in iter {
+ let nodeid = tcx.hir.as_local_node_id(def_id).unwrap();
+ let mir = &tcx.item_mir(def_id);
+
+ writeln!(w, "digraph Mir_{} {{", nodeid)?;
+
+ // Global graph properties
+ writeln!(w, r#" graph [fontname="monospace"];"#)?;
+ writeln!(w, r#" node [fontname="monospace"];"#)?;
+ writeln!(w, r#" edge [fontname="monospace"];"#)?;
+
+ // Graph label
+ write_graph_label(tcx, nodeid, mir, w)?;
+
+ // Nodes
+ for (block, _) in mir.basic_blocks().iter_enumerated() {
+ write_node(block, mir, w)?;
+ }
+
+ // Edges
+ for (source, _) in mir.basic_blocks().iter_enumerated() {
+ write_edges(source, mir, w)?;
+ }
+ writeln!(w, "}}")?
+ }
+ Ok(())
+}
+
+/// Write a graphviz HTML-styled label for the given basic block, with
+/// all necessary escaping already performed. (This is suitable for
+/// emitting directly, as is done in this module, or for use with the
+/// LabelText::HtmlStr from libgraphviz.)
+///
+/// `init` and `fini` are callbacks for emitting additional rows of
+/// data (using HTML enclosed with `<tr>` in the emitted text).
+pub fn write_node_label<W: Write, INIT, FINI>(block: BasicBlock,
+ mir: &Mir,
+ w: &mut W,
+ num_cols: u32,
+ init: INIT,
+ fini: FINI) -> io::Result<()>
+ where INIT: Fn(&mut W) -> io::Result<()>,
+ FINI: Fn(&mut W) -> io::Result<()>
+{
+ let data = &mir[block];
+
+ write!(w, r#"<table border="0" cellborder="1" cellspacing="0">"#)?;
+
+ // Basic block number at the top.
+ write!(w, r#"<tr><td {attrs} colspan="{colspan}">{blk}</td></tr>"#,
+ attrs=r#"bgcolor="gray" align="center""#,
+ colspan=num_cols,
+ blk=block.index())?;
+
+ init(w)?;
+
+ // List of statements in the middle.
+ if !data.statements.is_empty() {
+ write!(w, r#"<tr><td align="left" balign="left">"#)?;
+ for statement in &data.statements {
+ write!(w, "{}<br/>", escape(statement))?;
+ }
+ write!(w, "</td></tr>")?;
+ }
+
+ // Terminator head at the bottom, not including the list of successor blocks. Those will be
+ // displayed as labels on the edges between blocks.
+ let mut terminator_head = String::new();
+ data.terminator().kind.fmt_head(&mut terminator_head).unwrap();
+ write!(w, r#"<tr><td align="left">{}</td></tr>"#, dot::escape_html(&terminator_head))?;
+
+ fini(w)?;
+
+ // Close the table
+ writeln!(w, "</table>")
+}
+
+/// Write a graphviz DOT node for the given basic block.
+fn write_node<W: Write>(block: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {
+ // Start a new node with the label to follow, in one of DOT's pseudo-HTML tables.
+ write!(w, r#" {} [shape="none", label=<"#, node(block))?;
+ write_node_label(block, mir, w, 1, |_| Ok(()), |_| Ok(()))?;
+ // Close the node label and the node itself.
+ writeln!(w, ">];")
+}
+
+/// Write graphviz DOT edges with labels between the given basic block and all of its successors.
+fn write_edges<W: Write>(source: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()> {
+ let terminator = mir[source].terminator();
+ let labels = terminator.kind.fmt_successor_labels();
+
+ for (&target, label) in terminator.successors().iter().zip(labels) {
+ writeln!(w, r#" {} -> {} [label="{}"];"#, node(source), node(target), label)?;
+ }
+
+ Ok(())
+}
+
+/// Write the graphviz DOT label for the overall graph. This is essentially a block of text that
+/// will appear below the graph, showing the type of the `fn` this MIR represents and the types of
+/// all the variables and temporaries.
+fn write_graph_label<'a, 'tcx, W: Write>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ nid: NodeId,
+ mir: &Mir,
+ w: &mut W)
+ -> io::Result<()> {
+ write!(w, " label=<fn {}(", dot::escape_html(&tcx.node_path_str(nid)))?;
+
+ // fn argument types.
+ for (i, arg) in mir.args_iter().enumerate() {
+ if i > 0 {
+ write!(w, ", ")?;
+ }
+ write!(w, "{:?}: {}", Lvalue::Local(arg), escape(&mir.local_decls[arg].ty))?;
+ }
+
+ write!(w, ") -> {}", escape(mir.return_ty))?;
+ write!(w, r#"<br align="left"/>"#)?;
+
+ for local in mir.vars_and_temps_iter() {
+ let decl = &mir.local_decls[local];
+
+ write!(w, "let ")?;
+ if decl.mutability == Mutability::Mut {
+ write!(w, "mut ")?;
+ }
+
+ if let Some(name) = decl.name {
+ write!(w, r#"{:?}: {}; // {}<br align="left"/>"#,
+ Lvalue::Local(local), escape(&decl.ty), name)?;
+ } else {
+ write!(w, r#"let mut {:?}: {};<br align="left"/>"#,
+ Lvalue::Local(local), escape(&decl.ty))?;
+ }
+ }
+
+ writeln!(w, ">;")
+}
+
+fn node(block: BasicBlock) -> String {
+ format!("bb{}", block.index())
+}
+
+fn escape<T: Debug>(t: &T) -> String {
+ dot::escape_html(&format!("{:?}", t))
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub mod elaborate_drops;
+pub mod def_use;
+pub mod patch;
+
+mod graphviz;
+mod pretty;
+
+pub use self::pretty::{dump_mir, write_mir_pretty};
+pub use self::graphviz::{write_mir_graphviz};
+pub use self::graphviz::write_node_label as write_graphviz_node_label;
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::ty::Ty;
+use rustc::mir::*;
+use rustc_data_structures::indexed_vec::{IndexVec, Idx};
+
+/// This struct represents a patch to MIR, which can add
+/// new statements and basic blocks and patch over block
+/// terminators.
+pub struct MirPatch<'tcx> {
+ patch_map: IndexVec<BasicBlock, Option<TerminatorKind<'tcx>>>,
+ new_blocks: Vec<BasicBlockData<'tcx>>,
+ new_statements: Vec<(Location, StatementKind<'tcx>)>,
+ new_locals: Vec<LocalDecl<'tcx>>,
+ resume_block: BasicBlock,
+ next_local: usize,
+}
+
+impl<'tcx> MirPatch<'tcx> {
+ pub fn new(mir: &Mir<'tcx>) -> Self {
+ let mut result = MirPatch {
+ patch_map: IndexVec::from_elem(None, mir.basic_blocks()),
+ new_blocks: vec![],
+ new_statements: vec![],
+ new_locals: vec![],
+ next_local: mir.local_decls.len(),
+ resume_block: START_BLOCK
+ };
+
+ // make sure the MIR we create has a resume block. It is
+ // completely legal to convert jumps to the resume block
+ // to jumps to None, but we occasionally have to add
+ // instructions just before that.
+
+ let mut resume_block = None;
+ let mut resume_stmt_block = None;
+ for (bb, block) in mir.basic_blocks().iter_enumerated() {
+ if let TerminatorKind::Resume = block.terminator().kind {
+ if block.statements.len() > 0 {
+ resume_stmt_block = Some(bb);
+ } else {
+ resume_block = Some(bb);
+ }
+ break
+ }
+ }
+ let resume_block = resume_block.unwrap_or_else(|| {
+ result.new_block(BasicBlockData {
+ statements: vec![],
+ terminator: Some(Terminator {
+ source_info: SourceInfo {
+ span: mir.span,
+ scope: ARGUMENT_VISIBILITY_SCOPE
+ },
+ kind: TerminatorKind::Resume
+ }),
+ is_cleanup: true
+ })});
+ result.resume_block = resume_block;
+ if let Some(resume_stmt_block) = resume_stmt_block {
+ result.patch_terminator(resume_stmt_block, TerminatorKind::Goto {
+ target: resume_block
+ });
+ }
+ result
+ }
+
+ pub fn resume_block(&self) -> BasicBlock {
+ self.resume_block
+ }
+
+ pub fn is_patched(&self, bb: BasicBlock) -> bool {
+ self.patch_map[bb].is_some()
+ }
+
+ pub fn terminator_loc(&self, mir: &Mir<'tcx>, bb: BasicBlock) -> Location {
+ let offset = match bb.index().checked_sub(mir.basic_blocks().len()) {
+ Some(index) => self.new_blocks[index].statements.len(),
+ None => mir[bb].statements.len()
+ };
+ Location {
+ block: bb,
+ statement_index: offset
+ }
+ }
+
+ pub fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
+ let index = self.next_local;
+ self.next_local += 1;
+ self.new_locals.push(LocalDecl::new_temp(ty));
+ Local::new(index as usize)
+ }
+
+ pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock {
+ let block = BasicBlock::new(self.patch_map.len());
+ debug!("MirPatch: new_block: {:?}: {:?}", block, data);
+ self.new_blocks.push(data);
+ self.patch_map.push(None);
+ block
+ }
+
+ pub fn patch_terminator(&mut self, block: BasicBlock, new: TerminatorKind<'tcx>) {
+ assert!(self.patch_map[block].is_none());
+ debug!("MirPatch: patch_terminator({:?}, {:?})", block, new);
+ self.patch_map[block] = Some(new);
+ }
+
+ pub fn add_statement(&mut self, loc: Location, stmt: StatementKind<'tcx>) {
+ debug!("MirPatch: add_statement({:?}, {:?})", loc, stmt);
+ self.new_statements.push((loc, stmt));
+ }
+
+ pub fn add_assign(&mut self, loc: Location, lv: Lvalue<'tcx>, rv: Rvalue<'tcx>) {
+ self.add_statement(loc, StatementKind::Assign(lv, rv));
+ }
+
+ pub fn apply(self, mir: &mut Mir<'tcx>) {
+ debug!("MirPatch: {:?} new temps, starting from index {}: {:?}",
+ self.new_locals.len(), mir.local_decls.len(), self.new_locals);
+ debug!("MirPatch: {} new blocks, starting from index {}",
+ self.new_blocks.len(), mir.basic_blocks().len());
+ mir.basic_blocks_mut().extend(self.new_blocks);
+ mir.local_decls.extend(self.new_locals);
+ for (src, patch) in self.patch_map.into_iter_enumerated() {
+ if let Some(patch) = patch {
+ debug!("MirPatch: patching block {:?}", src);
+ mir[src].terminator_mut().kind = patch;
+ }
+ }
+
+ let mut new_statements = self.new_statements;
+ new_statements.sort_by(|u,v| u.0.cmp(&v.0));
+
+ let mut delta = 0;
+ let mut last_bb = START_BLOCK;
+ for (mut loc, stmt) in new_statements {
+ if loc.block != last_bb {
+ delta = 0;
+ last_bb = loc.block;
+ }
+ debug!("MirPatch: adding statement {:?} at loc {:?}+{}",
+ stmt, loc, delta);
+ loc.statement_index += delta;
+ let source_info = Self::source_info_for_index(
+ &mir[loc.block], loc
+ );
+ mir[loc.block].statements.insert(
+ loc.statement_index, Statement {
+ source_info: source_info,
+ kind: stmt
+ });
+ delta += 1;
+ }
+ }
+
+ pub fn source_info_for_index(data: &BasicBlockData, loc: Location) -> SourceInfo {
+ match data.statements.get(loc.statement_index) {
+ Some(stmt) => stmt.source_info,
+ None => data.terminator().source_info
+ }
+ }
+
+ pub fn source_info_for_location(&self, mir: &Mir, loc: Location) -> SourceInfo {
+ let data = match loc.block.index().checked_sub(mir.basic_blocks().len()) {
+ Some(new) => &self.new_blocks[new],
+ None => &mir[loc.block]
+ };
+ Self::source_info_for_index(data, loc)
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir;
+use rustc::hir::def_id::DefId;
+use rustc::mir::*;
+use rustc::mir::transform::MirSource;
+use rustc::ty::TyCtxt;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::indexed_vec::{Idx};
+use std::fmt::Display;
+use std::fs;
+use std::io::{self, Write};
+use std::path::{PathBuf, Path};
+
+const INDENT: &'static str = " ";
+/// Alignment for lining up comments following MIR statements
+const ALIGN: usize = 40;
+
+/// If the session is properly configured, dumps a human-readable
+/// representation of the mir into:
+///
+/// ```text
+/// rustc.node<node_id>.<pass_name>.<disambiguator>.mir
+/// ```
+///
+/// Output from this function is controlled by passing `-Z dump-mir=<filter>`,
+/// where `<filter>` takes the following forms:
+///
+/// - `all` -- dump MIR for all fns, all passes, all everything
+/// - `substring1&substring2,...` -- `&`-separated list of substrings
+/// that can appear in the pass-name or the `item_path_str` for the given
+/// node-id. If any one of the substrings match, the data is dumped out.
+pub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ pass_name: &str,
+ disambiguator: &Display,
+ src: MirSource,
+ mir: &Mir<'tcx>) {
+ let filters = match tcx.sess.opts.debugging_opts.dump_mir {
+ None => return,
+ Some(ref filters) => filters,
+ };
+ let node_id = src.item_id();
+ let node_path = tcx.item_path_str(tcx.hir.local_def_id(node_id));
+ let is_matched =
+ filters.split("&")
+ .any(|filter| {
+ filter == "all" ||
+ pass_name.contains(filter) ||
+ node_path.contains(filter)
+ });
+ if !is_matched {
+ return;
+ }
+
+ let promotion_id = match src {
+ MirSource::Promoted(_, id) => format!("-{:?}", id),
+ _ => String::new()
+ };
+
+ let mut file_path = PathBuf::new();
+ if let Some(ref file_dir) = tcx.sess.opts.debugging_opts.dump_mir_dir {
+ let p = Path::new(file_dir);
+ file_path.push(p);
+ };
+ let file_name = format!("rustc.node{}{}.{}.{}.mir",
+ node_id, promotion_id, pass_name, disambiguator);
+ file_path.push(&file_name);
+ let _ = fs::File::create(&file_path).and_then(|mut file| {
+ writeln!(file, "// MIR for `{}`", node_path)?;
+ writeln!(file, "// node_id = {}", node_id)?;
+ writeln!(file, "// pass_name = {}", pass_name)?;
+ writeln!(file, "// disambiguator = {}", disambiguator)?;
+ writeln!(file, "")?;
+ write_mir_fn(tcx, src, mir, &mut file)?;
+ Ok(())
+ });
+}
+
+/// Write out a human-readable textual representation for the given MIR.
+pub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
+ iter: I,
+ w: &mut Write)
+ -> io::Result<()>
+ where I: Iterator<Item=DefId>, 'tcx: 'a
+{
+ let mut first = true;
+ for def_id in iter.filter(DefId::is_local) {
+ let mir = &tcx.item_mir(def_id);
+
+ if first {
+ first = false;
+ } else {
+ // Put empty lines between all items
+ writeln!(w, "")?;
+ }
+
+ let id = tcx.hir.as_local_node_id(def_id).unwrap();
+ let src = MirSource::from_node(tcx, id);
+ write_mir_fn(tcx, src, mir, w)?;
+
+ for (i, mir) in mir.promoted.iter_enumerated() {
+ writeln!(w, "")?;
+ write_mir_fn(tcx, MirSource::Promoted(id, i), mir, w)?;
+ }
+ }
+ Ok(())
+}
+
+pub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ src: MirSource,
+ mir: &Mir<'tcx>,
+ w: &mut Write)
+ -> io::Result<()> {
+ write_mir_intro(tcx, src, mir, w)?;
+ for block in mir.basic_blocks().indices() {
+ write_basic_block(tcx, block, mir, w)?;
+ if block.index() + 1 != mir.basic_blocks().len() {
+ writeln!(w, "")?;
+ }
+ }
+
+ writeln!(w, "}}")?;
+ Ok(())
+}
+
+/// Write out a human-readable textual representation for the given basic block.
+fn write_basic_block(tcx: TyCtxt,
+ block: BasicBlock,
+ mir: &Mir,
+ w: &mut Write)
+ -> io::Result<()> {
+ let data = &mir[block];
+
+ // Basic block label at the top.
+ writeln!(w, "{}{:?}: {{", INDENT, block)?;
+
+ // List of statements in the middle.
+ let mut current_location = Location { block: block, statement_index: 0 };
+ for statement in &data.statements {
+ let indented_mir = format!("{0}{0}{1:?};", INDENT, statement);
+ writeln!(w, "{0:1$} // {2}",
+ indented_mir,
+ ALIGN,
+ comment(tcx, statement.source_info))?;
+
+ current_location.statement_index += 1;
+ }
+
+ // Terminator at the bottom.
+ let indented_terminator = format!("{0}{0}{1:?};", INDENT, data.terminator().kind);
+ writeln!(w, "{0:1$} // {2}",
+ indented_terminator,
+ ALIGN,
+ comment(tcx, data.terminator().source_info))?;
+
+ writeln!(w, "{}}}", INDENT)
+}
+
+fn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {
+ format!("scope {} at {}", scope.index(), tcx.sess.codemap().span_to_string(span))
+}
+
+/// Prints user-defined variables in a scope tree.
+///
+/// Returns the total number of variables printed.
+fn write_scope_tree(tcx: TyCtxt,
+ mir: &Mir,
+ scope_tree: &FxHashMap<VisibilityScope, Vec<VisibilityScope>>,
+ w: &mut Write,
+ parent: VisibilityScope,
+ depth: usize)
+ -> io::Result<()> {
+ let indent = depth * INDENT.len();
+
+ let children = match scope_tree.get(&parent) {
+ Some(childs) => childs,
+ None => return Ok(()),
+ };
+
+ for &child in children {
+ let data = &mir.visibility_scopes[child];
+ assert_eq!(data.parent_scope, Some(parent));
+ writeln!(w, "{0:1$}scope {2} {{", "", indent, child.index())?;
+
+ // User variable types (including the user's name in a comment).
+ for local in mir.vars_iter() {
+ let var = &mir.local_decls[local];
+ let (name, source_info) = if var.source_info.unwrap().scope == child {
+ (var.name.unwrap(), var.source_info.unwrap())
+ } else {
+ // Not a variable or not declared in this scope.
+ continue;
+ };
+
+ let mut_str = if var.mutability == Mutability::Mut {
+ "mut "
+ } else {
+ ""
+ };
+
+ let indent = indent + INDENT.len();
+ let indented_var = format!("{0:1$}let {2}{3:?}: {4};",
+ INDENT,
+ indent,
+ mut_str,
+ local,
+ var.ty);
+ writeln!(w, "{0:1$} // \"{2}\" in {3}",
+ indented_var,
+ ALIGN,
+ name,
+ comment(tcx, source_info))?;
+ }
+
+ write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?;
+
+ writeln!(w, "{0:1$}}}", "", depth * INDENT.len())?;
+ }
+
+ Ok(())
+}
+
+/// Write out a human-readable textual representation of the MIR's `fn` type and the types of its
+/// local variables (both user-defined bindings and compiler temporaries).
+fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ src: MirSource,
+ mir: &Mir,
+ w: &mut Write)
+ -> io::Result<()> {
+ write_mir_sig(tcx, src, mir, w)?;
+ writeln!(w, " {{")?;
+
+ // construct a scope tree and write it out
+ let mut scope_tree: FxHashMap<VisibilityScope, Vec<VisibilityScope>> = FxHashMap();
+ for (index, scope_data) in mir.visibility_scopes.iter().enumerate() {
+ if let Some(parent) = scope_data.parent_scope {
+ scope_tree.entry(parent)
+ .or_insert(vec![])
+ .push(VisibilityScope::new(index));
+ } else {
+ // Only the argument scope has no parent, because it's the root.
+ assert_eq!(index, ARGUMENT_VISIBILITY_SCOPE.index());
+ }
+ }
+
+ // Print return pointer
+ let indented_retptr = format!("{}let mut {:?}: {};",
+ INDENT,
+ RETURN_POINTER,
+ mir.return_ty);
+ writeln!(w, "{0:1$} // return pointer",
+ indented_retptr,
+ ALIGN)?;
+
+ write_scope_tree(tcx, mir, &scope_tree, w, ARGUMENT_VISIBILITY_SCOPE, 1)?;
+
+ write_temp_decls(mir, w)?;
+
+ // Add an empty line before the first block is printed.
+ writeln!(w, "")?;
+
+ Ok(())
+}
+
+fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)
+ -> io::Result<()>
+{
+ match src {
+ MirSource::Fn(_) => write!(w, "fn")?,
+ MirSource::Const(_) => write!(w, "const")?,
+ MirSource::Static(_, hir::MutImmutable) => write!(w, "static")?,
+ MirSource::Static(_, hir::MutMutable) => write!(w, "static mut")?,
+ MirSource::Promoted(_, i) => write!(w, "{:?} in", i)?
+ }
+
+ write!(w, " {}", tcx.node_path_str(src.item_id()))?;
+
+ if let MirSource::Fn(_) = src {
+ write!(w, "(")?;
+
+ // fn argument types.
+ for (i, arg) in mir.args_iter().enumerate() {
+ if i != 0 {
+ write!(w, ", ")?;
+ }
+ write!(w, "{:?}: {}", Lvalue::Local(arg), mir.local_decls[arg].ty)?;
+ }
+
+ write!(w, ") -> {}", mir.return_ty)
+ } else {
+ assert_eq!(mir.arg_count, 0);
+ write!(w, ": {} =", mir.return_ty)
+ }
+}
+
+fn write_temp_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {
+ // Compiler-introduced temporary types.
+ for temp in mir.temps_iter() {
+ writeln!(w, "{}let mut {:?}: {};", INDENT, temp, mir.local_decls[temp].ty)?;
+ }
+
+ Ok(())
+}
ItemKind::Mod(_) => {
// Ensure that `path` attributes on modules are recorded as used (c.f. #35584).
attr::first_attr_value_str_by_name(&item.attrs, "path");
- if let Some(attr) =
- item.attrs.iter().find(|attr| attr.name() == "warn_directory_ownership") {
+ if item.attrs.iter().any(|attr| attr.check_name("warn_directory_ownership")) {
let lint = lint::builtin::LEGACY_DIRECTORY_OWNERSHIP;
let msg = "cannot declare a new module at this location";
self.session.add_lint(lint, item.id, item.span, msg.to_string());
- attr::mark_used(attr);
}
}
ItemKind::Union(ref vdata, _) => {
use rustc::middle::cstore::LoadedMacro;
use rustc::hir::def::*;
-use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId};
+use rustc::hir::def_id::{CrateNum, BUILTIN_MACROS_CRATE, CRATE_DEF_INDEX, DefId};
use rustc::ty;
use std::cell::Cell;
let def_id = self.macro_defs[&expansion];
if let Some(id) = self.definitions.as_local_node_id(def_id) {
self.local_macro_def_scopes[&id]
+ } else if def_id.krate == BUILTIN_MACROS_CRATE {
+            // FIXME(jseyfried): This happens when `include!()`ing a `$crate::` path, cf. #40469.
+ self.graph_root
} else {
let module_def_id = ty::DefIdTree::parent(&*self, def_id).unwrap();
self.get_extern_crate_root(module_def_id.krate)
privacy_errors: Vec<PrivacyError<'a>>,
ambiguity_errors: Vec<AmbiguityError<'a>>,
+ gated_errors: FxHashSet<Span>,
disallowed_shadowing: Vec<&'a LegacyBinding<'a>>,
arenas: &'a ResolverArenas<'a>,
privacy_errors: Vec::new(),
ambiguity_errors: Vec::new(),
+ gated_errors: FxHashSet(),
disallowed_shadowing: Vec::new(),
arenas: arenas,
if self.proc_macro_enabled { return; }
for attr in attrs {
- let maybe_binding = self.builtin_macros.get(&attr.name()).cloned().or_else(|| {
- let ident = Ident::with_empty_ctxt(attr.name());
+ let name = unwrap_or!(attr.name(), continue);
+ let maybe_binding = self.builtin_macros.get(&name).cloned().or_else(|| {
+ let ident = Ident::with_empty_ctxt(name);
self.resolve_lexical_macro_path_segment(ident, MacroNS, None).ok()
});
use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{self, emit_feature_err, GateIssue};
use syntax::fold::{self, Folder};
+use syntax::parse::parser::PathStyle;
+use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
+use syntax::tokenstream::{TokenStream, TokenTree, Delimited};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{Span, DUMMY_SP};
fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<ast::Attribute>)
-> Option<ast::Attribute> {
for i in 0..attrs.len() {
+ let name = unwrap_or!(attrs[i].name(), continue);
+
if self.session.plugin_attributes.borrow().iter()
- .any(|&(ref attr_nm, _)| attrs[i].name() == &**attr_nm) {
+ .any(|&(ref attr_nm, _)| name == &**attr_nm) {
attr::mark_known(&attrs[i]);
}
- match self.builtin_macros.get(&attrs[i].name()).cloned() {
+ match self.builtin_macros.get(&name).cloned() {
Some(binding) => match *binding.get_macro(self) {
MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => {
return Some(attrs.remove(i))
// Check for legacy derives
for i in 0..attrs.len() {
- if attrs[i].name() == "derive" {
- let mut traits = match attrs[i].meta_item_list() {
- Some(traits) if !traits.is_empty() => traits.to_owned(),
- _ => continue,
+ let name = unwrap_or!(attrs[i].name(), continue);
+
+ if name == "derive" {
+ let result = attrs[i].parse_list(&self.session.parse_sess,
+ |parser| parser.parse_path(PathStyle::Mod));
+ let mut traits = match result {
+ Ok(traits) => traits,
+ Err(mut e) => {
+ e.cancel();
+ continue
+ }
};
for j in 0..traits.len() {
- let legacy_name = Symbol::intern(&match traits[j].word() {
- Some(..) => format!("derive_{}", traits[j].name().unwrap()),
- None => continue,
- });
+ if traits[j].segments.len() > 1 {
+ continue
+ }
+ let trait_name = traits[j].segments[0].identifier.name;
+ let legacy_name = Symbol::intern(&format!("derive_{}", trait_name));
if !self.builtin_macros.contains_key(&legacy_name) {
continue
}
if traits.is_empty() {
attrs.remove(i);
} else {
- attrs[i].value = ast::MetaItem {
- name: attrs[i].name(),
- span: attrs[i].span,
- node: ast::MetaItemKind::List(traits),
- };
+ let mut tokens = Vec::new();
+ for (i, path) in traits.iter().enumerate() {
+ if i > 0 {
+ tokens.push(TokenTree::Token(attrs[i].span, Token::Comma).into());
+ }
+ for (j, segment) in path.segments.iter().enumerate() {
+ if j > 0 {
+ tokens.push(TokenTree::Token(path.span, Token::ModSep).into());
+ }
+ let tok = Token::Ident(segment.identifier);
+ tokens.push(TokenTree::Token(path.span, tok).into());
+ }
+ }
+ attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited {
+ delim: token::Paren,
+ tts: TokenStream::concat(tokens).into(),
+ }).into();
}
return Some(ast::Attribute {
- value: ast::MetaItem {
- name: legacy_name,
- span: span,
- node: ast::MetaItemKind::Word,
- },
+ path: ast::Path::from_ident(span, Ident::with_empty_ctxt(legacy_name)),
+ tokens: TokenStream::empty(),
id: attr::mk_attr_id(),
style: ast::AttrStyle::Outer,
is_sugared_doc: false,
InvocationKind::Bang { ref mac, .. } => {
return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force);
}
- InvocationKind::Derive { name, span, .. } => {
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
- return self.resolve_macro_to_def(scope, &path, MacroKind::Derive, force);
+ InvocationKind::Derive { ref path, .. } => {
+ return self.resolve_macro_to_def(scope, path, MacroKind::Derive, force);
}
};
- let (attr_name, path) = {
- let attr = attr.as_ref().unwrap();
- (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name())))
- };
- let mut determined = true;
+ let path = attr.as_ref().unwrap().path.clone();
+ let mut determinacy = Determinacy::Determined;
match self.resolve_macro_to_def(scope, &path, MacroKind::Attr, force) {
Ok(def) => return Ok(def),
- Err(Determinacy::Undetermined) => determined = false,
+ Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined,
Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
Err(Determinacy::Determined) => {}
}
- for &(name, span) in traits {
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
- match self.resolve_macro(scope, &path, MacroKind::Derive, force) {
+ let attr_name = match path.segments.len() {
+ 1 => path.segments[0].identifier.name,
+ _ => return Err(determinacy),
+ };
+ for path in traits {
+ match self.resolve_macro(scope, path, MacroKind::Derive, force) {
Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
if inert_attrs.contains(&attr_name) {
// FIXME(jseyfried) Avoid `mem::replace` here.
}
return Err(Determinacy::Undetermined);
},
- Err(Determinacy::Undetermined) => determined = false,
+ Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined,
Err(Determinacy::Determined) => {}
}
}
- Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined })
+ Err(determinacy)
}
fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
self.current_module = invocation.module.get();
if path.len() > 1 {
- if !self.use_extern_macros {
+ if !self.use_extern_macros && self.gated_errors.insert(span) {
let msg = "non-ident macro paths are experimental";
let feature = "use_extern_macros";
emit_feature_err(&self.session.parse_sess, feature, span, GateIssue::Language, msg);
log = { path = "../liblog" }
rustc = { path = "../librustc" }
syntax = { path = "../libsyntax" }
-serialize = { path = "../libserialize" }
-syntax_pos = { path = "../libsyntax_pos" }
\ No newline at end of file
+syntax_pos = { path = "../libsyntax_pos" }
+rls-data = "0.1"
+rls-span = "0.1"
+# FIXME(#40527) should move rustc serialize out of tree
+rustc-serialize = "0.3"
use super::external_data::*;
use super::dump::Dump;
+use rls_data::{SpanData, CratePreludeData};
+
pub struct CsvDumper<'b, W: 'b> {
output: &'b mut W
}
fn span_extent_str(span: SpanData) -> String {
format!("file_name,\"{}\",file_line,{},file_col,{},byte_start,{},\
file_line_end,{},file_col_end,{},byte_end,{}",
- span.file_name, span.line_start, span.column_start, span.byte_start,
- span.line_end, span.column_end, span.byte_end)
+ span.file_name.to_str().unwrap(), span.line_start.0, span.column_start.0,
+ span.byte_start, span.line_end.0, span.column_end.0, span.byte_end)
}
use syntax::ast::{self, Attribute, NodeId};
use syntax_pos::Span;
+use rls_data::ExternalCrateData;
+
pub struct CrateData {
pub name: String,
pub number: u32,
/// Data for any entity in the Rust language. The actual data contained varies
/// with the kind of entity being queried. See the nested structs for details.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub enum Data {
/// Data for Enums.
EnumData(EnumData),
VariableRefData(VariableRefData),
}
-#[derive(Eq, PartialEq, Clone, Copy, Debug, RustcEncodable)]
+#[derive(Eq, PartialEq, Clone, Copy, Debug)]
pub enum Visibility {
Public,
Restricted,
}
/// Data for the prelude of a crate.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct CratePreludeData {
pub crate_name: String,
pub crate_root: String,
pub span: Span,
}
-/// Data for external crates in the prelude of a crate.
-#[derive(Debug, RustcEncodable)]
-pub struct ExternalCrateData {
- pub name: String,
- pub num: CrateNum,
- pub file_name: String,
-}
-
/// Data for enum declarations.
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct EnumData {
pub id: NodeId,
pub name: String,
}
/// Data for extern crates.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct ExternCrateData {
pub id: NodeId,
pub name: String,
}
/// Data about a function call.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct FunctionCallData {
pub span: Span,
pub scope: NodeId,
}
/// Data for all kinds of functions and methods.
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct FunctionData {
pub id: NodeId,
pub name: String,
}
/// Data about a function call.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct FunctionRefData {
pub span: Span,
pub scope: NodeId,
pub ref_id: DefId,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct ImplData {
pub id: NodeId,
pub span: Span,
pub self_ref: Option<DefId>,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
// FIXME: this struct should not exist. However, removing it requires heavy
// refactoring of dump_visitor.rs. See PR 31838 for more info.
pub struct ImplData2 {
pub self_ref: Option<TypeRefData>,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct InheritanceData {
pub span: Span,
pub base_id: DefId,
}
/// Data about a macro declaration.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct MacroData {
pub span: Span,
pub name: String,
}
/// Data about a macro use.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct MacroUseData {
pub span: Span,
pub name: String,
}
/// Data about a method call.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct MethodCallData {
pub span: Span,
pub scope: NodeId,
}
/// Data for method declarations (methods with a body are treated as functions).
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct MethodData {
pub id: NodeId,
pub name: String,
}
/// Data for modules.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct ModData {
pub id: NodeId,
pub name: String,
}
/// Data for a reference to a module.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct ModRefData {
pub span: Span,
pub scope: NodeId,
pub qualname: String
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct StructData {
pub span: Span,
pub name: String,
pub attributes: Vec<Attribute>,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct StructVariantData {
pub span: Span,
pub name: String,
pub attributes: Vec<Attribute>,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct TraitData {
pub span: Span,
pub id: NodeId,
pub attributes: Vec<Attribute>,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct TupleVariantData {
pub span: Span,
pub id: NodeId,
}
/// Data for a typedef.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct TypeDefData {
pub id: NodeId,
pub name: String,
}
/// Data for a reference to a type or trait.
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct TypeRefData {
pub span: Span,
pub scope: NodeId,
pub qualname: String,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct UseData {
pub id: NodeId,
pub span: Span,
pub visibility: Visibility,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct UseGlobData {
pub id: NodeId,
pub span: Span,
}
/// Data for local and global variables (consts and statics).
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct VariableData {
pub id: NodeId,
pub kind: VariableKind,
pub attributes: Vec<Attribute>,
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub enum VariableKind {
Static,
Const,
/// Data for the use of some item (e.g., the use of a local variable, which
/// will refer to that variables declaration (by ref_id)).
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct VariableRefData {
pub name: String,
pub span: Span,
/// Encodes information about the signature of a definition. This should have
/// enough information to create a nice display about a definition without
/// access to the source code.
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct Signature {
pub span: Span,
pub text: String,
/// An element of a signature. `start` and `end` are byte offsets into the `text`
/// of the parent `Signature`.
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct SigElement {
pub id: DefId,
pub start: usize,
use super::external_data::*;
+use rls_data::CratePreludeData;
+
pub trait Dump {
fn crate_prelude(&mut self, CratePreludeData) {}
fn enum_data(&mut self, EnumData) {}
use rustc::hir;
use rustc::hir::def::Def;
-use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::hir::map::{Node, NodeItem};
use rustc::session::Session;
use rustc::ty::{self, TyCtxt, AssociatedItemContainer};
use super::span_utils::SpanUtils;
use super::recorder;
+use rls_data::ExternalCrateData;
+
macro_rules! down_cast_data {
($id:ident, $kind:ident, $sp:expr) => {
let $id = if let super::Data::$kind(data) = $id {
let lo_loc = self.span.sess.codemap().lookup_char_pos(c.span.lo);
ExternalCrateData {
name: c.name,
- num: CrateNum::from_u32(c.number),
+ num: c.number,
file_name: SpanUtils::make_path_string(&lo_loc.file.name),
}
}).collect();
use syntax::ast::{self, NodeId};
use syntax::codemap::CodeMap;
use syntax::print::pprust;
-use syntax::symbol::Symbol;
use syntax_pos::Span;
use data::{self, Visibility, SigElement};
+use rls_data::{SpanData, CratePreludeData, Attribute};
+use rls_span::{Column, Row};
+
// FIXME: this should be pub(crate), but the current snapshot doesn't allow it yet
pub trait Lower {
type Target;
}
}
-#[derive(Clone, Debug, RustcEncodable)]
-pub struct SpanData {
- pub file_name: String,
- pub byte_start: u32,
- pub byte_end: u32,
- /// 1-based.
- pub line_start: usize,
- pub line_end: usize,
- /// 1-based, character offset.
- pub column_start: usize,
- pub column_end: usize,
-}
-
-impl SpanData {
- pub fn from_span(span: Span, cm: &CodeMap) -> SpanData {
- let start = cm.lookup_char_pos(span.lo);
- let end = cm.lookup_char_pos(span.hi);
-
- SpanData {
- file_name: start.file.name.clone(),
- byte_start: span.lo.0,
- byte_end: span.hi.0,
- line_start: start.line,
- line_end: end.line,
- column_start: start.col.0 + 1,
- column_end: end.col.0 + 1,
- }
- }
-}
+pub fn span_from_span(span: Span, cm: &CodeMap) -> SpanData {
+ let start = cm.lookup_char_pos(span.lo);
+ let end = cm.lookup_char_pos(span.hi);
-/// Represent an arbitrary attribute on a code element
-#[derive(Clone, Debug, RustcEncodable)]
-pub struct Attribute {
- value: String,
- span: SpanData,
+ SpanData {
+ file_name: start.file.name.clone().into(),
+ byte_start: span.lo.0,
+ byte_end: span.hi.0,
+ line_start: Row::new_one_indexed(start.line as u32),
+ line_end: Row::new_one_indexed(end.line as u32),
+ column_start: Column::new_one_indexed(start.col.0 as u32 + 1),
+ column_end: Column::new_one_indexed(end.col.0 as u32 + 1),
+ }
}
impl Lower for Vec<ast::Attribute> {
type Target = Vec<Attribute>;
fn lower(self, tcx: TyCtxt) -> Vec<Attribute> {
- let doc = Symbol::intern("doc");
self.into_iter()
// Only retain real attributes. Doc comments are lowered separately.
- .filter(|attr| attr.name() != doc)
+ .filter(|attr| attr.path != "doc")
.map(|mut attr| {
// Remove the surrounding '#[..]' or '#![..]' of the pretty printed
// attribute. First normalize all inner attribute (#![..]) to outer
Attribute {
value: value,
- span: SpanData::from_span(attr.span, tcx.sess.codemap()),
+ span: span_from_span(attr.span, tcx.sess.codemap()),
}
}).collect()
}
}
-#[derive(Debug, RustcEncodable)]
-pub struct CratePreludeData {
- pub crate_name: String,
- pub crate_root: String,
- pub external_crates: Vec<data::ExternalCrateData>,
- pub span: SpanData,
-}
-
impl Lower for data::CratePreludeData {
type Target = CratePreludeData;
crate_name: self.crate_name,
crate_root: self.crate_root,
external_crates: self.external_crates,
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
}
}
}
/// Data for enum declarations.
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct EnumData {
pub id: DefId,
pub value: String,
name: self.name,
value: self.value,
qualname: self.qualname,
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
variants: self.variants.into_iter().map(|id| make_def_id(id, &tcx.hir)).collect(),
visibility: self.visibility,
}
/// Data for extern crates.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct ExternCrateData {
pub id: DefId,
pub name: String,
name: self.name,
crate_num: self.crate_num,
location: self.location,
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
}
}
}
/// Data about a function call.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct FunctionCallData {
pub span: SpanData,
pub scope: DefId,
fn lower(self, tcx: TyCtxt) -> FunctionCallData {
FunctionCallData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
ref_id: self.ref_id,
}
}
/// Data for all kinds of functions and methods.
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct FunctionData {
pub id: DefId,
pub name: String,
name: self.name,
qualname: self.qualname,
declaration: self.declaration,
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
value: self.value,
visibility: self.visibility,
}
/// Data about a function call.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct FunctionRefData {
pub span: SpanData,
pub scope: DefId,
fn lower(self, tcx: TyCtxt) -> FunctionRefData {
FunctionRefData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
ref_id: self.ref_id,
}
}
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct ImplData {
pub id: DefId,
pub span: SpanData,
fn lower(self, tcx: TyCtxt) -> ImplData {
ImplData {
id: make_def_id(self.id, &tcx.hir),
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
trait_ref: self.trait_ref,
self_ref: self.self_ref,
}
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct InheritanceData {
pub span: SpanData,
pub base_id: DefId,
fn lower(self, tcx: TyCtxt) -> InheritanceData {
InheritanceData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
base_id: self.base_id,
deriv_id: make_def_id(self.deriv_id, &tcx.hir)
}
}
/// Data about a macro declaration.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct MacroData {
pub span: SpanData,
pub name: String,
fn lower(self, tcx: TyCtxt) -> MacroData {
MacroData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
name: self.name,
qualname: self.qualname,
docs: self.docs,
}
/// Data about a macro use.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct MacroUseData {
pub span: SpanData,
pub name: String,
fn lower(self, tcx: TyCtxt) -> MacroUseData {
MacroUseData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
name: self.name,
qualname: self.qualname,
- callee_span: SpanData::from_span(self.callee_span, tcx.sess.codemap()),
+ callee_span: span_from_span(self.callee_span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
}
}
}
/// Data about a method call.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct MethodCallData {
pub span: SpanData,
pub scope: DefId,
fn lower(self, tcx: TyCtxt) -> MethodCallData {
MethodCallData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
ref_id: self.ref_id,
decl_id: self.decl_id,
}
/// Data for method declarations (methods with a body are treated as functions).
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct MethodData {
pub id: DefId,
pub name: String,
fn lower(self, tcx: TyCtxt) -> MethodData {
MethodData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
name: self.name,
scope: make_def_id(self.scope, &tcx.hir),
id: make_def_id(self.id, &tcx.hir),
}
/// Data for modules.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct ModData {
pub id: DefId,
pub name: String,
id: make_def_id(self.id, &tcx.hir),
name: self.name,
qualname: self.qualname,
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
filename: self.filename,
items: self.items.into_iter().map(|id| make_def_id(id, &tcx.hir)).collect(),
}
/// Data for a reference to a module.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct ModRefData {
pub span: SpanData,
pub scope: DefId,
fn lower(self, tcx: TyCtxt) -> ModRefData {
ModRefData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
ref_id: self.ref_id,
qualname: self.qualname,
}
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct StructData {
pub span: SpanData,
pub name: String,
fn lower(self, tcx: TyCtxt) -> StructData {
StructData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
name: self.name,
id: make_def_id(self.id, &tcx.hir),
ctor_id: make_def_id(self.ctor_id, &tcx.hir),
}
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct StructVariantData {
pub span: SpanData,
pub name: String,
fn lower(self, tcx: TyCtxt) -> StructVariantData {
StructVariantData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
name: self.name,
id: make_def_id(self.id, &tcx.hir),
qualname: self.qualname,
}
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct TraitData {
pub span: SpanData,
pub name: String,
fn lower(self, tcx: TyCtxt) -> TraitData {
TraitData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
name: self.name,
id: make_def_id(self.id, &tcx.hir),
qualname: self.qualname,
}
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct TupleVariantData {
pub span: SpanData,
pub id: DefId,
fn lower(self, tcx: TyCtxt) -> TupleVariantData {
TupleVariantData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
id: make_def_id(self.id, &tcx.hir),
name: self.name,
qualname: self.qualname,
}
/// Data for a typedef.
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct TypeDefData {
pub id: DefId,
pub name: String,
TypeDefData {
id: make_def_id(self.id, &tcx.hir),
name: self.name,
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
qualname: self.qualname,
value: self.value,
visibility: self.visibility,
}
/// Data for a reference to a type or trait.
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct TypeRefData {
pub span: SpanData,
pub scope: DefId,
fn lower(self, tcx: TyCtxt) -> TypeRefData {
TypeRefData {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
ref_id: self.ref_id,
qualname: self.qualname,
}
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct UseData {
pub id: DefId,
pub span: SpanData,
fn lower(self, tcx: TyCtxt) -> UseData {
UseData {
id: make_def_id(self.id, &tcx.hir),
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
name: self.name,
mod_id: self.mod_id,
scope: make_def_id(self.scope, &tcx.hir),
}
}
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct UseGlobData {
pub id: DefId,
pub span: SpanData,
fn lower(self, tcx: TyCtxt) -> UseGlobData {
UseGlobData {
id: make_def_id(self.id, &tcx.hir),
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
names: self.names,
scope: make_def_id(self.scope, &tcx.hir),
visibility: self.visibility,
}
/// Data for local and global variables (consts and statics).
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct VariableData {
pub id: DefId,
pub name: String,
kind: self.kind,
name: self.name,
qualname: self.qualname,
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
value: self.value,
type_value: self.type_value,
/// Data for the use of some item (e.g., the use of a local variable, which
/// will refer to that variables declaration (by ref_id)).
-#[derive(Debug, RustcEncodable)]
+#[derive(Debug)]
pub struct VariableRefData {
pub name: String,
pub span: SpanData,
fn lower(self, tcx: TyCtxt) -> VariableRefData {
VariableRefData {
name: self.name,
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
scope: make_def_id(self.scope, &tcx.hir),
ref_id: self.ref_id,
}
}
}
-#[derive(Clone, Debug, RustcEncodable)]
+#[derive(Clone, Debug)]
pub struct Signature {
pub span: SpanData,
pub text: String,
fn lower(self, tcx: TyCtxt) -> Signature {
Signature {
- span: SpanData::from_span(self.span, tcx.sess.codemap()),
+ span: span_from_span(self.span, tcx.sess.codemap()),
text: self.text,
ident_start: self.ident_start,
ident_end: self.ident_end,
use std::io::Write;
-use rustc::hir::def_id::DefId;
use rustc_serialize::json::as_json;
use external_data::*;
-use data::{VariableKind, Visibility, SigElement};
+use data::{VariableKind, Visibility};
use dump::Dump;
-use super::Format;
+use json_dumper::id_from_def_id;
+
+use rls_data::{Analysis, Import, ImportKind, Def, DefKind, CratePreludeData};
// A dumper to dump a restricted set of JSON information, designed for use with
// information here, and (for example) generate Rustdoc URLs, but don't need
// information for navigating the source of the crate.
// Relative to the regular JSON save-analysis info, this form is filtered to
-// remove non-visible items, but includes some extra info for items (e.g., the
-// parent field for finding the struct to which a field belongs).
+// remove non-visible items.
pub struct JsonApiDumper<'b, W: Write + 'b> {
output: &'b mut W,
result: Analysis,
macro_rules! impl_fn {
($fn_name: ident, $data_type: ident, $bucket: ident) => {
fn $fn_name(&mut self, data: $data_type) {
- if let Some(datum) = From::from(data) {
+ if let Some(datum) = data.into() {
self.result.$bucket.push(datum);
}
}
fn impl_data(&mut self, data: ImplData) {
if data.self_ref.is_some() {
- self.result.relations.push(From::from(data));
+ self.result.relations.push(data.into());
}
}
fn inheritance(&mut self, data: InheritanceData) {
- self.result.relations.push(From::from(data));
+ self.result.relations.push(data.into());
}
}
// method, but not the supplied method). In both cases, we are currently
// ignoring it.
-#[derive(Debug, RustcEncodable)]
-struct Analysis {
- kind: Format,
- prelude: Option<CratePreludeData>,
- imports: Vec<Import>,
- defs: Vec<Def>,
- relations: Vec<Relation>,
- // These two fields are dummies so that clients can parse the two kinds of
- // JSON data in the same way.
- refs: Vec<()>,
- macro_refs: Vec<()>,
-}
-
-impl Analysis {
- fn new() -> Analysis {
- Analysis {
- kind: Format::JsonApi,
- prelude: None,
- imports: vec![],
- defs: vec![],
- relations: vec![],
- refs: vec![],
- macro_refs: vec![],
- }
- }
-}
-
-// DefId::index is a newtype and so the JSON serialisation is ugly. Therefore
-// we use our own Id which is the same, but without the newtype.
-#[derive(Debug, RustcEncodable)]
-struct Id {
- krate: u32,
- index: u32,
-}
-
-impl From<DefId> for Id {
- fn from(id: DefId) -> Id {
- Id {
- krate: id.krate.as_u32(),
- index: id.index.as_u32(),
- }
- }
-}
-
-#[derive(Debug, RustcEncodable)]
-struct Import {
- kind: ImportKind,
- id: Id,
- span: SpanData,
- name: String,
- value: String,
-}
-
-#[derive(Debug, RustcEncodable)]
-enum ImportKind {
- Use,
- GlobUse,
-}
-
-impl From<UseData> for Option<Import> {
- fn from(data: UseData) -> Option<Import> {
- match data.visibility {
+impl Into<Option<Import>> for UseData {
+ fn into(self) -> Option<Import> {
+ match self.visibility {
Visibility::Public => Some(Import {
kind: ImportKind::Use,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
+ ref_id: self.mod_id.map(|id| id_from_def_id(id)),
+ span: self.span,
+ name: self.name,
value: String::new(),
}),
_ => None,
}
}
}
-impl From<UseGlobData> for Option<Import> {
- fn from(data: UseGlobData) -> Option<Import> {
- match data.visibility {
+impl Into<Option<Import>> for UseGlobData {
+ fn into(self) -> Option<Import> {
+ match self.visibility {
Visibility::Public => Some(Import {
kind: ImportKind::GlobUse,
- id: From::from(data.id),
- span: data.span,
+ ref_id: None,
+ span: self.span,
name: "*".to_owned(),
- value: data.names.join(", "),
+ value: self.names.join(", "),
}),
_ => None,
}
}
}
-#[derive(Debug, RustcEncodable)]
-struct Def {
- kind: DefKind,
- id: Id,
- span: SpanData,
- name: String,
- qualname: String,
- value: String,
- parent: Option<Id>,
- children: Vec<Id>,
- decl_id: Option<Id>,
- docs: String,
- sig: Option<JsonSignature>,
-}
-
-#[derive(Debug, RustcEncodable)]
-enum DefKind {
- // value = variant names
- Enum,
- // value = enum name + variant name + types
- Tuple,
- // value = [enum name +] name + fields
- Struct,
- // value = signature
- Trait,
- // value = type + generics
- Function,
- // value = type + generics
- Method,
- // No id, no value.
- Macro,
- // value = file_name
- Mod,
- // value = aliased type
- Type,
- // value = type and init expression (for all variable kinds).
- Static,
- Const,
- Field,
-}
-
-impl From<EnumData> for Option<Def> {
- fn from(data: EnumData) -> Option<Def> {
- match data.visibility {
+impl Into<Option<Def>> for EnumData {
+ fn into(self) -> Option<Def> {
+ match self.visibility {
Visibility::Public => Some(Def {
kind: DefKind::Enum,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
parent: None,
- children: data.variants.into_iter().map(|id| From::from(id)).collect(),
+ children: self.variants.into_iter().map(|id| id_from_def_id(id)).collect(),
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: vec![],
}),
_ => None,
}
}
}
-impl From<TupleVariantData> for Option<Def> {
- fn from(data: TupleVariantData) -> Option<Def> {
+impl Into<Option<Def>> for TupleVariantData {
+ fn into(self) -> Option<Def> {
Some(Def {
kind: DefKind::Tuple,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
- parent: data.parent.map(|id| From::from(id)),
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: self.parent.map(|id| id_from_def_id(id)),
children: vec![],
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: vec![],
})
}
}
-impl From<StructVariantData> for Option<Def> {
- fn from(data: StructVariantData) -> Option<Def> {
+impl Into<Option<Def>> for StructVariantData {
+ fn into(self) -> Option<Def> {
Some(Def {
kind: DefKind::Struct,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
- parent: data.parent.map(|id| From::from(id)),
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: self.parent.map(|id| id_from_def_id(id)),
children: vec![],
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: vec![],
})
}
}
-impl From<StructData> for Option<Def> {
- fn from(data: StructData) -> Option<Def> {
- match data.visibility {
+impl Into<Option<Def>> for StructData {
+ fn into(self) -> Option<Def> {
+ match self.visibility {
Visibility::Public => Some(Def {
kind: DefKind::Struct,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
parent: None,
- children: data.fields.into_iter().map(|id| From::from(id)).collect(),
+ children: self.fields.into_iter().map(|id| id_from_def_id(id)).collect(),
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: vec![],
}),
_ => None,
}
}
}
-impl From<TraitData> for Option<Def> {
- fn from(data: TraitData) -> Option<Def> {
- match data.visibility {
+impl Into<Option<Def>> for TraitData {
+ fn into(self) -> Option<Def> {
+ match self.visibility {
Visibility::Public => Some(Def {
kind: DefKind::Trait,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
- children: data.items.into_iter().map(|id| From::from(id)).collect(),
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ children: self.items.into_iter().map(|id| id_from_def_id(id)).collect(),
parent: None,
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: vec![],
}),
_ => None,
}
}
}
-impl From<FunctionData> for Option<Def> {
- fn from(data: FunctionData) -> Option<Def> {
- match data.visibility {
+impl Into<Option<Def>> for FunctionData {
+ fn into(self) -> Option<Def> {
+ match self.visibility {
Visibility::Public => Some(Def {
kind: DefKind::Function,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
children: vec![],
- parent: data.parent.map(|id| From::from(id)),
+ parent: self.parent.map(|id| id_from_def_id(id)),
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: vec![],
}),
_ => None,
}
}
}
-impl From<MethodData> for Option<Def> {
- fn from(data: MethodData) -> Option<Def> {
- match data.visibility {
+impl Into<Option<Def>> for MethodData {
+ fn into(self) -> Option<Def> {
+ match self.visibility {
Visibility::Public => Some(Def {
kind: DefKind::Method,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
children: vec![],
- parent: data.parent.map(|id| From::from(id)),
- decl_id: data.decl_id.map(|id| From::from(id)),
- docs: data.docs,
- sig: Some(From::from(data.sig)),
+ parent: self.parent.map(|id| id_from_def_id(id)),
+ decl_id: self.decl_id.map(|id| id_from_def_id(id)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: vec![],
}),
_ => None,
}
}
}
-impl From<MacroData> for Option<Def> {
- fn from(data: MacroData) -> Option<Def> {
+impl Into<Option<Def>> for MacroData {
+ fn into(self) -> Option<Def> {
Some(Def {
kind: DefKind::Macro,
- id: From::from(null_def_id()),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
+ id: id_from_def_id(null_def_id()),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
value: String::new(),
children: vec![],
parent: None,
decl_id: None,
- docs: data.docs,
+ docs: self.docs,
sig: None,
+ attributes: vec![],
})
}
}
-impl From<ModData> for Option<Def> {
- fn from(data:ModData) -> Option<Def> {
- match data.visibility {
+impl Into<Option<Def>> for ModData {
+ fn into(self) -> Option<Def> {
+ match self.visibility {
Visibility::Public => Some(Def {
kind: DefKind::Mod,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.filename,
- children: data.items.into_iter().map(|id| From::from(id)).collect(),
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.filename,
+ children: self.items.into_iter().map(|id| id_from_def_id(id)).collect(),
parent: None,
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: vec![],
}),
_ => None,
}
}
}
-impl From<TypeDefData> for Option<Def> {
- fn from(data: TypeDefData) -> Option<Def> {
- match data.visibility {
+impl Into<Option<Def>> for TypeDefData {
+ fn into(self) -> Option<Def> {
+ match self.visibility {
Visibility::Public => Some(Def {
kind: DefKind::Type,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
children: vec![],
- parent: data.parent.map(|id| From::from(id)),
+ parent: self.parent.map(|id| id_from_def_id(id)),
decl_id: None,
docs: String::new(),
- sig: data.sig.map(|s| From::from(s)),
+ sig: self.sig.map(|s| s.into()),
+ attributes: vec![],
}),
_ => None,
}
}
}
-impl From<VariableData> for Option<Def> {
- fn from(data: VariableData) -> Option<Def> {
- match data.visibility {
+impl Into<Option<Def>> for VariableData {
+ fn into(self) -> Option<Def> {
+ match self.visibility {
Visibility::Public => Some(Def {
- kind: match data.kind {
+ kind: match self.kind {
VariableKind::Static => DefKind::Static,
VariableKind::Const => DefKind::Const,
VariableKind::Local => { return None }
VariableKind::Field => DefKind::Field,
},
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
children: vec![],
- parent: data.parent.map(|id| From::from(id)),
+ parent: self.parent.map(|id| id_from_def_id(id)),
decl_id: None,
- docs: data.docs,
- sig: data.sig.map(|s| From::from(s)),
+ docs: self.docs,
+ sig: self.sig.map(|s| s.into()),
+ attributes: vec![],
}),
_ => None,
}
}
}
-
-#[derive(Debug, RustcEncodable)]
-struct Relation {
- span: SpanData,
- kind: RelationKind,
- from: Id,
- to: Id,
-}
-
-#[derive(Debug, RustcEncodable)]
-enum RelationKind {
- Impl,
- SuperTrait,
-}
-
-impl From<ImplData> for Relation {
- fn from(data: ImplData) -> Relation {
- Relation {
- span: data.span,
- kind: RelationKind::Impl,
- from: From::from(data.self_ref.unwrap_or(null_def_id())),
- to: From::from(data.trait_ref.unwrap_or(null_def_id())),
- }
- }
-}
-
-impl From<InheritanceData> for Relation {
- fn from(data: InheritanceData) -> Relation {
- Relation {
- span: data.span,
- kind: RelationKind::SuperTrait,
- from: From::from(data.base_id),
- to: From::from(data.deriv_id),
- }
- }
-}
-
-#[derive(Debug, RustcEncodable)]
-pub struct JsonSignature {
- span: SpanData,
- text: String,
- ident_start: usize,
- ident_end: usize,
- defs: Vec<JsonSigElement>,
- refs: Vec<JsonSigElement>,
-}
-
-impl From<Signature> for JsonSignature {
- fn from(data: Signature) -> JsonSignature {
- JsonSignature {
- span: data.span,
- text: data.text,
- ident_start: data.ident_start,
- ident_end: data.ident_end,
- defs: data.defs.into_iter().map(|s| From::from(s)).collect(),
- refs: data.refs.into_iter().map(|s| From::from(s)).collect(),
- }
- }
-}
-
-#[derive(Debug, RustcEncodable)]
-pub struct JsonSigElement {
- id: Id,
- start: usize,
- end: usize,
-}
-
-impl From<SigElement> for JsonSigElement {
- fn from(data: SigElement) -> JsonSigElement {
- JsonSigElement {
- id: From::from(data.id),
- start: data.start,
- end: data.end,
- }
- }
-}
use rustc::hir::def_id::DefId;
use rustc_serialize::json::as_json;
+use rls_data::{self, Id, Analysis, Import, ImportKind, Def, DefKind, Ref, RefKind, MacroRef,
+ Relation, RelationKind, Signature, SigElement, CratePreludeData};
+use rls_span::{Column, Row};
+
+use external_data;
use external_data::*;
-use data::{VariableKind, SigElement};
+use data::{self, VariableKind};
use dump::Dump;
-use super::Format;
pub struct JsonDumper<'b, W: Write + 'b> {
output: &'b mut W,
macro_rules! impl_fn {
($fn_name: ident, $data_type: ident, $bucket: ident) => {
fn $fn_name(&mut self, data: $data_type) {
- self.result.$bucket.push(From::from(data));
+ self.result.$bucket.push(data.into());
}
}
}
impl_fn!(macro_use, MacroUseData, macro_refs);
fn mod_data(&mut self, data: ModData) {
- let id: Id = From::from(data.id);
+ let id: Id = id_from_def_id(data.id);
let mut def = Def {
kind: DefKind::Mod,
id: id,
- span: data.span,
+ span: data.span.into(),
name: data.name,
qualname: data.qualname,
value: data.filename,
- children: data.items.into_iter().map(|id| From::from(id)).collect(),
+ parent: None,
+ children: data.items.into_iter().map(|id| id_from_def_id(id)).collect(),
decl_id: None,
docs: data.docs,
- sig: Some(From::from(data.sig)),
- attributes: data.attributes,
+ sig: Some(data.sig.into()),
+ attributes: data.attributes.into_iter().map(|a| a.into()).collect(),
};
- if def.span.file_name != def.value {
+ if def.span.file_name.to_str().unwrap() != def.value {
// If the module is an out-of-line defintion, then we'll make the
// defintion the first character in the module's file and turn the
// the declaration into a reference to it.
ref_id: id,
};
self.result.refs.push(rf);
- def.span = SpanData {
- file_name: def.value.clone(),
+ def.span = rls_data::SpanData {
+ file_name: def.value.clone().into(),
byte_start: 0,
byte_end: 0,
- line_start: 1,
- line_end: 1,
- column_start: 1,
- column_end: 1,
+ line_start: Row::new_one_indexed(1),
+ line_end: Row::new_one_indexed(1),
+ column_start: Column::new_one_indexed(1),
+ column_end: Column::new_one_indexed(1),
}
}
fn impl_data(&mut self, data: ImplData) {
if data.self_ref.is_some() {
- self.result.relations.push(From::from(data));
+ self.result.relations.push(data.into());
}
}
fn inheritance(&mut self, data: InheritanceData) {
- self.result.relations.push(From::from(data));
+ self.result.relations.push(data.into());
}
}
// method, but not the supplied method). In both cases, we are currently
// ignoring it.
-#[derive(Debug, RustcEncodable)]
-struct Analysis {
- kind: Format,
- prelude: Option<CratePreludeData>,
- imports: Vec<Import>,
- defs: Vec<Def>,
- refs: Vec<Ref>,
- macro_refs: Vec<MacroRef>,
- relations: Vec<Relation>,
-}
-
-impl Analysis {
- fn new() -> Analysis {
- Analysis {
- kind: Format::Json,
- prelude: None,
- imports: vec![],
- defs: vec![],
- refs: vec![],
- macro_refs: vec![],
- relations: vec![],
- }
- }
-}
-
// DefId::index is a newtype and so the JSON serialisation is ugly. Therefore
// we use our own Id which is the same, but without the newtype.
-#[derive(Clone, Copy, Debug, RustcEncodable)]
-struct Id {
- krate: u32,
- index: u32,
-}
-
-impl From<DefId> for Id {
- fn from(id: DefId) -> Id {
- Id {
- krate: id.krate.as_u32(),
- index: id.index.as_u32(),
- }
+pub fn id_from_def_id(id: DefId) -> Id {
+ Id {
+ krate: id.krate.as_u32(),
+ index: id.index.as_u32(),
}
}
-#[derive(Debug, RustcEncodable)]
-struct Import {
- kind: ImportKind,
- ref_id: Option<Id>,
- span: SpanData,
- name: String,
- value: String,
-}
-
-#[derive(Debug, RustcEncodable)]
-enum ImportKind {
- ExternCrate,
- Use,
- GlobUse,
-}
-
-impl From<ExternCrateData> for Import {
- fn from(data: ExternCrateData) -> Import {
+impl Into<Import> for ExternCrateData {
+ fn into(self) -> Import {
Import {
kind: ImportKind::ExternCrate,
ref_id: None,
- span: data.span,
- name: data.name,
+ span: self.span,
+ name: self.name,
value: String::new(),
}
}
}
-impl From<UseData> for Import {
- fn from(data: UseData) -> Import {
+impl Into<Import> for UseData {
+ fn into(self) -> Import {
Import {
kind: ImportKind::Use,
- ref_id: data.mod_id.map(|id| From::from(id)),
- span: data.span,
- name: data.name,
+ ref_id: self.mod_id.map(|id| id_from_def_id(id)),
+ span: self.span,
+ name: self.name,
value: String::new(),
}
}
}
-impl From<UseGlobData> for Import {
- fn from(data: UseGlobData) -> Import {
+impl Into<Import> for UseGlobData {
+ fn into(self) -> Import {
Import {
kind: ImportKind::GlobUse,
ref_id: None,
- span: data.span,
+ span: self.span,
name: "*".to_owned(),
- value: data.names.join(", "),
+ value: self.names.join(", "),
}
}
}
-#[derive(Debug, RustcEncodable)]
-struct Def {
- kind: DefKind,
- id: Id,
- span: SpanData,
- name: String,
- qualname: String,
- value: String,
- children: Vec<Id>,
- decl_id: Option<Id>,
- docs: String,
- sig: Option<JsonSignature>,
- attributes: Vec<Attribute>,
-}
-
-#[derive(Debug, RustcEncodable)]
-enum DefKind {
- // value = variant names
- Enum,
- // value = enum name + variant name + types
- Tuple,
- // value = [enum name +] name + fields
- Struct,
- // value = signature
- Trait,
- // value = type + generics
- Function,
- // value = type + generics
- Method,
- // No id, no value.
- Macro,
- // value = file_name
- Mod,
- // value = aliased type
- Type,
- // value = type and init expression (for all variable kinds).
- Local,
- Static,
- Const,
- Field,
-}
-
-impl From<EnumData> for Def {
- fn from(data: EnumData) -> Def {
+impl Into<Def> for EnumData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Enum,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
- children: data.variants.into_iter().map(|id| From::from(id)).collect(),
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: None,
+ children: self.variants.into_iter().map(|id| id_from_def_id(id)).collect(),
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
- attributes: data.attributes,
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: self.attributes,
}
}
}
-impl From<TupleVariantData> for Def {
- fn from(data: TupleVariantData) -> Def {
+impl Into<Def> for TupleVariantData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Tuple,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: None,
children: vec![],
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
- attributes: data.attributes,
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: self.attributes,
}
}
}
-impl From<StructVariantData> for Def {
- fn from(data: StructVariantData) -> Def {
+impl Into<Def> for StructVariantData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Struct,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: None,
children: vec![],
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
- attributes: data.attributes,
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: self.attributes,
}
}
}
-impl From<StructData> for Def {
- fn from(data: StructData) -> Def {
+impl Into<Def> for StructData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Struct,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
- children: data.fields.into_iter().map(|id| From::from(id)).collect(),
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: None,
+ children: self.fields.into_iter().map(|id| id_from_def_id(id)).collect(),
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
- attributes: data.attributes,
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: self.attributes,
}
}
}
-impl From<TraitData> for Def {
- fn from(data: TraitData) -> Def {
+impl Into<Def> for TraitData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Trait,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
- children: data.items.into_iter().map(|id| From::from(id)).collect(),
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: None,
+ children: self.items.into_iter().map(|id| id_from_def_id(id)).collect(),
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
- attributes: data.attributes,
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: self.attributes,
}
}
}
-impl From<FunctionData> for Def {
- fn from(data: FunctionData) -> Def {
+impl Into<Def> for FunctionData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Function,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: None,
children: vec![],
decl_id: None,
- docs: data.docs,
- sig: Some(From::from(data.sig)),
- attributes: data.attributes,
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: self.attributes,
}
}
}
-impl From<MethodData> for Def {
- fn from(data: MethodData) -> Def {
+impl Into<Def> for MethodData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Method,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: None,
children: vec![],
- decl_id: data.decl_id.map(|id| From::from(id)),
- docs: data.docs,
- sig: Some(From::from(data.sig)),
- attributes: data.attributes,
+ decl_id: self.decl_id.map(|id| id_from_def_id(id)),
+ docs: self.docs,
+ sig: Some(self.sig.into()),
+ attributes: self.attributes,
}
}
}
-impl From<MacroData> for Def {
- fn from(data: MacroData) -> Def {
+impl Into<Def> for MacroData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Macro,
- id: From::from(null_def_id()),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
+ id: id_from_def_id(null_def_id()),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
value: String::new(),
+ parent: None,
children: vec![],
decl_id: None,
- docs: data.docs,
+ docs: self.docs,
sig: None,
attributes: vec![],
}
}
}
-impl From<TypeDefData> for Def {
- fn from(data: TypeDefData) -> Def {
+impl Into<Def> for TypeDefData {
+ fn into(self) -> Def {
Def {
kind: DefKind::Type,
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.value,
+ parent: None,
children: vec![],
decl_id: None,
docs: String::new(),
- sig: data.sig.map(|s| From::from(s)),
- attributes: data.attributes,
+ sig: self.sig.map(|s| s.into()),
+ attributes: self.attributes,
}
}
}
-impl From<VariableData> for Def {
- fn from(data: VariableData) -> Def {
+impl Into<Def> for VariableData {
+ fn into(self) -> Def {
Def {
- kind: match data.kind {
+ kind: match self.kind {
VariableKind::Static => DefKind::Static,
VariableKind::Const => DefKind::Const,
VariableKind::Local => DefKind::Local,
VariableKind::Field => DefKind::Field,
},
- id: From::from(data.id),
- span: data.span,
- name: data.name,
- qualname: data.qualname,
- value: data.type_value,
+ id: id_from_def_id(self.id),
+ span: self.span,
+ name: self.name,
+ qualname: self.qualname,
+ value: self.type_value,
+ parent: None,
children: vec![],
decl_id: None,
- docs: data.docs,
+ docs: self.docs,
sig: None,
- attributes: data.attributes,
+ attributes: self.attributes,
}
}
}
-#[derive(Debug, RustcEncodable)]
-enum RefKind {
- Function,
- Mod,
- Type,
- Variable,
-}
-
-#[derive(Debug, RustcEncodable)]
-struct Ref {
- kind: RefKind,
- span: SpanData,
- ref_id: Id,
-}
-
-impl From<FunctionRefData> for Ref {
- fn from(data: FunctionRefData) -> Ref {
+impl Into<Ref> for FunctionRefData {
+ fn into(self) -> Ref {
Ref {
kind: RefKind::Function,
- span: data.span,
- ref_id: From::from(data.ref_id),
+ span: self.span,
+ ref_id: id_from_def_id(self.ref_id),
}
}
}
-impl From<FunctionCallData> for Ref {
- fn from(data: FunctionCallData) -> Ref {
+impl Into<Ref> for FunctionCallData {
+ fn into(self) -> Ref {
Ref {
kind: RefKind::Function,
- span: data.span,
- ref_id: From::from(data.ref_id),
+ span: self.span,
+ ref_id: id_from_def_id(self.ref_id),
}
}
}
-impl From<MethodCallData> for Ref {
- fn from(data: MethodCallData) -> Ref {
+impl Into<Ref> for MethodCallData {
+ fn into(self) -> Ref {
Ref {
kind: RefKind::Function,
- span: data.span,
- ref_id: From::from(data.ref_id.or(data.decl_id).unwrap_or(null_def_id())),
+ span: self.span,
+ ref_id: id_from_def_id(self.ref_id.or(self.decl_id).unwrap_or(null_def_id())),
}
}
}
-impl From<ModRefData> for Ref {
- fn from(data: ModRefData) -> Ref {
+impl Into<Ref> for ModRefData {
+ fn into(self) -> Ref {
Ref {
kind: RefKind::Mod,
- span: data.span,
- ref_id: From::from(data.ref_id.unwrap_or(null_def_id())),
+ span: self.span,
+ ref_id: id_from_def_id(self.ref_id.unwrap_or(null_def_id())),
}
}
}
-impl From<TypeRefData> for Ref {
- fn from(data: TypeRefData) -> Ref {
+impl Into<Ref> for TypeRefData {
+ fn into(self) -> Ref {
Ref {
kind: RefKind::Type,
- span: data.span,
- ref_id: From::from(data.ref_id.unwrap_or(null_def_id())),
+ span: self.span,
+ ref_id: id_from_def_id(self.ref_id.unwrap_or(null_def_id())),
}
}
}
-impl From<VariableRefData> for Ref {
- fn from(data: VariableRefData) -> Ref {
+impl Into<Ref> for VariableRefData {
+ fn into(self) -> Ref {
Ref {
kind: RefKind::Variable,
- span: data.span,
- ref_id: From::from(data.ref_id),
+ span: self.span,
+ ref_id: id_from_def_id(self.ref_id),
}
}
}
-#[derive(Debug, RustcEncodable)]
-struct MacroRef {
- span: SpanData,
- qualname: String,
- callee_span: SpanData,
-}
-
-impl From<MacroUseData> for MacroRef {
- fn from(data: MacroUseData) -> MacroRef {
+impl Into<MacroRef> for MacroUseData {
+ fn into(self) -> MacroRef {
MacroRef {
- span: data.span,
- qualname: data.qualname,
- callee_span: data.callee_span,
+ span: self.span,
+ qualname: self.qualname,
+ callee_span: self.callee_span.into(),
}
}
}
-#[derive(Debug, RustcEncodable)]
-struct Relation {
- span: SpanData,
- kind: RelationKind,
- from: Id,
- to: Id,
-}
-
-#[derive(Debug, RustcEncodable)]
-enum RelationKind {
- Impl,
- SuperTrait,
-}
-
-impl From<ImplData> for Relation {
- fn from(data: ImplData) -> Relation {
+impl Into<Relation> for ImplData {
+ fn into(self) -> Relation {
Relation {
- span: data.span,
+ span: self.span,
kind: RelationKind::Impl,
- from: From::from(data.self_ref.unwrap_or(null_def_id())),
- to: From::from(data.trait_ref.unwrap_or(null_def_id())),
+ from: id_from_def_id(self.self_ref.unwrap_or(null_def_id())),
+ to: id_from_def_id(self.trait_ref.unwrap_or(null_def_id())),
}
}
}
-impl From<InheritanceData> for Relation {
- fn from(data: InheritanceData) -> Relation {
+impl Into<Relation> for InheritanceData {
+ fn into(self) -> Relation {
Relation {
- span: data.span,
+ span: self.span,
kind: RelationKind::SuperTrait,
- from: From::from(data.base_id),
- to: From::from(data.deriv_id),
+ from: id_from_def_id(self.base_id),
+ to: id_from_def_id(self.deriv_id),
}
}
}
-#[derive(Debug, RustcEncodable)]
-pub struct JsonSignature {
- span: SpanData,
- text: String,
- ident_start: usize,
- ident_end: usize,
- defs: Vec<JsonSigElement>,
- refs: Vec<JsonSigElement>,
-}
-
-impl From<Signature> for JsonSignature {
- fn from(data: Signature) -> JsonSignature {
- JsonSignature {
- span: data.span,
- text: data.text,
- ident_start: data.ident_start,
- ident_end: data.ident_end,
- defs: data.defs.into_iter().map(|s| From::from(s)).collect(),
- refs: data.refs.into_iter().map(|s| From::from(s)).collect(),
+impl Into<Signature> for external_data::Signature {
+ fn into(self) -> Signature {
+ Signature {
+ span: self.span,
+ text: self.text,
+ ident_start: self.ident_start,
+ ident_end: self.ident_end,
+ defs: self.defs.into_iter().map(|s| s.into()).collect(),
+ refs: self.refs.into_iter().map(|s| s.into()).collect(),
}
}
}
-#[derive(Debug, RustcEncodable)]
-pub struct JsonSigElement {
- id: Id,
- start: usize,
- end: usize,
-}
-
-impl From<SigElement> for JsonSigElement {
- fn from(data: SigElement) -> JsonSigElement {
- JsonSigElement {
- id: From::from(data.id),
- start: data.start,
- end: data.end,
+impl Into<SigElement> for data::SigElement {
+ fn into(self) -> SigElement {
+ SigElement {
+ id: id_from_def_id(self.id),
+ start: self.start,
+ end: self.end,
}
}
}
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;
-extern crate serialize as rustc_serialize;
+extern crate rustc_serialize;
extern crate syntax_pos;
+extern crate rls_data;
+extern crate rls_span;
+
mod csv_dumper;
mod json_api_dumper;
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::parse::lexer::comments::strip_doc_comment_decoration;
use syntax::parse::token;
-use syntax::symbol::{Symbol, keywords};
+use syntax::symbol::keywords;
use syntax::visit::{self, Visitor};
use syntax::print::pprust::{ty_to_string, arg_to_string};
use syntax::codemap::MacroAttribute;
}
fn docs_for_attrs(attrs: &[Attribute]) -> String {
- let doc = Symbol::intern("doc");
let mut result = String::new();
for attr in attrs {
- if attr.name() == doc {
+ if attr.check_name("doc") {
if let Some(val) = attr.value_str() {
if attr.is_sugared_doc {
result.push_str(&strip_doc_comment_decoration(&val.as_str()));
},
};
- if let Err(e) = rustc::util::fs::create_dir_racy(&root_path) {
+ if let Err(e) = std::fs::create_dir_all(&root_path) {
tcx.sess.err(&format!("Could not create directory {}: {}",
root_path.display(),
e));
pub use self::attr_impl::ArgAttribute;
#[allow(non_upper_case_globals)]
+#[allow(unused)]
mod attr_impl {
// The subset of llvm::Attribute needed for arguments, packed into a bitfield.
bitflags! {
self.kind == ArgKind::Ignore
}
- /// Get the LLVM type for an lvalue of the original Rust type of
- /// this argument/return, i.e. the result of `type_of::type_of`.
- pub fn memory_ty(&self, ccx: &CrateContext) -> Type {
- if self.original_ty == Type::i1(ccx) {
- Type::i8(ccx)
- } else {
- self.original_ty
- }
- }
-
/// Store a direct/indirect value described by this ArgType into a
/// lvalue for the original Rust type of this argument/return.
/// Can be used for both storing formal arguments into Rust variables
fn_ty
}
- pub fn unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+ pub fn new_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
sig: ty::FnSig<'tcx>,
extra_args: &[Ty<'tcx>]) -> FnType {
+ let mut fn_ty = FnType::unadjusted(ccx, sig, extra_args);
+ // Don't pass the vtable, it's not an argument of the virtual fn.
+ fn_ty.args[1].ignore();
+ fn_ty.adjust_for_abi(ccx, sig);
+ fn_ty
+ }
+
+ fn unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+ sig: ty::FnSig<'tcx>,
+ extra_args: &[Ty<'tcx>]) -> FnType {
use self::Abi::*;
let cconv = match ccx.sess().target.target.adjust_abi(sig.abi) {
RustIntrinsic | PlatformIntrinsic |
}
}
- pub fn adjust_for_abi<'a, 'tcx>(&mut self,
- ccx: &CrateContext<'a, 'tcx>,
- sig: ty::FnSig<'tcx>) {
+ fn adjust_for_abi<'a, 'tcx>(&mut self,
+ ccx: &CrateContext<'a, 'tcx>,
+ sig: ty::FnSig<'tcx>) {
let abi = sig.abi;
if abi == Abi::Unadjusted { return }
}
}
-/// Yield information about how to dispatch a case of the
-/// discriminant-like value returned by `trans_switch`.
-///
-/// This should ideally be less tightly tied to `_match`.
-pub fn trans_case<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, value: Disr) -> ValueRef {
- let l = bcx.ccx.layout_of(t);
- match *l {
- layout::CEnum { discr, .. }
- | layout::General { discr, .. }=> {
- C_integral(Type::from_integer(bcx.ccx, discr), value.0, true)
- }
- layout::RawNullablePointer { .. } |
- layout::StructWrappedNullablePointer { .. } => {
- assert!(value == Disr(0) || value == Disr(1));
- C_bool(bcx.ccx, value != Disr(0))
- }
- _ => {
- bug!("{} does not have a discriminant. Represented as {:#?}", t, l);
- }
- }
-}
-
/// Set the discriminant for a new value of the given case of the given
/// representation.
pub fn trans_set_discr<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, val: ValueRef, to: Disr) {
}
fn field(&self, attr: &ast::Attribute, name: &str) -> ast::Name {
- for item in attr.meta_item_list().unwrap_or(&[]) {
+ for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
if item.check_name(name) {
if let Some(value) = item.value_str() {
return value;
use syntax::ast;
use context::CrateContext;
+
/// Mark LLVM function to use provided inline heuristic.
#[inline]
pub fn inline(val: ValueRef, inline: InlineAttr) {
use context::SharedCrateContext;
use monomorphize::Instance;
use symbol_map::SymbolMap;
+use back::symbol_names::symbol_name;
use util::nodemap::FxHashMap;
use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
use rustc::session::config;
.exported_symbols(cnum)
.iter()
.map(|&def_id| {
- let name = Instance::mono(scx, def_id).symbol_name(scx);
+ let name = symbol_name(Instance::mono(scx.tcx(), def_id), scx);
let export_level = if special_runtime_crate {
// We can probably do better here by just ensuring that
// it has hidden visibility rather than public
}
}
- let instance = Instance::mono(scx, def_id);
+ let instance = Instance::mono(scx.tcx(), def_id);
symbol_map.get(TransItem::Fn(instance))
.map(str::to_owned)
- .unwrap_or_else(|| instance.symbol_name(scx))
+ .unwrap_or_else(|| symbol_name(instance, scx))
}
format!("h{:016x}", hasher.finish())
}
-impl<'a, 'tcx> Instance<'tcx> {
- pub fn symbol_name(self, scx: &SharedCrateContext<'a, 'tcx>) -> String {
- let Instance { def: def_id, substs } = self;
+pub fn symbol_name<'a, 'tcx>(instance: Instance<'tcx>,
+ scx: &SharedCrateContext<'a, 'tcx>) -> String {
+ let def_id = instance.def_id();
+ let substs = instance.substs;
- debug!("symbol_name(def_id={:?}, substs={:?})",
- def_id, substs);
+ debug!("symbol_name(def_id={:?}, substs={:?})",
+ def_id, substs);
- let node_id = scx.tcx().hir.as_local_node_id(def_id);
+ let node_id = scx.tcx().hir.as_local_node_id(def_id);
- if let Some(id) = node_id {
- if scx.sess().plugin_registrar_fn.get() == Some(id) {
- let svh = &scx.link_meta().crate_hash;
- let idx = def_id.index;
- return scx.sess().generate_plugin_registrar_symbol(svh, idx);
- }
- if scx.sess().derive_registrar_fn.get() == Some(id) {
- let svh = &scx.link_meta().crate_hash;
- let idx = def_id.index;
- return scx.sess().generate_derive_registrar_symbol(svh, idx);
- }
+ if let Some(id) = node_id {
+ if scx.sess().plugin_registrar_fn.get() == Some(id) {
+ let svh = &scx.link_meta().crate_hash;
+ let idx = def_id.index;
+ return scx.sess().generate_plugin_registrar_symbol(svh, idx);
}
-
- // FIXME(eddyb) Precompute a custom symbol name based on attributes.
- let attrs = scx.tcx().get_attrs(def_id);
- let is_foreign = if let Some(id) = node_id {
- match scx.tcx().hir.get(id) {
- hir_map::NodeForeignItem(_) => true,
- _ => false
- }
- } else {
- scx.sess().cstore.is_foreign_item(def_id)
- };
-
- if let Some(name) = weak_lang_items::link_name(&attrs) {
- return name.to_string();
+ if scx.sess().derive_registrar_fn.get() == Some(id) {
+ let svh = &scx.link_meta().crate_hash;
+ let idx = def_id.index;
+ return scx.sess().generate_derive_registrar_symbol(svh, idx);
}
+ }
- if is_foreign {
- if let Some(name) = attr::first_attr_value_str_by_name(&attrs, "link_name") {
- return name.to_string();
- }
- // Don't mangle foreign items.
- return scx.tcx().item_name(def_id).as_str().to_string();
+ // FIXME(eddyb) Precompute a custom symbol name based on attributes.
+ let attrs = scx.tcx().get_attrs(def_id);
+ let is_foreign = if let Some(id) = node_id {
+ match scx.tcx().hir.get(id) {
+ hir_map::NodeForeignItem(_) => true,
+ _ => false
}
+ } else {
+ scx.sess().cstore.is_foreign_item(def_id)
+ };
- if let Some(name) = attr::find_export_name_attr(scx.sess().diagnostic(), &attrs) {
- // Use provided name
+ if let Some(name) = weak_lang_items::link_name(&attrs) {
+ return name.to_string();
+ }
+
+ if is_foreign {
+ if let Some(name) = attr::first_attr_value_str_by_name(&attrs, "link_name") {
return name.to_string();
}
+ // Don't mangle foreign items.
+ return scx.tcx().item_name(def_id).as_str().to_string();
+ }
- if attr::contains_name(&attrs, "no_mangle") {
- // Don't mangle
- return scx.tcx().item_name(def_id).as_str().to_string();
- }
+ if let Some(name) = attr::find_export_name_attr(scx.sess().diagnostic(), &attrs) {
+ // Use provided name
+ return name.to_string();
+ }
- let def_path = scx.tcx().def_path(def_id);
-
- // We want to compute the "type" of this item. Unfortunately, some
- // kinds of items (e.g., closures) don't have an entry in the
- // item-type array. So walk back up the find the closest parent
- // that DOES have an entry.
- let mut ty_def_id = def_id;
- let instance_ty;
- loop {
- let key = scx.tcx().def_key(ty_def_id);
- match key.disambiguated_data.data {
- DefPathData::TypeNs(_) |
- DefPathData::ValueNs(_) => {
- instance_ty = scx.tcx().item_type(ty_def_id);
- break;
- }
- _ => {
- // if we're making a symbol for something, there ought
- // to be a value or type-def or something in there
- // *somewhere*
- ty_def_id.index = key.parent.unwrap_or_else(|| {
- bug!("finding type for {:?}, encountered def-id {:?} with no \
- parent", def_id, ty_def_id);
- });
- }
+ if attr::contains_name(&attrs, "no_mangle") {
+ // Don't mangle
+ return scx.tcx().item_name(def_id).as_str().to_string();
+ }
+
+ let def_path = scx.tcx().def_path(def_id);
+
+ // We want to compute the "type" of this item. Unfortunately, some
+ // kinds of items (e.g., closures) don't have an entry in the
+ // item-type array. So walk back up the find the closest parent
+ // that DOES have an entry.
+ let mut ty_def_id = def_id;
+ let instance_ty;
+ loop {
+ let key = scx.tcx().def_key(ty_def_id);
+ match key.disambiguated_data.data {
+ DefPathData::TypeNs(_) |
+ DefPathData::ValueNs(_) => {
+ instance_ty = scx.tcx().item_type(ty_def_id);
+ break;
+ }
+ _ => {
+ // if we're making a symbol for something, there ought
+ // to be a value or type-def or something in there
+ // *somewhere*
+ ty_def_id.index = key.parent.unwrap_or_else(|| {
+ bug!("finding type for {:?}, encountered def-id {:?} with no \
+ parent", def_id, ty_def_id);
+ });
}
}
+ }
- // Erase regions because they may not be deterministic when hashed
- // and should not matter anyhow.
- let instance_ty = scx.tcx().erase_regions(&instance_ty);
+ // Erase regions because they may not be deterministic when hashed
+ // and should not matter anyhow.
+ let instance_ty = scx.tcx().erase_regions(&instance_ty);
- let hash = get_symbol_hash(scx, &def_path, instance_ty, Some(substs));
+ let hash = get_symbol_hash(scx, &def_path, instance_ty, Some(substs));
- let mut buffer = SymbolPathBuffer {
- names: Vec::with_capacity(def_path.data.len())
- };
+ let mut buffer = SymbolPathBuffer {
+ names: Vec::with_capacity(def_path.data.len())
+ };
- item_path::with_forced_absolute_paths(|| {
- scx.tcx().push_item_path(&mut buffer, def_id);
- });
+ item_path::with_forced_absolute_paths(|| {
+ scx.tcx().push_item_path(&mut buffer, def_id);
+ });
- mangle(buffer.names.into_iter(), &hash)
- }
+ mangle(buffer.names.into_iter(), &hash)
}
struct SymbolPathBuffer {
use back::symbol_export::{self, ExportedSymbols};
use llvm::{Linkage, ValueRef, Vector, get_param};
use llvm;
-use rustc::hir::def_id::{DefId, LOCAL_CRATE};
+use rustc::hir::def_id::LOCAL_CRATE;
use middle::lang_items::StartFnLangItem;
-use rustc::ty::subst::Substs;
-use rustc::mir::tcx::LvalueTy;
-use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::ty::adjustment::CustomCoerceUnsized;
use rustc::dep_graph::{AssertDepGraphSafe, DepNode, WorkProduct};
use rustc::hir::map as hir_map;
use rustc::util::common::time;
use session::config::{self, NoDebugInfo};
use rustc_incremental::IncrementalHashesMap;
use session::{self, DataTypeKind, Session};
-use abi::{self, FnType};
+use abi;
use mir::lvalue::LvalueRef;
-use adt;
use attributes;
use builder::Builder;
-use callee::{Callee};
+use callee;
use common::{C_bool, C_bytes_in_context, C_i32, C_uint};
use collector::{self, TransItemCollectionMode};
use common::{C_struct_in_context, C_u64, C_undef};
use common::CrateContext;
-use common::{fulfill_obligation};
use common::{type_is_zero_size, val_ty};
use common;
use consts;
use debuginfo;
use declare;
use machine;
-use machine::{llalign_of_min, llsize_of};
+use machine::llsize_of;
use meth;
use mir;
use monomorphize::{self, Instance};
use type_::Type;
use type_of;
use value::Value;
-use Disr;
use util::nodemap::{NodeSet, FxHashMap, FxHashSet};
use libc::c_uint;
use std::rc::Rc;
use std::str;
use std::i32;
-use syntax_pos::{Span, DUMMY_SP};
+use syntax_pos::Span;
use syntax::attr;
use rustc::hir;
use rustc::ty::layout::{self, Layout};
}
}
-pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx>,
- source_ty: Ty<'tcx>,
- target_ty: Ty<'tcx>)
- -> CustomCoerceUnsized {
- let trait_ref = ty::Binder(ty::TraitRef {
- def_id: scx.tcx().lang_items.coerce_unsized_trait().unwrap(),
- substs: scx.tcx().mk_substs_trait(source_ty, &[target_ty])
- });
-
- match fulfill_obligation(scx, DUMMY_SP, trait_ref) {
- traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
- scx.tcx().custom_coerce_unsized_kind(impl_def_id)
- }
- vtable => {
- bug!("invalid CoerceUnsized vtable: {:?}", vtable);
- }
- }
-}
-
pub fn cast_shift_expr_rhs(
cx: &Builder, op: hir::BinOp_, lhs: ValueRef, rhs: ValueRef
) -> ValueRef {
// a char is a Unicode codepoint, and so takes values from 0
// to 0x10FFFF inclusive only.
b.load_range_assert(ptr, 0, 0x10FFFF + 1, llvm::False, alignment.to_align())
- } else if (t.is_region_ptr() || t.is_box()) && !common::type_is_fat_ptr(ccx, t) {
+ } else if (t.is_region_ptr() || t.is_box() || t.is_fn())
+ && !common::type_is_fat_ptr(ccx, t)
+ {
b.load_nonnull(ptr, alignment.to_align())
} else {
b.load(ptr, alignment.to_align())
}
pub fn call_memset<'a, 'tcx>(b: &Builder<'a, 'tcx>,
- ptr: ValueRef,
- fill_byte: ValueRef,
- size: ValueRef,
- align: ValueRef,
- volatile: bool) -> ValueRef {
+ ptr: ValueRef,
+ fill_byte: ValueRef,
+ size: ValueRef,
+ align: ValueRef,
+ volatile: bool) -> ValueRef {
let ptr_width = &b.ccx.sess().target.target.target_pointer_width[..];
let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
let llintrinsicfn = b.ccx.get_intrinsic(&intrinsic_key);
let _s = if ccx.sess().trans_stats() {
let mut instance_name = String::new();
DefPathBasedNames::new(ccx.tcx(), true, true)
- .push_def_path(instance.def, &mut instance_name);
+ .push_def_path(instance.def_id(), &mut instance_name);
Some(StatRecorder::new(ccx, instance_name))
} else {
None
// release builds.
info!("trans_instance({})", instance);
- let fn_ty = common::def_ty(ccx.shared(), instance.def, instance.substs);
+ let fn_ty = common::instance_ty(ccx.shared(), &instance);
let sig = common::ty_fn_sig(ccx, fn_ty);
let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&sig);
attributes::emit_uwtable(lldecl, true);
}
- let mir = ccx.tcx().item_mir(instance.def);
+ let mir = ccx.tcx().instance_mir(instance.def);
mir::trans_mir(ccx, lldecl, &mir, instance, sig);
}
-pub fn trans_ctor_shim<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- def_id: DefId,
- substs: &'tcx Substs<'tcx>,
- disr: Disr,
- llfn: ValueRef) {
- attributes::inline(llfn, attributes::InlineAttr::Hint);
- attributes::set_frame_pointer_elimination(ccx, llfn);
-
- let ctor_ty = common::def_ty(ccx.shared(), def_id, substs);
- let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&ctor_ty.fn_sig());
- let fn_ty = FnType::new(ccx, sig, &[]);
-
- let bcx = Builder::new_block(ccx, llfn, "entry-block");
- if !fn_ty.ret.is_ignore() {
- // But if there are no nested returns, we skip the indirection
- // and have a single retslot
- let dest = if fn_ty.ret.is_indirect() {
- get_param(llfn, 0)
- } else {
- // We create an alloca to hold a pointer of type `ret.original_ty`
- // which will hold the pointer to the right alloca which has the
- // final ret value
- bcx.alloca(fn_ty.ret.memory_ty(ccx), "sret_slot")
- };
- // Can return unsized value
- let mut dest_val = LvalueRef::new_sized_ty(dest, sig.output(), Alignment::AbiAligned);
- dest_val.ty = LvalueTy::Downcast {
- adt_def: sig.output().ty_adt_def().unwrap(),
- substs: substs,
- variant_index: disr.0 as usize,
- };
- let mut llarg_idx = fn_ty.ret.is_indirect() as usize;
- let mut arg_idx = 0;
- for (i, arg_ty) in sig.inputs().iter().enumerate() {
- let (lldestptr, _) = dest_val.trans_field_ptr(&bcx, i);
- let arg = &fn_ty.args[arg_idx];
- arg_idx += 1;
- if common::type_is_fat_ptr(bcx.ccx, arg_ty) {
- let meta = &fn_ty.args[arg_idx];
- arg_idx += 1;
- arg.store_fn_arg(&bcx, &mut llarg_idx, get_dataptr(&bcx, lldestptr));
- meta.store_fn_arg(&bcx, &mut llarg_idx, get_meta(&bcx, lldestptr));
- } else {
- arg.store_fn_arg(&bcx, &mut llarg_idx, lldestptr);
- }
- }
- adt::trans_set_discr(&bcx, sig.output(), dest, disr);
-
- if fn_ty.ret.is_indirect() {
- bcx.ret_void();
- return;
- }
-
- if let Some(cast_ty) = fn_ty.ret.cast {
- bcx.ret(bcx.load(
- bcx.pointercast(dest, cast_ty.ptr_to()),
- Some(llalign_of_min(ccx, fn_ty.ret.ty))
- ));
- } else {
- bcx.ret(bcx.load(dest, None))
- }
- } else {
- bcx.ret_void();
- }
-}
-
pub fn llvm_linkage_by_name(name: &str) -> Option<Linkage> {
// Use the names from src/llvm/docs/LangRef.rst here. Most types are only
// applicable to variable declarations and may not really make sense for
}
/// Create the `main` function which will initialise the rust runtime and call
-/// users’ main function.
+/// users' main function.
pub fn maybe_create_entry_wrapper(ccx: &CrateContext) {
let (main_def_id, span) = match *ccx.sess().entry_fn.borrow() {
Some((id, span)) => {
ccx.tcx().sess.span_fatal(span, "compilation successful");
}
- let instance = Instance::mono(ccx.shared(), main_def_id);
+ let instance = Instance::mono(ccx.tcx(), main_def_id);
if !ccx.codegen_unit().contains_item(&TransItem::Fn(instance)) {
// We want to create the wrapper in the same codegen unit as Rust's main
return;
}
- let main_llfn = Callee::def(ccx, main_def_id, instance.substs).reify(ccx);
+ let main_llfn = callee::get_fn(ccx, instance);
let et = ccx.sess().entry_type.get().unwrap();
match et {
let (start_fn, args) = if use_start_lang_item {
let start_def_id = ccx.tcx().require_lang_item(StartFnLangItem);
- let empty_substs = ccx.tcx().intern_substs(&[]);
- let start_fn = Callee::def(ccx, start_def_id, empty_substs).reify(ccx);
+ let start_instance = Instance::mono(ccx.tcx(), start_def_id);
+ let start_fn = callee::get_fn(ccx, start_instance);
(start_fn, vec![bld.pointercast(rust_main, Type::i8p(ccx).ptr_to()), get_param(llfn, 0),
get_param(llfn, 1)])
} else {
//! and methods are represented as just a fn ptr and not a full
//! closure.
-pub use self::CalleeData::*;
-
-use llvm::{self, ValueRef, get_params};
+use llvm::{self, ValueRef};
use rustc::hir::def_id::DefId;
-use rustc::ty::subst::{Substs, Subst};
-use rustc::traits;
-use abi::{Abi, FnType};
+use rustc::ty::subst::Substs;
use attributes;
-use base;
-use builder::Builder;
use common::{self, CrateContext};
-use cleanup::CleanupScope;
-use mir::lvalue::LvalueRef;
+use monomorphize;
use consts;
-use common::def_ty;
use declare;
-use value::Value;
-use meth;
use monomorphize::Instance;
use trans_item::TransItem;
use type_of;
-use Disr;
-use rustc::ty::{self, Ty, TypeFoldable};
-use rustc::hir;
-use std::iter;
-
-use syntax_pos::DUMMY_SP;
-
-use mir::lvalue::Alignment;
-
-#[derive(Debug)]
-pub enum CalleeData {
- /// Constructor for enum variant/tuple-like-struct.
- NamedTupleConstructor(Disr),
-
- /// Function pointer.
- Fn(ValueRef),
-
- Intrinsic,
-
- /// Trait object found in the vtable at that index.
- Virtual(usize)
-}
-
-#[derive(Debug)]
-pub struct Callee<'tcx> {
- pub data: CalleeData,
- pub ty: Ty<'tcx>
-}
-
-impl<'tcx> Callee<'tcx> {
- /// Function pointer.
- pub fn ptr(llfn: ValueRef, ty: Ty<'tcx>) -> Callee<'tcx> {
- Callee {
- data: Fn(llfn),
- ty: ty
- }
- }
-
- /// Function or method definition.
- pub fn def<'a>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId, substs: &'tcx Substs<'tcx>)
- -> Callee<'tcx> {
- let tcx = ccx.tcx();
-
- if let Some(trait_id) = tcx.trait_of_item(def_id) {
- return Callee::trait_method(ccx, trait_id, def_id, substs);
- }
-
- let fn_ty = def_ty(ccx.shared(), def_id, substs);
- if let ty::TyFnDef(.., f) = fn_ty.sty {
- if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
- return Callee {
- data: Intrinsic,
- ty: fn_ty
- };
- }
- }
-
- // FIXME(eddyb) Detect ADT constructors more efficiently.
- if let Some(adt_def) = fn_ty.fn_ret().skip_binder().ty_adt_def() {
- if let Some(i) = adt_def.variants.iter().position(|v| def_id == v.did) {
- return Callee {
- data: NamedTupleConstructor(Disr::for_variant(tcx, adt_def, i)),
- ty: fn_ty
- };
- }
- }
-
- let (llfn, ty) = get_fn(ccx, def_id, substs);
- Callee::ptr(llfn, ty)
- }
-
- /// Trait method, which has to be resolved to an impl method.
- pub fn trait_method<'a>(ccx: &CrateContext<'a, 'tcx>,
- trait_id: DefId,
- def_id: DefId,
- substs: &'tcx Substs<'tcx>)
- -> Callee<'tcx> {
- let tcx = ccx.tcx();
-
- let trait_ref = ty::TraitRef::from_method(tcx, trait_id, substs);
- let trait_ref = tcx.normalize_associated_type(&ty::Binder(trait_ref));
- match common::fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref) {
- traits::VtableImpl(vtable_impl) => {
- let name = tcx.item_name(def_id);
- let (def_id, substs) = traits::find_method(tcx, name, substs, &vtable_impl);
-
- // Translate the function, bypassing Callee::def.
- // That is because default methods have the same ID as the
- // trait method used to look up the impl method that ended
- // up here, so calling Callee::def would infinitely recurse.
- let (llfn, ty) = get_fn(ccx, def_id, substs);
- Callee::ptr(llfn, ty)
- }
- traits::VtableClosure(vtable_closure) => {
- // The substitutions should have no type parameters remaining
- // after passing through fulfill_obligation
- let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
- let instance = Instance::new(def_id, substs);
- let llfn = trans_closure_method(
- ccx,
- vtable_closure.closure_def_id,
- vtable_closure.substs,
- instance,
- trait_closure_kind);
-
- let method_ty = def_ty(ccx.shared(), def_id, substs);
- Callee::ptr(llfn, method_ty)
- }
- traits::VtableFnPointer(vtable_fn_pointer) => {
- let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
- let instance = Instance::new(def_id, substs);
- let llfn = trans_fn_pointer_shim(ccx, instance,
- trait_closure_kind,
- vtable_fn_pointer.fn_ty);
-
- let method_ty = def_ty(ccx.shared(), def_id, substs);
- Callee::ptr(llfn, method_ty)
- }
- traits::VtableObject(ref data) => {
- Callee {
- data: Virtual(tcx.get_vtable_index_of_object_method(data, def_id)),
- ty: def_ty(ccx.shared(), def_id, substs)
- }
- }
- vtable => {
- bug!("resolved vtable bad vtable {:?} in trans", vtable);
- }
- }
- }
-
- /// Get the abi::FnType for a direct call. Mainly deals with the fact
- /// that a Virtual call doesn't take the vtable, like its shim does.
- /// The extra argument types are for variadic (extern "C") functions.
- pub fn direct_fn_type<'a>(&self, ccx: &CrateContext<'a, 'tcx>,
- extra_args: &[Ty<'tcx>]) -> FnType {
- let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&self.ty.fn_sig());
- let mut fn_ty = FnType::unadjusted(ccx, sig, extra_args);
- if let Virtual(_) = self.data {
- // Don't pass the vtable, it's not an argument of the virtual fn.
- fn_ty.args[1].ignore();
- }
- fn_ty.adjust_for_abi(ccx, sig);
- fn_ty
- }
-
- /// Turn the callee into a function pointer.
- pub fn reify<'a>(self, ccx: &CrateContext<'a, 'tcx>) -> ValueRef {
- match self.data {
- Fn(llfn) => llfn,
- Virtual(_) => meth::trans_object_shim(ccx, self),
- NamedTupleConstructor(disr) => match self.ty.sty {
- ty::TyFnDef(def_id, substs, _) => {
- let instance = Instance::new(def_id, substs);
- if let Some(&llfn) = ccx.instances().borrow().get(&instance) {
- return llfn;
- }
-
- let sym = ccx.symbol_map().get_or_compute(ccx.shared(),
- TransItem::Fn(instance));
- assert!(!ccx.codegen_unit().contains_item(&TransItem::Fn(instance)));
- let lldecl = declare::define_internal_fn(ccx, &sym, self.ty);
- base::trans_ctor_shim(ccx, def_id, substs, disr, lldecl);
- ccx.instances().borrow_mut().insert(instance, lldecl);
-
- lldecl
- }
- _ => bug!("expected fn item type, found {}", self.ty)
- },
- Intrinsic => bug!("intrinsic {} getting reified", self.ty)
- }
- }
-}
-
-fn trans_closure_method<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>,
- def_id: DefId,
- substs: ty::ClosureSubsts<'tcx>,
- method_instance: Instance<'tcx>,
- trait_closure_kind: ty::ClosureKind)
- -> ValueRef
-{
- // If this is a closure, redirect to it.
- let (llfn, _) = get_fn(ccx, def_id, substs.substs);
-
- // If the closure is a Fn closure, but a FnOnce is needed (etc),
- // then adapt the self type
- let llfn_closure_kind = ccx.tcx().closure_kind(def_id);
-
- debug!("trans_closure_adapter_shim(llfn_closure_kind={:?}, \
- trait_closure_kind={:?}, llfn={:?})",
- llfn_closure_kind, trait_closure_kind, Value(llfn));
-
- match needs_fn_once_adapter_shim(llfn_closure_kind, trait_closure_kind) {
- Ok(true) => trans_fn_once_adapter_shim(ccx,
- def_id,
- substs,
- method_instance,
- llfn),
- Ok(false) => llfn,
- Err(()) => {
- bug!("trans_closure_adapter_shim: cannot convert {:?} to {:?}",
- llfn_closure_kind,
- trait_closure_kind);
- }
- }
-}
-
-pub fn needs_fn_once_adapter_shim(actual_closure_kind: ty::ClosureKind,
- trait_closure_kind: ty::ClosureKind)
- -> Result<bool, ()>
-{
- match (actual_closure_kind, trait_closure_kind) {
- (ty::ClosureKind::Fn, ty::ClosureKind::Fn) |
- (ty::ClosureKind::FnMut, ty::ClosureKind::FnMut) |
- (ty::ClosureKind::FnOnce, ty::ClosureKind::FnOnce) => {
- // No adapter needed.
- Ok(false)
- }
- (ty::ClosureKind::Fn, ty::ClosureKind::FnMut) => {
- // The closure fn `llfn` is a `fn(&self, ...)`. We want a
- // `fn(&mut self, ...)`. In fact, at trans time, these are
- // basically the same thing, so we can just return llfn.
- Ok(false)
- }
- (ty::ClosureKind::Fn, ty::ClosureKind::FnOnce) |
- (ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => {
- // The closure fn `llfn` is a `fn(&self, ...)` or `fn(&mut
- // self, ...)`. We want a `fn(self, ...)`. We can produce
- // this by doing something like:
- //
- // fn call_once(self, ...) { call_mut(&self, ...) }
- // fn call_once(mut self, ...) { call_mut(&mut self, ...) }
- //
- // These are both the same at trans time.
- Ok(true)
- }
- _ => Err(()),
- }
-}
-
-fn trans_fn_once_adapter_shim<'a, 'tcx>(
- ccx: &'a CrateContext<'a, 'tcx>,
- def_id: DefId,
- substs: ty::ClosureSubsts<'tcx>,
- method_instance: Instance<'tcx>,
- llreffn: ValueRef)
- -> ValueRef
-{
- if let Some(&llfn) = ccx.instances().borrow().get(&method_instance) {
- return llfn;
- }
-
- debug!("trans_fn_once_adapter_shim(def_id={:?}, substs={:?}, llreffn={:?})",
- def_id, substs, Value(llreffn));
-
- let tcx = ccx.tcx();
-
- // Find a version of the closure type. Substitute static for the
- // region since it doesn't really matter.
- let closure_ty = tcx.mk_closure_from_closure_substs(def_id, substs);
- let ref_closure_ty = tcx.mk_imm_ref(tcx.mk_region(ty::ReErased), closure_ty);
-
- // Make a version with the type of by-ref closure.
- let sig = tcx.closure_type(def_id).subst(tcx, substs.substs);
- let sig = tcx.erase_late_bound_regions_and_normalize(&sig);
- assert_eq!(sig.abi, Abi::RustCall);
- let llref_fn_ty = tcx.mk_fn_ptr(ty::Binder(tcx.mk_fn_sig(
- iter::once(ref_closure_ty).chain(sig.inputs().iter().cloned()),
- sig.output(),
- sig.variadic,
- sig.unsafety,
- Abi::RustCall
- )));
- debug!("trans_fn_once_adapter_shim: llref_fn_ty={:?}",
- llref_fn_ty);
-
-
- // Make a version of the closure type with the same arguments, but
- // with argument #0 being by value.
- let sig = tcx.mk_fn_sig(
- iter::once(closure_ty).chain(sig.inputs().iter().cloned()),
- sig.output(),
- sig.variadic,
- sig.unsafety,
- Abi::RustCall
- );
-
- let fn_ty = FnType::new(ccx, sig, &[]);
- let llonce_fn_ty = tcx.mk_fn_ptr(ty::Binder(sig));
-
- // Create the by-value helper.
- let function_name = method_instance.symbol_name(ccx.shared());
- let lloncefn = declare::define_internal_fn(ccx, &function_name, llonce_fn_ty);
- attributes::set_frame_pointer_elimination(ccx, lloncefn);
-
- let orig_fn_ty = fn_ty;
- let mut bcx = Builder::new_block(ccx, lloncefn, "entry-block");
-
- let callee = Callee {
- data: Fn(llreffn),
- ty: llref_fn_ty
- };
-
- // the first argument (`self`) will be the (by value) closure env.
-
- let mut llargs = get_params(lloncefn);
- let fn_ret = callee.ty.fn_ret();
- let fn_ty = callee.direct_fn_type(bcx.ccx, &[]);
- let self_idx = fn_ty.ret.is_indirect() as usize;
- let env_arg = &orig_fn_ty.args[0];
- let env = if env_arg.is_indirect() {
- LvalueRef::new_sized_ty(llargs[self_idx], closure_ty, Alignment::AbiAligned)
- } else {
- let scratch = LvalueRef::alloca(&bcx, closure_ty, "self");
- let mut llarg_idx = self_idx;
- env_arg.store_fn_arg(&bcx, &mut llarg_idx, scratch.llval);
- scratch
- };
-
- debug!("trans_fn_once_adapter_shim: env={:?}", env);
- // Adjust llargs such that llargs[self_idx..] has the call arguments.
- // For zero-sized closures that means sneaking in a new argument.
- if env_arg.is_ignore() {
- llargs.insert(self_idx, env.llval);
- } else {
- llargs[self_idx] = env.llval;
- }
-
- // Call the by-ref closure body with `self` in a cleanup scope,
- // to drop `self` when the body returns, or in case it unwinds.
- let self_scope = CleanupScope::schedule_drop_mem(&bcx, env);
-
- let llfn = callee.reify(bcx.ccx);
- let llret;
- if let Some(landing_pad) = self_scope.landing_pad {
- let normal_bcx = bcx.build_sibling_block("normal-return");
- llret = bcx.invoke(llfn, &llargs[..], normal_bcx.llbb(), landing_pad, None);
- bcx = normal_bcx;
- } else {
- llret = bcx.call(llfn, &llargs[..], None);
- }
- fn_ty.apply_attrs_callsite(llret);
-
- if fn_ret.0.is_never() {
- bcx.unreachable();
- } else {
- self_scope.trans(&bcx);
-
- if fn_ty.ret.is_indirect() || fn_ty.ret.is_ignore() {
- bcx.ret_void();
- } else {
- bcx.ret(llret);
- }
- }
-
- ccx.instances().borrow_mut().insert(method_instance, lloncefn);
-
- lloncefn
-}
-
-/// Translates an adapter that implements the `Fn` trait for a fn
-/// pointer. This is basically the equivalent of something like:
-///
-/// ```
-/// impl<'a> Fn(&'a int) -> &'a int for fn(&int) -> &int {
-/// extern "rust-abi" fn call(&self, args: (&'a int,)) -> &'a int {
-/// (*self)(args.0)
-/// }
-/// }
-/// ```
-///
-/// but for the bare function type given.
-fn trans_fn_pointer_shim<'a, 'tcx>(
- ccx: &'a CrateContext<'a, 'tcx>,
- method_instance: Instance<'tcx>,
- closure_kind: ty::ClosureKind,
- bare_fn_ty: Ty<'tcx>)
- -> ValueRef
-{
- let tcx = ccx.tcx();
-
- // Normalize the type for better caching.
- let bare_fn_ty = tcx.normalize_associated_type(&bare_fn_ty);
-
- // If this is an impl of `Fn` or `FnMut` trait, the receiver is `&self`.
- let is_by_ref = match closure_kind {
- ty::ClosureKind::Fn | ty::ClosureKind::FnMut => true,
- ty::ClosureKind::FnOnce => false,
- };
-
- let llfnpointer = match bare_fn_ty.sty {
- ty::TyFnDef(def_id, substs, _) => {
- // Function definitions have to be turned into a pointer.
- let llfn = Callee::def(ccx, def_id, substs).reify(ccx);
- if !is_by_ref {
- // A by-value fn item is ignored, so the shim has
- // the same signature as the original function.
- return llfn;
- }
- Some(llfn)
- }
- _ => None
- };
-
- let bare_fn_ty_maybe_ref = if is_by_ref {
- tcx.mk_imm_ref(tcx.mk_region(ty::ReErased), bare_fn_ty)
- } else {
- bare_fn_ty
- };
-
- // Check if we already trans'd this shim.
- if let Some(&llval) = ccx.fn_pointer_shims().borrow().get(&bare_fn_ty_maybe_ref) {
- return llval;
- }
-
- debug!("trans_fn_pointer_shim(bare_fn_ty={:?})",
- bare_fn_ty);
-
- // Construct the "tuply" version of `bare_fn_ty`. It takes two arguments: `self`,
- // which is the fn pointer, and `args`, which is the arguments tuple.
- let sig = bare_fn_ty.fn_sig();
- let sig = tcx.erase_late_bound_regions_and_normalize(&sig);
- assert_eq!(sig.unsafety, hir::Unsafety::Normal);
- assert_eq!(sig.abi, Abi::Rust);
- let tuple_input_ty = tcx.intern_tup(sig.inputs(), false);
- let sig = tcx.mk_fn_sig(
- [bare_fn_ty_maybe_ref, tuple_input_ty].iter().cloned(),
- sig.output(),
- false,
- hir::Unsafety::Normal,
- Abi::RustCall
- );
- let fn_ty = FnType::new(ccx, sig, &[]);
- let tuple_fn_ty = tcx.mk_fn_ptr(ty::Binder(sig));
- debug!("tuple_fn_ty: {:?}", tuple_fn_ty);
-
- //
- let function_name = method_instance.symbol_name(ccx.shared());
- let llfn = declare::define_internal_fn(ccx, &function_name, tuple_fn_ty);
- attributes::set_frame_pointer_elimination(ccx, llfn);
- //
- let bcx = Builder::new_block(ccx, llfn, "entry-block");
-
- let mut llargs = get_params(llfn);
-
- let self_arg = llargs.remove(fn_ty.ret.is_indirect() as usize);
- let llfnpointer = llfnpointer.unwrap_or_else(|| {
- // the first argument (`self`) will be ptr to the fn pointer
- if is_by_ref {
- bcx.load(self_arg, None)
- } else {
- self_arg
- }
- });
-
- let callee = Callee {
- data: Fn(llfnpointer),
- ty: bare_fn_ty
- };
- let fn_ret = callee.ty.fn_ret();
- let fn_ty = callee.direct_fn_type(ccx, &[]);
- let llret = bcx.call(llfnpointer, &llargs, None);
- fn_ty.apply_attrs_callsite(llret);
-
- if fn_ret.0.is_never() {
- bcx.unreachable();
- } else {
- if fn_ty.ret.is_indirect() || fn_ty.ret.is_ignore() {
- bcx.ret_void();
- } else {
- bcx.ret(llret);
- }
- }
-
- ccx.fn_pointer_shims().borrow_mut().insert(bare_fn_ty_maybe_ref, llfn);
-
- llfn
-}
+use rustc::ty::TypeFoldable;
/// Translates a reference to a fn/method item, monomorphizing and
/// inlining as it goes.
/// # Parameters
///
/// - `ccx`: the crate context
-/// - `def_id`: def id of the fn or method item being referenced
-/// - `substs`: values for each of the fn/method's parameters
-fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
- def_id: DefId,
- substs: &'tcx Substs<'tcx>)
- -> (ValueRef, Ty<'tcx>) {
+/// - `instance`: the instance to be instantiated
+pub fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+ instance: Instance<'tcx>)
+ -> ValueRef
+{
let tcx = ccx.tcx();
- debug!("get_fn(def_id={:?}, substs={:?})", def_id, substs);
-
- assert!(!substs.needs_infer());
- assert!(!substs.has_escaping_regions());
- assert!(!substs.has_param_types());
+ debug!("get_fn(instance={:?})", instance);
- let substs = tcx.normalize_associated_type(&substs);
- let instance = Instance::new(def_id, substs);
- let fn_ty = common::def_ty(ccx.shared(), def_id, substs);
+ assert!(!instance.substs.needs_infer());
+ assert!(!instance.substs.has_escaping_regions());
+ assert!(!instance.substs.has_param_types());
+ let fn_ty = common::instance_ty(ccx.shared(), &instance);
if let Some(&llfn) = ccx.instances().borrow().get(&instance) {
- return (llfn, fn_ty);
+ return llfn;
}
let sym = ccx.symbol_map().get_or_compute(ccx.shared(),
assert_eq!(common::val_ty(llfn), llptrty);
debug!("get_fn: not casting pointer!");
- let attrs = ccx.tcx().get_attrs(def_id);
+ if common::is_inline_instance(tcx, &instance) {
+ attributes::inline(llfn, attributes::InlineAttr::Hint);
+ }
+ let attrs = instance.def.attrs(ccx.tcx());
attributes::from_fn_attrs(ccx, &attrs, llfn);
let is_local_def = ccx.shared().translation_items().borrow()
llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::ExternalLinkage);
}
}
- if ccx.use_dll_storage_attrs() && ccx.sess().cstore.is_dllimport_foreign_item(def_id) {
+ if ccx.use_dll_storage_attrs() &&
+ ccx.sess().cstore.is_dllimport_foreign_item(instance.def_id())
+ {
unsafe {
llvm::LLVMSetDLLStorageClass(llfn, llvm::DLLStorageClass::DllImport);
}
ccx.instances().borrow_mut().insert(instance, llfn);
- (llfn, fn_ty)
+ llfn
+}
+
+pub fn resolve_and_get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
+ def_id: DefId,
+ substs: &'tcx Substs<'tcx>)
+ -> ValueRef
+{
+ get_fn(ccx, monomorphize::resolve(ccx.shared(), def_id, substs))
}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! ## The Cleanup module
-//!
-//! The cleanup module tracks what values need to be cleaned up as scopes
-//! are exited, either via panic or just normal control flow.
-//!
-//! Cleanup items can be scheduled into any of the scopes on the stack.
-//! Typically, when a scope is finished, we generate the cleanup code. This
-//! corresponds to a normal exit from a block (for example, an expression
-//! completing evaluation successfully without panic).
-
-use llvm::BasicBlockRef;
-use base;
-use mir::lvalue::LvalueRef;
-use rustc::mir::tcx::LvalueTy;
-use builder::Builder;
-use common::Funclet;
-use glue;
-use type_::Type;
-
-pub struct CleanupScope<'tcx> {
- // Cleanup to run upon scope exit.
- cleanup: Option<DropValue<'tcx>>,
-
- // Computed on creation if compiling with landing pads (!sess.no_landing_pads)
- pub landing_pad: Option<BasicBlockRef>,
-}
-
-#[derive(Copy, Clone)]
-pub struct DropValue<'tcx> {
- val: LvalueRef<'tcx>,
- skip_dtor: bool,
-}
-
-impl<'tcx> DropValue<'tcx> {
- fn trans<'a>(&self, funclet: Option<&'a Funclet>, bcx: &Builder<'a, 'tcx>) {
- glue::call_drop_glue(bcx, self.val, self.skip_dtor, funclet)
- }
-
- /// Creates a landing pad for the top scope. The landing pad will perform all cleanups necessary
- /// for an unwind and then `resume` to continue error propagation:
- ///
- /// landing_pad -> ... cleanups ... -> [resume]
- ///
- /// This should only be called once per function, as it creates an alloca for the landingpad.
- fn get_landing_pad<'a>(&self, bcx: &Builder<'a, 'tcx>) -> BasicBlockRef {
- debug!("get_landing_pad");
- let bcx = bcx.build_sibling_block("cleanup_unwind");
- let llpersonality = bcx.ccx.eh_personality();
- bcx.set_personality_fn(llpersonality);
-
- if base::wants_msvc_seh(bcx.sess()) {
- let pad = bcx.cleanup_pad(None, &[]);
- let funclet = Some(Funclet::new(pad));
- self.trans(funclet.as_ref(), &bcx);
-
- bcx.cleanup_ret(pad, None);
- } else {
- // The landing pad return type (the type being propagated). Not sure
- // what this represents but it's determined by the personality
- // function and this is what the EH proposal example uses.
- let llretty = Type::struct_(bcx.ccx, &[Type::i8p(bcx.ccx), Type::i32(bcx.ccx)], false);
-
- // The only landing pad clause will be 'cleanup'
- let llretval = bcx.landing_pad(llretty, llpersonality, 1, bcx.llfn());
-
- // The landing pad block is a cleanup
- bcx.set_cleanup(llretval);
-
- // Insert cleanup instructions into the cleanup block
- self.trans(None, &bcx);
-
- if !bcx.sess().target.target.options.custom_unwind_resume {
- bcx.resume(llretval);
- } else {
- let exc_ptr = bcx.extract_value(llretval, 0);
- bcx.call(bcx.ccx.eh_unwind_resume(), &[exc_ptr], None);
- bcx.unreachable();
- }
- }
-
- bcx.llbb()
- }
-}
-
-impl<'a, 'tcx> CleanupScope<'tcx> {
- /// Schedules a (deep) drop of `val`, which is a pointer to an instance of `ty`
- pub fn schedule_drop_mem(
- bcx: &Builder<'a, 'tcx>, val: LvalueRef<'tcx>
- ) -> CleanupScope<'tcx> {
- if let LvalueTy::Downcast { .. } = val.ty {
- bug!("Cannot drop downcast ty yet");
- }
- if !bcx.ccx.shared().type_needs_drop(val.ty.to_ty(bcx.tcx())) {
- return CleanupScope::noop();
- }
- let drop = DropValue {
- val: val,
- skip_dtor: false,
- };
-
- CleanupScope::new(bcx, drop)
- }
-
- /// Issue #23611: Schedules a (deep) drop of the contents of
- /// `val`, which is a pointer to an instance of struct/enum type
- /// `ty`. The scheduled code handles extracting the discriminant
- /// and dropping the contents associated with that variant
- /// *without* executing any associated drop implementation.
- pub fn schedule_drop_adt_contents(
- bcx: &Builder<'a, 'tcx>, val: LvalueRef<'tcx>
- ) -> CleanupScope<'tcx> {
- if let LvalueTy::Downcast { .. } = val.ty {
- bug!("Cannot drop downcast ty yet");
- }
- // `if` below could be "!contents_needs_drop"; skipping drop
- // is just an optimization, so sound to be conservative.
- if !bcx.ccx.shared().type_needs_drop(val.ty.to_ty(bcx.tcx())) {
- return CleanupScope::noop();
- }
-
- let drop = DropValue {
- val: val,
- skip_dtor: true,
- };
-
- CleanupScope::new(bcx, drop)
- }
-
- fn new(bcx: &Builder<'a, 'tcx>, drop_val: DropValue<'tcx>) -> CleanupScope<'tcx> {
- CleanupScope {
- cleanup: Some(drop_val),
- landing_pad: if !bcx.sess().no_landing_pads() {
- Some(drop_val.get_landing_pad(bcx))
- } else {
- None
- },
- }
- }
-
- pub fn noop() -> CleanupScope<'tcx> {
- CleanupScope {
- cleanup: None,
- landing_pad: None,
- }
- }
-
- pub fn trans(self, bcx: &'a Builder<'a, 'tcx>) {
- if let Some(cleanup) = self.cleanup {
- cleanup.trans(None, &bcx);
- }
- }
-}
use rustc::hir::map as hir_map;
use rustc::hir::def_id::DefId;
-use rustc::middle::lang_items::{BoxFreeFnLangItem, ExchangeMallocFnLangItem};
+use rustc::middle::lang_items::{ExchangeMallocFnLangItem};
use rustc::traits;
-use rustc::ty::subst::{Kind, Substs, Subst};
+use rustc::ty::subst::{Substs, Subst};
use rustc::ty::{self, TypeFoldable, TyCtxt};
use rustc::ty::adjustment::CustomCoerceUnsized;
use rustc::mir::{self, Location};
-use rustc::mir::visit as mir_visit;
use rustc::mir::visit::Visitor as MirVisitor;
-use syntax::abi::Abi;
-use syntax_pos::DUMMY_SP;
-use base::custom_coerce_unsize_info;
-use callee::needs_fn_once_adapter_shim;
use context::SharedCrateContext;
-use common::{def_ty, fulfill_obligation};
-use glue::{self, DropGlueKind};
+use common::{def_ty, instance_ty};
use monomorphize::{self, Instance};
use util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
use trans_item::{TransItem, DefPathBasedNames, InstantiationMode};
-use std::iter;
-
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
pub enum TransItemCollectionMode {
Eager,
let recursion_depth_reset;
match starting_point {
- TransItem::DropGlue(t) => {
- find_drop_glue_neighbors(scx, t, &mut neighbors);
- recursion_depth_reset = None;
- }
TransItem::Static(node_id) => {
let def_id = scx.tcx().hir.local_def_id(node_id);
+ let instance = Instance::mono(scx.tcx(), def_id);
// Sanity check whether this ended up being collected accidentally
- debug_assert!(should_trans_locally(scx.tcx(), def_id));
+ debug_assert!(should_trans_locally(scx.tcx(), &instance));
- let ty = def_ty(scx, def_id, Substs::empty());
- let ty = glue::get_drop_glue_type(scx, ty);
- neighbors.push(TransItem::DropGlue(DropGlueKind::Ty(ty)));
+ let ty = instance_ty(scx, &instance);
+ visit_drop_use(scx, ty, true, &mut neighbors);
recursion_depth_reset = None;
- collect_neighbours(scx, Instance::mono(scx, def_id), &mut neighbors);
+ collect_neighbours(scx, instance, &mut neighbors);
}
TransItem::Fn(instance) => {
// Sanity check whether this ended up being collected accidentally
- debug_assert!(should_trans_locally(scx.tcx(), instance.def));
+ debug_assert!(should_trans_locally(scx.tcx(), &instance));
// Keep track of the monomorphization recursion depth
recursion_depth_reset = Some(check_recursion_limit(scx.tcx(),
instance: Instance<'tcx>,
recursion_depths: &mut DefIdMap<usize>)
-> (DefId, usize) {
- let recursion_depth = recursion_depths.get(&instance.def)
- .map(|x| *x)
- .unwrap_or(0);
+ let def_id = instance.def_id();
+ let recursion_depth = recursion_depths.get(&def_id).cloned().unwrap_or(0);
debug!(" => recursion depth={}", recursion_depth);
+ let recursion_depth = if Some(def_id) == tcx.lang_items.drop_in_place_fn() {
+ // HACK: drop_in_place creates tight monomorphization loops. Give
+ // it more margin.
+ recursion_depth / 4
+ } else {
+ recursion_depth
+ };
+
// Code that needs to instantiate the same function recursively
// more than the recursion limit is assumed to be causing an
// infinite expansion.
if recursion_depth > tcx.sess.recursion_limit.get() {
let error = format!("reached the recursion limit while instantiating `{}`",
instance);
- if let Some(node_id) = tcx.hir.as_local_node_id(instance.def) {
+ if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
tcx.sess.span_fatal(tcx.hir.span(node_id), &error);
} else {
tcx.sess.fatal(&error);
}
}
- recursion_depths.insert(instance.def, recursion_depth + 1);
+ recursion_depths.insert(def_id, recursion_depth + 1);
- (instance.def, recursion_depth)
+ (def_id, recursion_depth)
}
fn check_type_length_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let instance_name = instance.to_string();
let msg = format!("reached the type-length limit while instantiating `{:.64}...`",
instance_name);
- let mut diag = if let Some(node_id) = tcx.hir.as_local_node_id(instance.def) {
+ let mut diag = if let Some(node_id) = tcx.hir.as_local_node_id(instance.def_id()) {
tcx.sess.struct_span_fatal(tcx.hir.span(node_id), &msg)
} else {
tcx.sess.struct_fatal(&msg)
self.output);
}
}
+ mir::Rvalue::Cast(mir::CastKind::ReifyFnPointer, ref operand, _) => {
+ let fn_ty = operand.ty(self.mir, self.scx.tcx());
+ let fn_ty = monomorphize::apply_param_substs(
+ self.scx,
+ self.param_substs,
+ &fn_ty);
+ visit_fn_use(self.scx, fn_ty, false, &mut self.output);
+ }
mir::Rvalue::Cast(mir::CastKind::ClosureFnPointer, ref operand, _) => {
let source_ty = operand.ty(self.mir, self.scx.tcx());
match source_ty.sty {
ty::TyClosure(def_id, substs) => {
- let closure_trans_item =
- create_fn_trans_item(self.scx,
- def_id,
- substs.substs,
- self.param_substs);
- self.output.push(closure_trans_item);
+ let instance = monomorphize::resolve_closure(
+ self.scx, def_id, substs, ty::ClosureKind::FnOnce);
+ self.output.push(create_fn_trans_item(instance));
}
_ => bug!(),
}
}
mir::Rvalue::Box(..) => {
- let exchange_malloc_fn_def_id =
- self.scx
- .tcx()
- .lang_items
- .require(ExchangeMallocFnLangItem)
- .unwrap_or_else(|e| self.scx.sess().fatal(&e));
-
- if should_trans_locally(self.scx.tcx(), exchange_malloc_fn_def_id) {
- let empty_substs = self.scx.empty_substs_for_def_id(exchange_malloc_fn_def_id);
- let exchange_malloc_fn_trans_item =
- create_fn_trans_item(self.scx,
- exchange_malloc_fn_def_id,
- empty_substs,
- self.param_substs);
-
- self.output.push(exchange_malloc_fn_trans_item);
+ let tcx = self.scx.tcx();
+ let exchange_malloc_fn_def_id = tcx
+ .lang_items
+ .require(ExchangeMallocFnLangItem)
+ .unwrap_or_else(|e| self.scx.sess().fatal(&e));
+ let instance = Instance::mono(tcx, exchange_malloc_fn_def_id);
+ if should_trans_locally(tcx, &instance) {
+ self.output.push(create_fn_trans_item(instance));
}
}
_ => { /* not interesting */ }
self.super_rvalue(rvalue, location);
}
- fn visit_lvalue(&mut self,
- lvalue: &mir::Lvalue<'tcx>,
- context: mir_visit::LvalueContext<'tcx>,
- location: Location) {
- debug!("visiting lvalue {:?}", *lvalue);
-
- if let mir_visit::LvalueContext::Drop = context {
- let ty = lvalue.ty(self.mir, self.scx.tcx())
- .to_ty(self.scx.tcx());
-
- let ty = monomorphize::apply_param_substs(self.scx,
- self.param_substs,
- &ty);
- assert!(ty.is_normalized_for_trans());
- let ty = glue::get_drop_glue_type(self.scx, ty);
- self.output.push(TransItem::DropGlue(DropGlueKind::Ty(ty)));
- }
-
- self.super_lvalue(lvalue, context, location);
- }
-
- fn visit_operand(&mut self, operand: &mir::Operand<'tcx>, location: Location) {
- debug!("visiting operand {:?}", *operand);
-
- let callee = match *operand {
- mir::Operand::Constant(ref constant) => {
- if let ty::TyFnDef(def_id, substs, _) = constant.ty.sty {
- // This is something that can act as a callee, proceed
- Some((def_id, substs))
- } else {
- // This is not a callee, but we still have to look for
- // references to `const` items
- if let mir::Literal::Item { def_id, substs } = constant.literal {
- let substs = monomorphize::apply_param_substs(self.scx,
- self.param_substs,
- &substs);
-
- let instance = Instance::new(def_id, substs).resolve_const(self.scx);
- collect_neighbours(self.scx, instance, self.output);
- }
-
- None
- }
- }
- _ => None
- };
+ fn visit_constant(&mut self, constant: &mir::Constant<'tcx>, location: Location) {
+ debug!("visiting constant {:?} @ {:?}", *constant, location);
- if let Some((callee_def_id, callee_substs)) = callee {
- debug!(" => operand is callable");
-
- // `callee_def_id` might refer to a trait method instead of a
- // concrete implementation, so we have to find the actual
- // implementation. For example, the call might look like
- //
- // std::cmp::partial_cmp(0i32, 1i32)
- //
- // Calling do_static_dispatch() here will map the def_id of
- // `std::cmp::partial_cmp` to the def_id of `i32::partial_cmp<i32>`
- let dispatched = do_static_dispatch(self.scx,
- callee_def_id,
- callee_substs,
- self.param_substs);
-
- if let StaticDispatchResult::Dispatched {
- def_id: callee_def_id,
- substs: callee_substs,
- fn_once_adjustment,
- } = dispatched {
- // if we have a concrete impl (which we might not have
- // in the case of something compiler generated like an
- // object shim or a closure that is handled differently),
- // we check if the callee is something that will actually
- // result in a translation item ...
- if can_result_in_trans_item(self.scx.tcx(), callee_def_id) {
- // ... and create one if it does.
- let trans_item = create_fn_trans_item(self.scx,
- callee_def_id,
- callee_substs,
- self.param_substs);
- self.output.push(trans_item);
-
- // This call will instantiate an FnOnce adapter, which drops
- // the closure environment. Therefore we need to make sure
- // that we collect the drop-glue for the environment type.
- if let Some(env_ty) = fn_once_adjustment {
- let env_ty = glue::get_drop_glue_type(self.scx, env_ty);
- if self.scx.type_needs_drop(env_ty) {
- let dg = DropGlueKind::Ty(env_ty);
- self.output.push(TransItem::DropGlue(dg));
- }
- }
- }
- }
+ if let ty::TyFnDef(..) = constant.ty.sty {
+ // function definitions are zero-sized, and only generate
+ // IR when they are called/reified.
+ self.super_constant(constant, location);
+ return
}
- self.super_operand(operand, location);
-
- fn can_result_in_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> bool {
- match tcx.item_type(def_id).sty {
- ty::TyFnDef(def_id, _, _) => {
- // Some constructors also have type TyFnDef but they are
- // always instantiated inline and don't result in a
- // translation item. Same for FFI functions.
- if let Some(hir_map::NodeForeignItem(_)) = tcx.hir.get_if_local(def_id) {
- return false;
- }
- }
- ty::TyClosure(..) => {}
- _ => return false
- }
-
- should_trans_locally(tcx, def_id)
+ if let mir::Literal::Item { def_id, substs } = constant.literal {
+ let substs = monomorphize::apply_param_substs(self.scx,
+ self.param_substs,
+ &substs);
+ let instance = monomorphize::resolve(self.scx, def_id, substs);
+ collect_neighbours(self.scx, instance, self.output);
}
+
+ self.super_constant(constant, location);
}
- // This takes care of the "drop_in_place" intrinsic for which we otherwise
- // we would not register drop-glues.
fn visit_terminator_kind(&mut self,
block: mir::BasicBlock,
kind: &mir::TerminatorKind<'tcx>,
location: Location) {
let tcx = self.scx.tcx();
match *kind {
- mir::TerminatorKind::Call {
- func: mir::Operand::Constant(ref constant),
- ref args,
- ..
- } => {
- match constant.ty.sty {
- ty::TyFnDef(def_id, _, bare_fn_ty)
- if is_drop_in_place_intrinsic(tcx, def_id, bare_fn_ty) => {
- let operand_ty = args[0].ty(self.mir, tcx);
- if let ty::TyRawPtr(mt) = operand_ty.sty {
- let operand_ty = monomorphize::apply_param_substs(self.scx,
- self.param_substs,
- &mt.ty);
- let ty = glue::get_drop_glue_type(self.scx, operand_ty);
- self.output.push(TransItem::DropGlue(DropGlueKind::Ty(ty)));
- } else {
- bug!("Has the drop_in_place() intrinsic's signature changed?")
- }
- }
- _ => { /* Nothing to do. */ }
- }
+ mir::TerminatorKind::Call { ref func, .. } => {
+ let callee_ty = func.ty(self.mir, tcx);
+ let callee_ty = monomorphize::apply_param_substs(
+ self.scx, self.param_substs, &callee_ty);
+ visit_fn_use(self.scx, callee_ty, true, &mut self.output);
+ }
+ mir::TerminatorKind::Drop { ref location, .. } |
+ mir::TerminatorKind::DropAndReplace { ref location, .. } => {
+ let ty = location.ty(self.mir, self.scx.tcx())
+ .to_ty(self.scx.tcx());
+ let ty = monomorphize::apply_param_substs(self.scx,
+ self.param_substs,
+ &ty);
+ visit_drop_use(self.scx, ty, true, self.output);
}
- _ => { /* Nothing to do. */ }
+ mir::TerminatorKind::Goto { .. } |
+ mir::TerminatorKind::SwitchInt { .. } |
+ mir::TerminatorKind::Resume |
+ mir::TerminatorKind::Return |
+ mir::TerminatorKind::Unreachable |
+ mir::TerminatorKind::Assert { .. } => {}
}
self.super_terminator_kind(block, kind, location);
-
- fn is_drop_in_place_intrinsic<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
- bare_fn_ty: ty::PolyFnSig<'tcx>)
- -> bool {
- (bare_fn_ty.abi() == Abi::RustIntrinsic ||
- bare_fn_ty.abi() == Abi::PlatformIntrinsic) &&
- tcx.item_name(def_id) == "drop_in_place"
- }
}
}
-// Returns true if we should translate an instance in the local crate.
-// Returns false if we can just link to the upstream crate and therefore don't
-// need a translation item.
-fn should_trans_locally<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> bool {
- if let ty::TyFnDef(_, _, sig) = tcx.item_type(def_id).sty {
- if let Some(adt_def) = sig.output().skip_binder().ty_adt_def() {
- if adt_def.variants.iter().any(|v| def_id == v.did) {
- // HACK: ADT constructors are translated in-place and
- // do not have a trans-item.
- return false;
- }
- }
- }
-
- if def_id.is_local() {
- true
- } else {
- if tcx.sess.cstore.is_exported_symbol(def_id) ||
- tcx.sess.cstore.is_foreign_item(def_id) {
- // We can link to the item in question, no instance needed in this
- // crate
- false
- } else {
- if !tcx.sess.cstore.is_item_mir_available(def_id) {
- bug!("Cannot create local trans-item for {:?}", def_id)
- }
- true
- }
- }
+fn visit_drop_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+ ty: ty::Ty<'tcx>,
+ is_direct_call: bool,
+ output: &mut Vec<TransItem<'tcx>>)
+{
+ let instance = monomorphize::resolve_drop_in_place(scx, ty);
+ visit_instance_use(scx, instance, is_direct_call, output);
}
-fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- dg: DropGlueKind<'tcx>,
- output: &mut Vec<TransItem<'tcx>>) {
- let ty = match dg {
- DropGlueKind::Ty(ty) => ty,
- DropGlueKind::TyContents(_) => {
- // We already collected the neighbors of this item via the
- // DropGlueKind::Ty variant.
- return
- }
- };
-
- debug!("find_drop_glue_neighbors: {}", type_to_string(scx.tcx(), ty));
-
- // Make sure the BoxFreeFn lang-item gets translated if there is a boxed value.
- if ty.is_box() {
- let def_id = scx.tcx().require_lang_item(BoxFreeFnLangItem);
- if should_trans_locally(scx.tcx(), def_id) {
- let box_free_fn_trans_item =
- create_fn_trans_item(scx,
- def_id,
- scx.tcx().mk_substs(iter::once(Kind::from(ty.boxed_ty()))),
- scx.tcx().intern_substs(&[]));
- output.push(box_free_fn_trans_item);
- }
+fn visit_fn_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+ ty: ty::Ty<'tcx>,
+ is_direct_call: bool,
+ output: &mut Vec<TransItem<'tcx>>)
+{
+ if let ty::TyFnDef(def_id, substs, _) = ty.sty {
+ let instance = monomorphize::resolve(scx, def_id, substs);
+ visit_instance_use(scx, instance, is_direct_call, output);
}
+}
- // If the type implements Drop, also add a translation item for the
- // monomorphized Drop::drop() implementation.
- let destructor = match ty.sty {
- ty::TyAdt(def, _) => def.destructor(scx.tcx()),
- _ => None
- };
-
- if let (Some(destructor), false) = (destructor, ty.is_box()) {
- use rustc::ty::ToPolyTraitRef;
-
- let drop_trait_def_id = scx.tcx()
- .lang_items
- .drop_trait()
- .unwrap();
-
- let self_type_substs = scx.tcx().mk_substs_trait(ty, &[]);
-
- let trait_ref = ty::TraitRef {
- def_id: drop_trait_def_id,
- substs: self_type_substs,
- }.to_poly_trait_ref();
-
- let substs = match fulfill_obligation(scx, DUMMY_SP, trait_ref) {
- traits::VtableImpl(data) => data.substs,
- _ => bug!()
- };
-
- if should_trans_locally(scx.tcx(), destructor.did) {
- let trans_item = create_fn_trans_item(scx,
- destructor.did,
- substs,
- scx.tcx().intern_substs(&[]));
- output.push(trans_item);
- }
-
- // This type has a Drop implementation, we'll need the contents-only
- // version of the glue too.
- output.push(TransItem::DropGlue(DropGlueKind::TyContents(ty)));
+fn visit_instance_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+ instance: ty::Instance<'tcx>,
+ is_direct_call: bool,
+ output: &mut Vec<TransItem<'tcx>>)
+{
+ debug!("visit_item_use({:?}, is_direct_call={:?})", instance, is_direct_call);
+ if !should_trans_locally(scx.tcx(), &instance) {
+ return
}
- // Finally add the types of nested values
- match ty.sty {
- ty::TyBool |
- ty::TyChar |
- ty::TyInt(_) |
- ty::TyUint(_) |
- ty::TyStr |
- ty::TyFloat(_) |
- ty::TyRawPtr(_) |
- ty::TyRef(..) |
- ty::TyFnDef(..) |
- ty::TyFnPtr(_) |
- ty::TyNever |
- ty::TyDynamic(..) => {
- /* nothing to do */
- }
- ty::TyAdt(def, _) if def.is_box() => {
- let inner_type = glue::get_drop_glue_type(scx, ty.boxed_ty());
- if scx.type_needs_drop(inner_type) {
- output.push(TransItem::DropGlue(DropGlueKind::Ty(inner_type)));
- }
- }
- ty::TyAdt(def, substs) => {
- for field in def.all_fields() {
- let field_type = def_ty(scx, field.did, substs);
- let field_type = glue::get_drop_glue_type(scx, field_type);
-
- if scx.type_needs_drop(field_type) {
- output.push(TransItem::DropGlue(DropGlueKind::Ty(field_type)));
- }
+ match instance.def {
+ ty::InstanceDef::Intrinsic(def_id) => {
+ if !is_direct_call {
+ bug!("intrinsic {:?} being reified", def_id);
}
}
- ty::TyClosure(def_id, substs) => {
- for upvar_ty in substs.upvar_tys(def_id, scx.tcx()) {
- let upvar_ty = glue::get_drop_glue_type(scx, upvar_ty);
- if scx.type_needs_drop(upvar_ty) {
- output.push(TransItem::DropGlue(DropGlueKind::Ty(upvar_ty)));
- }
- }
- }
- ty::TySlice(inner_type) |
- ty::TyArray(inner_type, _) => {
- let inner_type = glue::get_drop_glue_type(scx, inner_type);
- if scx.type_needs_drop(inner_type) {
- output.push(TransItem::DropGlue(DropGlueKind::Ty(inner_type)));
+ ty::InstanceDef::Virtual(..) |
+ ty::InstanceDef::DropGlue(_, None) => {
+ // don't need to emit shim if we are calling directly.
+ if !is_direct_call {
+ output.push(create_fn_trans_item(instance));
}
}
- ty::TyTuple(args, _) => {
- for arg in args {
- let arg = glue::get_drop_glue_type(scx, arg);
- if scx.type_needs_drop(arg) {
- output.push(TransItem::DropGlue(DropGlueKind::Ty(arg)));
+ ty::InstanceDef::DropGlue(_, Some(ty)) => {
+ match ty.sty {
+ ty::TyArray(ety, _) |
+ ty::TySlice(ety)
+ if is_direct_call =>
+ {
+ // drop of arrays/slices is translated in-line.
+ visit_drop_use(scx, ety, false, output);
}
- }
- }
- ty::TyProjection(_) |
- ty::TyParam(_) |
- ty::TyInfer(_) |
- ty::TyAnon(..) |
- ty::TyError => {
- bug!("encountered unexpected type");
+ _ => {}
+ };
+ output.push(create_fn_trans_item(instance));
}
- }
-}
-
-fn do_static_dispatch<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- fn_def_id: DefId,
- fn_substs: &'tcx Substs<'tcx>,
- param_substs: &'tcx Substs<'tcx>)
- -> StaticDispatchResult<'tcx> {
- debug!("do_static_dispatch(fn_def_id={}, fn_substs={:?}, param_substs={:?})",
- def_id_to_string(scx.tcx(), fn_def_id),
- fn_substs,
- param_substs);
-
- if let Some(trait_def_id) = scx.tcx().trait_of_item(fn_def_id) {
- debug!(" => trait method, attempting to find impl");
- do_static_trait_method_dispatch(scx,
- &scx.tcx().associated_item(fn_def_id),
- trait_def_id,
- fn_substs,
- param_substs)
- } else {
- debug!(" => regular function");
- // The function is not part of an impl or trait, no dispatching
- // to be done
- StaticDispatchResult::Dispatched {
- def_id: fn_def_id,
- substs: fn_substs,
- fn_once_adjustment: None,
+ ty::InstanceDef::ClosureOnceShim { .. } |
+ ty::InstanceDef::Item(..) |
+ ty::InstanceDef::FnPtrShim(..) => {
+ output.push(create_fn_trans_item(instance));
}
}
}
-enum StaticDispatchResult<'tcx> {
- // The call could be resolved statically as going to the method with
- // `def_id` and `substs`.
- Dispatched {
- def_id: DefId,
- substs: &'tcx Substs<'tcx>,
-
- // If this is a call to a closure that needs an FnOnce adjustment,
- // this contains the new self type of the call (= type of the closure
- // environment)
- fn_once_adjustment: Option<ty::Ty<'tcx>>,
- },
- // This goes to somewhere that we don't know at compile-time
- Unknown
-}
-
-// Given a trait-method and substitution information, find out the actual
-// implementation of the trait method.
-fn do_static_trait_method_dispatch<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- trait_method: &ty::AssociatedItem,
- trait_id: DefId,
- callee_substs: &'tcx Substs<'tcx>,
- param_substs: &'tcx Substs<'tcx>)
- -> StaticDispatchResult<'tcx> {
- let tcx = scx.tcx();
- debug!("do_static_trait_method_dispatch(trait_method={}, \
- trait_id={}, \
- callee_substs={:?}, \
- param_substs={:?}",
- def_id_to_string(scx.tcx(), trait_method.def_id),
- def_id_to_string(scx.tcx(), trait_id),
- callee_substs,
- param_substs);
-
- let rcvr_substs = monomorphize::apply_param_substs(scx,
- param_substs,
- &callee_substs);
- let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_substs);
- let vtbl = fulfill_obligation(scx, DUMMY_SP, ty::Binder(trait_ref));
-
- // Now that we know which impl is being used, we can dispatch to
- // the actual function:
- match vtbl {
- traits::VtableImpl(impl_data) => {
- let (def_id, substs) = traits::find_method(tcx,
- trait_method.name,
- rcvr_substs,
- &impl_data);
- StaticDispatchResult::Dispatched {
- def_id: def_id,
- substs: substs,
- fn_once_adjustment: None,
- }
- }
- traits::VtableClosure(closure_data) => {
- let closure_def_id = closure_data.closure_def_id;
- let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
- let actual_closure_kind = tcx.closure_kind(closure_def_id);
-
- let needs_fn_once_adapter_shim =
- match needs_fn_once_adapter_shim(actual_closure_kind,
- trait_closure_kind) {
- Ok(true) => true,
- _ => false,
- };
-
- let fn_once_adjustment = if needs_fn_once_adapter_shim {
- Some(tcx.mk_closure_from_closure_substs(closure_def_id,
- closure_data.substs))
- } else {
- None
- };
-
- StaticDispatchResult::Dispatched {
- def_id: closure_def_id,
- substs: closure_data.substs.substs,
- fn_once_adjustment: fn_once_adjustment,
- }
- }
- traits::VtableFnPointer(ref data) => {
- // If we know the destination of this fn-pointer, we'll have to make
- // sure that this destination actually gets instantiated.
- if let ty::TyFnDef(def_id, substs, _) = data.fn_ty.sty {
- // The destination of the pointer might be something that needs
- // further dispatching, such as a trait method, so we do that.
- do_static_dispatch(scx, def_id, substs, param_substs)
+// Returns true if we should translate an instance in the local crate.
+// Returns false if we can just link to the upstream crate and therefore don't
+// need a translation item.
+fn should_trans_locally<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, instance: &Instance<'tcx>)
+ -> bool {
+ let def_id = match instance.def {
+ ty::InstanceDef::Item(def_id) => def_id,
+ ty::InstanceDef::ClosureOnceShim { .. } |
+ ty::InstanceDef::Virtual(..) |
+ ty::InstanceDef::FnPtrShim(..) |
+ ty::InstanceDef::DropGlue(..) |
+ ty::InstanceDef::Intrinsic(_) => return true
+ };
+ match tcx.hir.get_if_local(def_id) {
+ Some(hir_map::NodeForeignItem(..)) => {
+ false // foreign items are linked against, not translated.
+ }
+ Some(_) => true,
+ None => {
+ if tcx.sess.cstore.is_exported_symbol(def_id) ||
+ tcx.sess.cstore.is_foreign_item(def_id)
+ {
+ // We can link to the item in question, no instance needed
+ // in this crate
+ false
} else {
- StaticDispatchResult::Unknown
+ if !tcx.sess.cstore.is_item_mir_available(def_id) {
+ bug!("Cannot create local trans-item for {:?}", def_id)
+ }
+ true
}
}
- // Trait object shims are always instantiated in-place, and as they are
- // just an ABI-adjusting indirect call they do not have any dependencies.
- traits::VtableObject(..) => {
- StaticDispatchResult::Unknown
- }
- _ => {
- bug!("static call to invalid vtable: {:?}", vtbl)
- }
}
}
&ty::TyAdt(target_adt_def, target_substs)) => {
assert_eq!(source_adt_def, target_adt_def);
- let kind = custom_coerce_unsize_info(scx, source_ty, target_ty);
+ let kind =
+ monomorphize::custom_coerce_unsize_info(scx, source_ty, target_ty);
let coerce_index = match kind {
CustomCoerceUnsized::Struct(i) => i
}
}
-fn create_fn_trans_item<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- def_id: DefId,
- fn_substs: &'tcx Substs<'tcx>,
- param_substs: &'tcx Substs<'tcx>)
- -> TransItem<'tcx> {
- let tcx = scx.tcx();
-
- debug!("create_fn_trans_item(def_id={}, fn_substs={:?}, param_substs={:?})",
- def_id_to_string(tcx, def_id),
- fn_substs,
- param_substs);
-
- // We only get here, if fn_def_id either designates a local item or
- // an inlineable external item. Non-inlineable external items are
- // ignored because we don't want to generate any code for them.
- let concrete_substs = monomorphize::apply_param_substs(scx,
- param_substs,
- &fn_substs);
- assert!(concrete_substs.is_normalized_for_trans(),
- "concrete_substs not normalized for trans: {:?}",
- concrete_substs);
- TransItem::Fn(Instance::new(def_id, concrete_substs))
+fn create_fn_trans_item<'a, 'tcx>(instance: Instance<'tcx>) -> TransItem<'tcx> {
+ debug!("create_fn_trans_item(instance={})", instance);
+ TransItem::Fn(instance)
}
/// Creates a `TransItem` for each method that is referenced by the vtable for
if let ty::TyDynamic(ref trait_ty, ..) = trait_ty.sty {
if let Some(principal) = trait_ty.principal() {
let poly_trait_ref = principal.with_self_ty(scx.tcx(), impl_ty);
- let param_substs = scx.tcx().intern_substs(&[]);
-
assert!(!poly_trait_ref.has_escaping_regions());
// Walk all methods of the trait, including those of its supertraits
let methods = traits::get_vtable_methods(scx.tcx(), poly_trait_ref);
let methods = methods.filter_map(|method| method)
- .filter_map(|(def_id, substs)| {
- if let StaticDispatchResult::Dispatched {
- def_id,
- substs,
- // We already add the drop-glue for the closure env
- // unconditionally below.
- fn_once_adjustment: _ ,
- } = do_static_dispatch(scx, def_id, substs, param_substs) {
- Some((def_id, substs))
- } else {
- None
- }
- })
- .filter(|&(def_id, _)| should_trans_locally(scx.tcx(), def_id))
- .map(|(def_id, substs)| create_fn_trans_item(scx, def_id, substs, param_substs));
+ .map(|(def_id, substs)| monomorphize::resolve(scx, def_id, substs))
+ .filter(|&instance| should_trans_locally(scx.tcx(), &instance))
+ .map(|instance| create_fn_trans_item(instance));
output.extend(methods);
}
// Also add the destructor
- let dg_type = glue::get_drop_glue_type(scx, impl_ty);
- output.push(TransItem::DropGlue(DropGlueKind::Ty(dg_type)));
+ visit_drop_use(scx, impl_ty, false, output);
}
}
def_id_to_string(self.scx.tcx(), def_id));
let ty = def_ty(self.scx, def_id, Substs::empty());
- let ty = glue::get_drop_glue_type(self.scx, ty);
- self.output.push(TransItem::DropGlue(DropGlueKind::Ty(ty)));
+ visit_drop_use(self.scx, ty, true, self.output);
}
}
}
debug!("RootCollector: ItemFn({})",
def_id_to_string(self.scx.tcx(), def_id));
- let instance = Instance::mono(self.scx, def_id);
+ let instance = Instance::mono(self.scx.tcx(), def_id);
self.output.push(TransItem::Fn(instance));
}
}
debug!("RootCollector: MethodImplItem({})",
def_id_to_string(self.scx.tcx(), def_id));
- let instance = Instance::mono(self.scx, def_id);
+ let instance = Instance::mono(self.scx.tcx(), def_id);
self.output.push(TransItem::Fn(instance));
}
}
continue;
}
- // The substitutions we have are on the impl, so we grab
- // the method type from the impl to substitute into.
- let impl_substs = Substs::for_item(tcx, impl_def_id,
- |_, _| tcx.mk_region(ty::ReErased),
- |_, _| tcx.types.err);
- let impl_data = traits::VtableImplData {
- impl_def_id: impl_def_id,
- substs: impl_substs,
- nested: vec![]
- };
- let (def_id, substs) = traits::find_method(tcx,
- method.name,
- callee_substs,
- &impl_data);
-
- let predicates = tcx.item_predicates(def_id).predicates
- .subst(tcx, substs);
+ let instance =
+ monomorphize::resolve(scx, method.def_id, callee_substs);
+
+ let predicates = tcx.item_predicates(instance.def_id()).predicates
+ .subst(tcx, instance.substs);
if !traits::normalize_and_test_predicates(tcx, predicates) {
continue;
}
- if should_trans_locally(tcx, method.def_id) {
- let item = create_fn_trans_item(scx,
- method.def_id,
- callee_substs,
- tcx.erase_regions(&substs));
- output.push(item);
+ if should_trans_locally(tcx, &instance) {
+ output.push(create_fn_trans_item(instance));
}
}
}
instance: Instance<'tcx>,
output: &mut Vec<TransItem<'tcx>>)
{
- let mir = scx.tcx().item_mir(instance.def);
+ let mir = scx.tcx().instance_mir(instance.def);
let mut visitor = MirNeighborCollector {
scx: scx,
printer.push_def_path(def_id, &mut output);
output
}
-
-fn type_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- ty: ty::Ty<'tcx>)
- -> String {
- let mut output = String::new();
- let printer = DefPathBasedNames::new(tcx, false, false);
- printer.push_type_name(ty, &mut output);
- output
-}
use llvm::{True, False, Bool, OperandBundleDef};
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathData;
-use rustc::util::common::MemoizationMap;
use middle::lang_items::LangItem;
use base;
use builder::Builder;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::Layout;
use rustc::ty::subst::{Subst, Substs};
-use rustc::traits::{self, SelectionContext, Reveal};
use rustc::hir;
use libc::{c_uint, c_char};
use std::iter;
-use syntax::ast;
+use syntax::attr;
use syntax::symbol::InternedString;
use syntax_pos::Span;
}
}
-/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
-/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
-/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
-pub fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- span: Span,
- trait_ref: ty::PolyTraitRef<'tcx>)
- -> traits::Vtable<'tcx, ()>
-{
- let tcx = scx.tcx();
-
- // Remove any references to regions; this helps improve caching.
- let trait_ref = tcx.erase_regions(&trait_ref);
-
- scx.trait_cache().memoize(trait_ref, || {
- debug!("trans::fulfill_obligation(trait_ref={:?}, def_id={:?})",
- trait_ref, trait_ref.def_id());
-
- // Do the initial selection for the obligation. This yields the
- // shallow result we are looking for -- that is, what specific impl.
- tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
- let mut selcx = SelectionContext::new(&infcx);
-
- let obligation_cause = traits::ObligationCause::misc(span,
- ast::DUMMY_NODE_ID);
- let obligation = traits::Obligation::new(obligation_cause,
- trait_ref.to_poly_trait_predicate());
-
- let selection = match selcx.select(&obligation) {
- Ok(Some(selection)) => selection,
- Ok(None) => {
- // Ambiguity can happen when monomorphizing during trans
- // expands to some humongo type that never occurred
- // statically -- this humongo type can then overflow,
- // leading to an ambiguous result. So report this as an
- // overflow bug, since I believe this is the only case
- // where ambiguity can result.
- debug!("Encountered ambiguity selecting `{:?}` during trans, \
- presuming due to overflow",
- trait_ref);
- tcx.sess.span_fatal(span,
- "reached the recursion limit during monomorphization \
- (selection ambiguity)");
- }
- Err(e) => {
- span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans",
- e, trait_ref)
- }
- };
-
- debug!("fulfill_obligation: selection={:?}", selection);
-
- // Currently, we use a fulfillment context to completely resolve
- // all nested obligations. This is because they can inform the
- // inference of the impl's type parameters.
- let mut fulfill_cx = traits::FulfillmentContext::new();
- let vtable = selection.map(|predicate| {
- debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate);
- fulfill_cx.register_predicate_obligation(&infcx, predicate);
- });
- let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);
-
- info!("Cache miss: {:?} => {:?}", trait_ref, vtable);
- vtable
- })
- })
-}
-
pub fn langcall(tcx: TyCtxt,
span: Option<Span>,
msg: &str,
}
}
-pub fn is_closure(tcx: TyCtxt, def_id: DefId) -> bool {
- tcx.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr
+pub fn requests_inline<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ instance: &ty::Instance<'tcx>
+) -> bool {
+ if is_inline_instance(tcx, instance) {
+ return true
+ }
+ attr::requests_inline(&instance.def.attrs(tcx)[..])
+}
+
+pub fn is_inline_instance<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ instance: &ty::Instance<'tcx>
+) -> bool {
+ let def_id = match instance.def {
+ ty::InstanceDef::Item(def_id) => def_id,
+ ty::InstanceDef::DropGlue(_, Some(_)) => return false,
+ _ => return true
+ };
+ match tcx.def_key(def_id).disambiguated_data.data {
+ DefPathData::StructCtor |
+ DefPathData::EnumVariant(..) |
+ DefPathData::ClosureExpr => true,
+ _ => false
+ }
}
/// Given a DefId and some Substs, produces the monomorphic item type.
let ty = shared.tcx().item_type(def_id);
monomorphize::apply_param_substs(shared, substs, &ty)
}
+
+/// Return the substituted type of an instance.
+pub fn instance_ty<'a, 'tcx>(shared: &SharedCrateContext<'a, 'tcx>,
+ instance: &ty::Instance<'tcx>)
+ -> Ty<'tcx>
+{
+ let ty = instance.def.def_ty(shared.tcx());
+ monomorphize::apply_param_substs(shared, instance.substs, &ty)
+}
// except according to those terms.
+use back::symbol_names;
use llvm;
use llvm::{SetUnnamedAddr};
use llvm::{ValueRef, True};
use type_::Type;
use type_of;
use rustc::ty;
-use rustc::ty::subst::Substs;
use rustc::hir;
}
pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
- let instance = Instance::mono(ccx.shared(), def_id);
+ let instance = Instance::mono(ccx.tcx(), def_id);
if let Some(&g) = ccx.instances().borrow().get(&instance) {
return g;
}
- let ty = common::def_ty(ccx.shared(), def_id, Substs::empty());
+ let ty = common::instance_ty(ccx.shared(), &instance);
let g = if let Some(id) = ccx.tcx().hir.as_local_node_id(def_id) {
let llty = type_of::type_of(ccx, ty);
hir_map::NodeForeignItem(&hir::ForeignItem {
ref attrs, span, node: hir::ForeignItemStatic(..), ..
}) => {
- let sym = instance.symbol_name(ccx.shared());
+ let sym = symbol_names::symbol_name(instance, ccx.shared());
let g = if let Some(name) =
attr::first_attr_value_str_by_name(&attrs, "linkage") {
// If this is a static with a linkage specified, then we need to handle
g
} else {
- let sym = instance.symbol_name(ccx.shared());
+ let sym = symbol_names::symbol_name(instance, ccx.shared());
// FIXME(nagisa): perhaps the map of externs could be offloaded to llvm somehow?
// FIXME(nagisa): investigate whether it can be changed into define_global
v
};
- let ty = common::def_ty(ccx.shared(), def_id, Substs::empty());
+ let instance = Instance::mono(ccx.tcx(), def_id);
+ let ty = common::instance_ty(ccx.shared(), &instance);
let llty = type_of::type_of(ccx, ty);
let g = if val_llty == llty {
g
use rustc::hir::def_id::DefId;
use rustc::traits;
use debuginfo;
-use callee::Callee;
+use callee;
use base;
use declare;
-use glue::DropGlueKind;
use monomorphize::Instance;
use partitioning::CodegenUnit;
use syntax::ast;
use syntax::symbol::InternedString;
use syntax_pos::DUMMY_SP;
-use abi::{Abi, FnType};
+use abi::Abi;
pub struct Stats {
pub n_glues_created: Cell<usize>,
previous_work_product: Option<WorkProduct>,
codegen_unit: CodegenUnit<'tcx>,
needs_unwind_cleanup_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
- fn_pointer_shims: RefCell<FxHashMap<Ty<'tcx>, ValueRef>>,
- drop_glues: RefCell<FxHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>>,
/// Cache instances of monomorphic and polymorphic items
instances: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
/// Cache generated vtables
&self.translation_items
}
- /// Given the def-id of some item that has no type parameters, make
- /// a suitable "empty substs" for it.
- pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> {
- Substs::for_item(self.tcx(), item_def_id,
- |_, _| self.tcx().mk_region(ty::ReErased),
- |_, _| {
- bug!("empty_substs_for_def_id: {:?} has type parameters", item_def_id)
- })
- }
-
pub fn metadata_symbol_name(&self) -> String {
format!("rust_metadata_{}_{}",
self.link_meta().crate_name,
previous_work_product: previous_work_product,
codegen_unit: codegen_unit,
needs_unwind_cleanup_cache: RefCell::new(FxHashMap()),
- fn_pointer_shims: RefCell::new(FxHashMap()),
- drop_glues: RefCell::new(FxHashMap()),
instances: RefCell::new(FxHashMap()),
vtables: RefCell::new(FxHashMap()),
const_cstr_cache: RefCell::new(FxHashMap()),
&self.local().needs_unwind_cleanup_cache
}
- pub fn fn_pointer_shims(&self) -> &RefCell<FxHashMap<Ty<'tcx>, ValueRef>> {
- &self.local().fn_pointer_shims
- }
-
- pub fn drop_glues<'a>(&'a self)
- -> &'a RefCell<FxHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>> {
- &self.local().drop_glues
- }
-
pub fn instances<'a>(&'a self) -> &'a RefCell<FxHashMap<Instance<'tcx>, ValueRef>> {
&self.local().instances
}
/// Given the def-id of some item that has no type parameters, make
/// a suitable "empty substs" for it.
pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> {
- self.shared().empty_substs_for_def_id(item_def_id)
+ self.tcx().empty_substs_for_def_id(item_def_id)
}
/// Generate a new symbol name with the given prefix. This symbol name must
let tcx = self.tcx();
let llfn = match tcx.lang_items.eh_personality() {
Some(def_id) if !base::wants_msvc_seh(self.sess()) => {
- Callee::def(self, def_id, tcx.intern_substs(&[])).reify(self)
+ callee::resolve_and_get_fn(self, def_id, tcx.intern_substs(&[]))
}
_ => {
let name = if base::wants_msvc_seh(self.sess()) {
let tcx = self.tcx();
assert!(self.sess().target.target.options.custom_unwind_resume);
if let Some(def_id) = tcx.lang_items.eh_unwind_resume() {
- let llfn = Callee::def(self, def_id, tcx.intern_substs(&[])).reify(self);
+ let llfn = callee::resolve_and_get_fn(self, def_id, tcx.intern_substs(&[]));
unwresume.set(Some(llfn));
return llfn;
}
return FunctionDebugContext::DebugInfoDisabled;
}
- for attr in cx.tcx().get_attrs(instance.def).iter() {
+ for attr in instance.def.attrs(cx.tcx()).iter() {
if attr.check_name("no_debug") {
return FunctionDebugContext::FunctionWithoutDebugInfo;
}
};
// Find the enclosing function, in case this is a closure.
- let def_key = cx.tcx().def_key(instance.def);
+ let def_key = cx.tcx().def_key(instance.def_id());
let mut name = def_key.disambiguated_data.data.to_string();
let name_len = name.len();
- let fn_def_id = cx.tcx().closure_base_def_id(instance.def);
+ let fn_def_id = cx.tcx().closure_base_def_id(instance.def_id());
// Get_template_parameters() will append a `<...>` clause to the function
// name if necessary.
&mut name);
// Build the linkage_name out of the item path and "template" parameters.
- let linkage_name = mangled_name_of_item(cx, instance.def, &name[name_len..]);
+ let linkage_name = mangled_name_of_item(cx, instance.def_id(), &name[name_len..]);
let scope_line = span_start(cx, span).line;
- let local_id = cx.tcx().hir.as_local_node_id(instance.def);
+ let local_id = cx.tcx().hir.as_local_node_id(instance.def_id());
let is_local_to_unit = local_id.map_or(false, |id| is_node_local_to_unit(cx, id));
let function_name = CString::new(name).unwrap();
// First, let's see if this is a method within an inherent impl. Because
// if yes, we want to make the result subroutine DIE a child of the
// subroutine's self-type.
- let self_type = cx.tcx().impl_of_method(instance.def).and_then(|impl_def_id| {
+ let self_type = cx.tcx().impl_of_method(instance.def_id()).and_then(|impl_def_id| {
// If the method does *not* belong to a trait, proceed
if cx.tcx().trait_id_of_impl(impl_def_id).is_none() {
let impl_self_ty =
self_type.unwrap_or_else(|| {
namespace::item_namespace(cx, DefId {
- krate: instance.def.krate,
+ krate: instance.def_id().krate,
index: cx.tcx()
- .def_key(instance.def)
+ .def_key(instance.def_id())
.parent
.expect("get_containing_scope: missing parent?")
})
// Code relating to drop glue.
use std;
-use std::iter;
use llvm;
-use llvm::{ValueRef, get_param};
-use middle::lang_items::BoxFreeFnLangItem;
-use rustc::ty::subst::{Substs};
+use llvm::{ValueRef};
use rustc::traits;
-use rustc::ty::{self, layout, AdtDef, AdtKind, Ty, TypeFoldable};
-use rustc::ty::subst::Kind;
-use rustc::mir::tcx::LvalueTy;
-use mir::lvalue::LvalueRef;
-use adt;
-use base::*;
-use callee::Callee;
-use cleanup::CleanupScope;
+use rustc::ty::{self, Ty, TypeFoldable};
use common::*;
use machine::*;
+use meth;
use monomorphize;
-use trans_item::TransItem;
-use tvec;
-use type_of::{type_of, sizing_type_of, align_of};
-use type_::Type;
+use type_of::{sizing_type_of, align_of};
use value::Value;
-use Disr;
use builder::Builder;
-use syntax_pos::DUMMY_SP;
-use mir::lvalue::Alignment;
-
-pub fn trans_exchange_free_ty<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, ptr: LvalueRef<'tcx>) {
- let content_ty = ptr.ty.to_ty(bcx.tcx());
- let def_id = langcall(bcx.tcx(), None, "", BoxFreeFnLangItem);
- let substs = bcx.tcx().mk_substs(iter::once(Kind::from(content_ty)));
- let callee = Callee::def(bcx.ccx, def_id, substs);
-
- let fn_ty = callee.direct_fn_type(bcx.ccx, &[]);
-
- let llret = bcx.call(callee.reify(bcx.ccx),
- &[ptr.llval, ptr.llextra][..1 + ptr.has_extra() as usize], None);
- fn_ty.apply_attrs_callsite(llret);
-}
-
-pub fn get_drop_glue_type<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> {
+pub fn needs_drop_glue<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, t: Ty<'tcx>) -> bool {
assert!(t.is_normalized_for_trans());
let t = scx.tcx().erase_regions(&t);
- // Even if there is no dtor for t, there might be one deeper down and we
- // might need to pass in the vtable ptr.
- if !scx.type_is_sized(t) {
- return t;
- }
-
// FIXME (#22815): note that type_needs_drop conservatively
// approximates in some cases and may say a type expression
// requires drop glue when it actually does not.
//
// (In this case it is not clear whether any harm is done, i.e.
- // erroneously returning `t` in some cases where we could have
- // returned `tcx.types.i8` does not appear unsound. The impact on
+ // erroneously returning `true` in some cases where we could have
+ // returned `false` does not appear unsound. The impact on
// code quality is unknown at this time.)
if !scx.type_needs_drop(t) {
- return scx.tcx().types.i8;
+ return false;
}
match t.sty {
ty::TyAdt(def, _) if def.is_box() => {
let layout = t.layout(&infcx).unwrap();
if layout.size(&scx.tcx().data_layout).bytes() == 0 {
// `Box<ZeroSizeType>` does not allocate.
- scx.tcx().types.i8
+ false
} else {
- t
+ true
}
})
} else {
- t
+ true
}
}
- _ => t
+ _ => true
}
}
-fn drop_ty<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, args: LvalueRef<'tcx>) {
- call_drop_glue(bcx, args, false, None)
-}
-
-pub fn call_drop_glue<'a, 'tcx>(
- bcx: &Builder<'a, 'tcx>,
- mut args: LvalueRef<'tcx>,
- skip_dtor: bool,
- funclet: Option<&'a Funclet>,
-) {
- let t = args.ty.to_ty(bcx.tcx());
- // NB: v is an *alias* of type t here, not a direct value.
- debug!("call_drop_glue(t={:?}, skip_dtor={})", t, skip_dtor);
- if bcx.ccx.shared().type_needs_drop(t) {
- let ccx = bcx.ccx;
- let g = if skip_dtor {
- DropGlueKind::TyContents(t)
- } else {
- DropGlueKind::Ty(t)
- };
- let glue = get_drop_glue_core(ccx, g);
- let glue_type = get_drop_glue_type(ccx.shared(), t);
- if glue_type != t {
- args.llval = bcx.pointercast(args.llval, type_of(ccx, glue_type).ptr_to());
- }
-
- // No drop-hint ==> call standard drop glue
- bcx.call(glue, &[args.llval, args.llextra][..1 + args.has_extra() as usize],
- funclet.map(|b| b.bundle()));
- }
-}
-
-pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ValueRef {
- get_drop_glue_core(ccx, DropGlueKind::Ty(t))
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
-pub enum DropGlueKind<'tcx> {
- /// The normal path; runs the dtor, and then recurs on the contents
- Ty(Ty<'tcx>),
- /// Skips the dtor, if any, for ty; drops the contents directly.
- /// Note that the dtor is only skipped at the most *shallow*
- /// level, namely, an `impl Drop for Ty` itself. So, for example,
- /// if Ty is Newtype(S) then only the Drop impl for Newtype itself
- /// will be skipped, while the Drop impl for S, if any, will be
- /// invoked.
- TyContents(Ty<'tcx>),
-}
-
-impl<'tcx> DropGlueKind<'tcx> {
- pub fn ty(&self) -> Ty<'tcx> {
- match *self { DropGlueKind::Ty(t) | DropGlueKind::TyContents(t) => t }
- }
-
- pub fn map_ty<F>(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>) -> Ty<'tcx>
- {
- match *self {
- DropGlueKind::Ty(t) => DropGlueKind::Ty(f(t)),
- DropGlueKind::TyContents(t) => DropGlueKind::TyContents(f(t)),
- }
- }
-}
-
-fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, g: DropGlueKind<'tcx>) -> ValueRef {
- let g = g.map_ty(|t| get_drop_glue_type(ccx.shared(), t));
- match ccx.drop_glues().borrow().get(&g) {
- Some(&(glue, _)) => glue,
- None => {
- bug!("Could not find drop glue for {:?} -- {} -- {}.",
- g,
- TransItem::DropGlue(g).to_raw_string(),
- ccx.codegen_unit().name());
- }
- }
-}
-
-pub fn implement_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, g: DropGlueKind<'tcx>) {
- assert_eq!(g.ty(), get_drop_glue_type(ccx.shared(), g.ty()));
- let (llfn, _) = ccx.drop_glues().borrow().get(&g).unwrap().clone();
-
- let mut bcx = Builder::new_block(ccx, llfn, "entry-block");
-
- ccx.stats().n_glues_created.set(ccx.stats().n_glues_created.get() + 1);
- // All glue functions take values passed *by alias*; this is a
- // requirement since in many contexts glue is invoked indirectly and
- // the caller has no idea if it's dealing with something that can be
- // passed by value.
- //
- // llfn is expected be declared to take a parameter of the appropriate
- // type, so we don't need to explicitly cast the function parameter.
-
- // NB: v0 is an *alias* of type t here, not a direct value.
- // Only drop the value when it ... well, we used to check for
- // non-null, (and maybe we need to continue doing so), but we now
- // must definitely check for special bit-patterns corresponding to
- // the special dtor markings.
- let t = g.ty();
-
- let value = get_param(llfn, 0);
- let ptr = if ccx.shared().type_is_sized(t) {
- LvalueRef::new_sized_ty(value, t, Alignment::AbiAligned)
- } else {
- LvalueRef::new_unsized_ty(value, get_param(llfn, 1), t, Alignment::AbiAligned)
- };
-
- let skip_dtor = match g {
- DropGlueKind::Ty(_) => false,
- DropGlueKind::TyContents(_) => true
- };
-
- let bcx = match t.sty {
- ty::TyAdt(def, _) if def.is_box() => {
- // Support for Box is built-in as yet and its drop glue is special
- // despite having a dummy Drop impl in the library.
- assert!(!skip_dtor);
- let content_ty = t.boxed_ty();
- let ptr = if !bcx.ccx.shared().type_is_sized(content_ty) {
- let llbox = bcx.load(get_dataptr(&bcx, ptr.llval), None);
- let info = bcx.load(get_meta(&bcx, ptr.llval), None);
- LvalueRef::new_unsized_ty(llbox, info, content_ty, Alignment::AbiAligned)
- } else {
- LvalueRef::new_sized_ty(
- bcx.load(ptr.llval, None),
- content_ty, Alignment::AbiAligned)
- };
- drop_ty(&bcx, ptr);
- trans_exchange_free_ty(&bcx, ptr);
- bcx
- }
- ty::TyDynamic(..) => {
- // No support in vtable for distinguishing destroying with
- // versus without calling Drop::drop. Assert caller is
- // okay with always calling the Drop impl, if any.
- assert!(!skip_dtor);
- let dtor = bcx.load(ptr.llextra, None);
- bcx.call(dtor, &[ptr.llval], None);
- bcx
- }
- ty::TyAdt(def, ..) if def.has_dtor(bcx.tcx()) && !skip_dtor => {
- let shallow_drop = def.is_union();
- let tcx = bcx.tcx();
-
- let def = t.ty_adt_def().unwrap();
-
- // Be sure to put the contents into a scope so we can use an invoke
- // instruction to call the user destructor but still call the field
- // destructors if the user destructor panics.
- //
- // FIXME (#14875) panic-in-drop semantics might be unsupported; we
- // might well consider changing below to more direct code.
- // Issue #23611: schedule cleanup of contents, re-inspecting the
- // discriminant (if any) in case of variant swap in drop code.
- let contents_scope = if !shallow_drop {
- CleanupScope::schedule_drop_adt_contents(&bcx, ptr)
- } else {
- CleanupScope::noop()
- };
-
- let trait_ref = ty::Binder(ty::TraitRef {
- def_id: tcx.lang_items.drop_trait().unwrap(),
- substs: tcx.mk_substs_trait(t, &[])
- });
- let vtbl = match fulfill_obligation(bcx.ccx.shared(), DUMMY_SP, trait_ref) {
- traits::VtableImpl(data) => data,
- _ => bug!("dtor for {:?} is not an impl???", t)
- };
- let dtor_did = def.destructor(tcx).unwrap().did;
- let callee = Callee::def(bcx.ccx, dtor_did, vtbl.substs);
- let fn_ty = callee.direct_fn_type(bcx.ccx, &[]);
- let llret;
- let args = &[ptr.llval, ptr.llextra][..1 + ptr.has_extra() as usize];
- if let Some(landing_pad) = contents_scope.landing_pad {
- let normal_bcx = bcx.build_sibling_block("normal-return");
- llret = bcx.invoke(callee.reify(ccx), args, normal_bcx.llbb(), landing_pad, None);
- bcx = normal_bcx;
- } else {
- llret = bcx.call(callee.reify(bcx.ccx), args, None);
- }
- fn_ty.apply_attrs_callsite(llret);
- contents_scope.trans(&bcx);
- bcx
- }
- ty::TyAdt(def, ..) if def.is_union() => {
- bcx
- }
- _ => {
- if bcx.ccx.shared().type_needs_drop(t) {
- drop_structural_ty(bcx, ptr)
- } else {
- bcx
- }
- }
- };
- bcx.ret_void();
-}
-
pub fn size_and_align_of_dst<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, info: ValueRef)
-> (ValueRef, ValueRef) {
debug!("calculate size of DST: {}; with lost info: {:?}",
(size, align)
}
ty::TyDynamic(..) => {
- // info points to the vtable and the second entry in the vtable is the
- // dynamic size of the object.
- let info = bcx.pointercast(info, Type::int(bcx.ccx).ptr_to());
- let size_ptr = bcx.gepi(info, &[1]);
- let align_ptr = bcx.gepi(info, &[2]);
-
- let size = bcx.load(size_ptr, None);
- let align = bcx.load(align_ptr, None);
-
- // Vtable loads are invariant
- bcx.set_invariant_load(size);
- bcx.set_invariant_load(align);
-
- (size, align)
+ // load size/align from vtable
+ (meth::SIZE.get_usize(bcx, info), meth::ALIGN.get_usize(bcx, info))
}
ty::TySlice(_) | ty::TyStr => {
let unit_ty = t.sequence_element_type(bcx.tcx());
_ => bug!("Unexpected unsized type, found {}", t)
}
}
-
-// Iterates through the elements of a structural type, dropping them.
-fn drop_structural_ty<'a, 'tcx>(
- cx: Builder<'a, 'tcx>,
- mut ptr: LvalueRef<'tcx>
-) -> Builder<'a, 'tcx> {
- fn iter_variant_fields<'a, 'tcx>(
- cx: &'a Builder<'a, 'tcx>,
- av: LvalueRef<'tcx>,
- adt_def: &'tcx AdtDef,
- variant_index: usize,
- substs: &'tcx Substs<'tcx>
- ) {
- let variant = &adt_def.variants[variant_index];
- let tcx = cx.tcx();
- for (i, field) in variant.fields.iter().enumerate() {
- let arg = monomorphize::field_ty(tcx, substs, field);
- let (field_ptr, align) = av.trans_field_ptr(&cx, i);
- drop_ty(&cx, LvalueRef::new_sized_ty(field_ptr, arg, align));
- }
- }
-
- let mut cx = cx;
- let t = ptr.ty.to_ty(cx.tcx());
- match t.sty {
- ty::TyClosure(def_id, substs) => {
- for (i, upvar_ty) in substs.upvar_tys(def_id, cx.tcx()).enumerate() {
- let (llupvar, align) = ptr.trans_field_ptr(&cx, i);
- drop_ty(&cx, LvalueRef::new_sized_ty(llupvar, upvar_ty, align));
- }
- }
- ty::TyArray(_, n) => {
- let base = get_dataptr(&cx, ptr.llval);
- let len = C_uint(cx.ccx, n);
- let unit_ty = t.sequence_element_type(cx.tcx());
- cx = tvec::slice_for_each(&cx, base, unit_ty, len,
- |bb, vv| drop_ty(bb, LvalueRef::new_sized_ty(vv, unit_ty, ptr.alignment)));
- }
- ty::TySlice(_) | ty::TyStr => {
- let unit_ty = t.sequence_element_type(cx.tcx());
- cx = tvec::slice_for_each(&cx, ptr.llval, unit_ty, ptr.llextra,
- |bb, vv| drop_ty(bb, LvalueRef::new_sized_ty(vv, unit_ty, ptr.alignment)));
- }
- ty::TyTuple(ref args, _) => {
- for (i, arg) in args.iter().enumerate() {
- let (llfld_a, align) = ptr.trans_field_ptr(&cx, i);
- drop_ty(&cx, LvalueRef::new_sized_ty(llfld_a, *arg, align));
- }
- }
- ty::TyAdt(adt, substs) => match adt.adt_kind() {
- AdtKind::Struct => {
- for (i, field) in adt.variants[0].fields.iter().enumerate() {
- let field_ty = monomorphize::field_ty(cx.tcx(), substs, field);
- let (llval, align) = ptr.trans_field_ptr(&cx, i);
- let field_ptr = if cx.ccx.shared().type_is_sized(field_ty) {
- LvalueRef::new_sized_ty(llval, field_ty, align)
- } else {
- LvalueRef::new_unsized_ty(llval, ptr.llextra, field_ty, align)
- };
- drop_ty(&cx, field_ptr);
- }
- }
- AdtKind::Union => {
- bug!("Union in `glue::drop_structural_ty`");
- }
- AdtKind::Enum => {
- let n_variants = adt.variants.len();
-
- // NB: we must hit the discriminant first so that structural
- // comparison know not to proceed when the discriminants differ.
-
- // Obtain a representation of the discriminant sufficient to translate
- // destructuring; this may or may not involve the actual discriminant.
- let l = cx.ccx.layout_of(t);
- match *l {
- layout::Univariant { .. } |
- layout::UntaggedUnion { .. } => {
- if n_variants != 0 {
- assert!(n_variants == 1);
- ptr.ty = LvalueTy::Downcast {
- adt_def: adt,
- substs: substs,
- variant_index: 0,
- };
- iter_variant_fields(&cx, ptr, &adt, 0, substs);
- }
- }
- layout::CEnum { .. } |
- layout::General { .. } |
- layout::RawNullablePointer { .. } |
- layout::StructWrappedNullablePointer { .. } => {
- let lldiscrim_a = adt::trans_get_discr(
- &cx, t, ptr.llval, ptr.alignment, None, false);
-
- // Create a fall-through basic block for the "else" case of
- // the switch instruction we're about to generate. Note that
- // we do **not** use an Unreachable instruction here, even
- // though most of the time this basic block will never be hit.
- //
- // When an enum is dropped it's contents are currently
- // overwritten to DTOR_DONE, which means the discriminant
- // could have changed value to something not within the actual
- // range of the discriminant. Currently this function is only
- // used for drop glue so in this case we just return quickly
- // from the outer function, and any other use case will only
- // call this for an already-valid enum in which case the `ret
- // void` will never be hit.
- let ret_void_cx = cx.build_sibling_block("enum-iter-ret-void");
- ret_void_cx.ret_void();
- let llswitch = cx.switch(lldiscrim_a, ret_void_cx.llbb(), n_variants);
- let next_cx = cx.build_sibling_block("enum-iter-next");
-
- for (i, discr) in adt.discriminants(cx.tcx()).enumerate() {
- let variant_cx_name = format!("enum-iter-variant-{}", i);
- let variant_cx = cx.build_sibling_block(&variant_cx_name);
- let case_val = adt::trans_case(&cx, t, Disr::from(discr));
- variant_cx.add_case(llswitch, case_val, variant_cx.llbb());
- ptr.ty = LvalueTy::Downcast {
- adt_def: adt,
- substs: substs,
- variant_index: i,
- };
- iter_variant_fields(&variant_cx, ptr, &adt, i, substs);
- variant_cx.br(next_cx.llbb());
- }
- cx = next_cx;
- }
- _ => bug!("{} is not an enum.", t),
- }
- }
- },
-
- _ => {
- cx.sess().unimpl(&format!("type in drop_structural_ty: {}", t))
- }
- }
- return cx;
-}
"ctlz" | "cttz" | "ctpop" | "bswap" |
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" |
"overflowing_add" | "overflowing_sub" | "overflowing_mul" |
- "unchecked_div" | "unchecked_rem" => {
+ "unchecked_div" | "unchecked_rem" | "unchecked_shl" | "unchecked_shr" => {
let sty = &arg_tys[0].sty;
match int_type_width_signed(sty, ccx) {
Some((width, signed)) =>
} else {
bcx.urem(llargs[0], llargs[1])
},
+ "unchecked_shl" => bcx.shl(llargs[0], llargs[1]),
+ "unchecked_shr" =>
+ if signed {
+ bcx.ashr(llargs[0], llargs[1])
+ } else {
+ bcx.lshr(llargs[0], llargs[1])
+ },
_ => bug!(),
},
None => {
#![feature(box_syntax)]
#![feature(const_fn)]
#![feature(custom_attribute)]
+#![cfg_attr(stage0, feature(field_init_shorthand))]
#![allow(unused_attributes)]
#![feature(i128_type)]
#![feature(libc)]
mod cabi_x86_64;
mod cabi_x86_win64;
mod callee;
-mod cleanup;
mod collector;
mod common;
mod consts;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use attributes;
-use llvm::{ValueRef, get_params};
+use llvm::ValueRef;
use rustc::traits;
-use callee::{Callee, CalleeData};
+use callee;
use common::*;
use builder::Builder;
use consts;
-use declare;
-use glue;
use machine;
-use monomorphize::Instance;
+use monomorphize;
use type_::Type;
use type_of::*;
use value::Value;
use rustc::ty;
-// drop_glue pointer, size, align.
-const VTABLE_OFFSET: usize = 3;
-
-/// Extracts a method from a trait object's vtable, at the specified index.
-pub fn get_virtual_method<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
- llvtable: ValueRef,
- vtable_index: usize) -> ValueRef {
- // Load the data pointer from the object.
- debug!("get_virtual_method(vtable_index={}, llvtable={:?})",
- vtable_index, Value(llvtable));
-
- let ptr = bcx.load_nonnull(bcx.gepi(llvtable, &[vtable_index + VTABLE_OFFSET]), None);
- // Vtable loads are invariant
- bcx.set_invariant_load(ptr);
- ptr
-}
+#[derive(Copy, Clone, Debug)]
+pub struct VirtualIndex(usize);
-/// Generate a shim function that allows an object type like `SomeTrait` to
-/// implement the type `SomeTrait`. Imagine a trait definition:
-///
-/// trait SomeTrait { fn get(&self) -> i32; ... }
-///
-/// And a generic bit of code:
-///
-/// fn foo<T:SomeTrait>(t: &T) {
-/// let x = SomeTrait::get;
-/// x(t)
-/// }
-///
-/// What is the value of `x` when `foo` is invoked with `T=SomeTrait`?
-/// The answer is that it is a shim function generated by this routine:
-///
-/// fn shim(t: &SomeTrait) -> i32 {
-/// // ... call t.get() virtually ...
-/// }
-///
-/// In fact, all virtual calls can be thought of as normal trait calls
-/// that go through this shim function.
-pub fn trans_object_shim<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>,
- callee: Callee<'tcx>)
- -> ValueRef {
- debug!("trans_object_shim({:?})", callee);
-
- let function_name = match callee.ty.sty {
- ty::TyFnDef(def_id, substs, _) => {
- let instance = Instance::new(def_id, substs);
- instance.symbol_name(ccx.shared())
- }
- _ => bug!()
- };
-
- let llfn = declare::define_internal_fn(ccx, &function_name, callee.ty);
- attributes::set_frame_pointer_elimination(ccx, llfn);
-
- let bcx = Builder::new_block(ccx, llfn, "entry-block");
-
- let mut llargs = get_params(llfn);
- let fn_ret = callee.ty.fn_ret();
- let fn_ty = callee.direct_fn_type(ccx, &[]);
-
- let fn_ptr = match callee.data {
- CalleeData::Virtual(idx) => {
- let fn_ptr = get_virtual_method(&bcx,
- llargs.remove(fn_ty.ret.is_indirect() as usize + 1), idx);
- let llty = fn_ty.llvm_type(bcx.ccx).ptr_to();
- bcx.pointercast(fn_ptr, llty)
- },
- _ => bug!("trans_object_shim called with non-virtual callee"),
- };
- let llret = bcx.call(fn_ptr, &llargs, None);
- fn_ty.apply_attrs_callsite(llret);
-
- if fn_ret.0.is_never() {
- bcx.unreachable();
- } else {
- if fn_ty.ret.is_indirect() || fn_ty.ret.is_ignore() {
- bcx.ret_void();
- } else {
- bcx.ret(llret);
- }
+pub const DESTRUCTOR: VirtualIndex = VirtualIndex(0);
+pub const SIZE: VirtualIndex = VirtualIndex(1);
+pub const ALIGN: VirtualIndex = VirtualIndex(2);
+
+impl<'a, 'tcx> VirtualIndex {
+ pub fn from_index(index: usize) -> Self {
+ VirtualIndex(index + 3)
+ }
+
+ pub fn get_fn(self, bcx: &Builder<'a, 'tcx>, llvtable: ValueRef) -> ValueRef {
+ // Load the data pointer from the object.
+ debug!("get_fn({:?}, {:?})", Value(llvtable), self);
+
+ let ptr = bcx.load_nonnull(bcx.gepi(llvtable, &[self.0]), None);
+ // Vtable loads are invariant
+ bcx.set_invariant_load(ptr);
+ ptr
}
- llfn
+ pub fn get_usize(self, bcx: &Builder<'a, 'tcx>, llvtable: ValueRef) -> ValueRef {
+ // Load the data pointer from the object.
+ debug!("get_int({:?}, {:?})", Value(llvtable), self);
+
+ let llvtable = bcx.pointercast(llvtable, Type::int(bcx.ccx).ptr_to());
+ let ptr = bcx.load(bcx.gepi(llvtable, &[self.0]), None);
+ // Vtable loads are invariant
+ bcx.set_invariant_load(ptr);
+ ptr
+ }
}
/// Creates a dynamic vtable for the given type and vtable origin.
let align = align_of(ccx, ty);
let mut components: Vec<_> = [
- // Generate a destructor for the vtable.
- glue::get_drop_glue(ccx, ty),
+ callee::get_fn(ccx, monomorphize::resolve_drop_in_place(ccx.shared(), ty)),
C_uint(ccx, size),
C_uint(ccx, align)
].iter().cloned().collect();
let trait_ref = trait_ref.with_self_ty(tcx, ty);
let methods = traits::get_vtable_methods(tcx, trait_ref).map(|opt_mth| {
opt_mth.map_or(nullptr, |(def_id, substs)| {
- Callee::def(ccx, def_id, substs).reify(ccx)
+ callee::resolve_and_get_fn(ccx, def_id, substs)
})
});
components.extend(methods);
use rustc::mir;
use abi::{Abi, FnType, ArgType};
use base::{self, Lifetime};
-use callee::{Callee, CalleeData, Fn, Intrinsic, NamedTupleConstructor, Virtual};
+use callee;
use builder::Builder;
use common::{self, Funclet};
-use common::{C_bool, C_str_slice, C_struct, C_u32, C_undef};
+use common::{C_bool, C_str_slice, C_struct, C_u32, C_uint, C_undef};
use consts;
use machine::llalign_of_min;
use meth;
+use monomorphize;
+use tvec;
use type_of::{self, align_of};
-use glue;
use type_::Type;
use rustc_data_structures::indexed_vec::IndexVec;
mir::TerminatorKind::Drop { ref location, target, unwind } => {
let ty = location.ty(&self.mir, bcx.tcx()).to_ty(bcx.tcx());
let ty = self.monomorphize(&ty);
+ let drop_fn = monomorphize::resolve_drop_in_place(bcx.ccx.shared(), ty);
- // Double check for necessity to drop
- if !bcx.ccx.shared().type_needs_drop(ty) {
+ if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
+ // we don't actually need to drop anything.
funclet_br(self, bcx, target);
- return;
+ return
}
- let mut lvalue = self.trans_lvalue(&bcx, location);
- let drop_fn = glue::get_drop_glue(bcx.ccx, ty);
- let drop_ty = glue::get_drop_glue_type(bcx.ccx.shared(), ty);
- if bcx.ccx.shared().type_is_sized(ty) && drop_ty != ty {
- lvalue.llval = bcx.pointercast(
- lvalue.llval, type_of::type_of(bcx.ccx, drop_ty).ptr_to());
- }
- let args = &[lvalue.llval, lvalue.llextra][..1 + lvalue.has_extra() as usize];
+ let lvalue = self.trans_lvalue(&bcx, location);
+ let (drop_fn, need_extra) = match ty.sty {
+ ty::TyDynamic(..) => (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra),
+ false),
+ ty::TyArray(ety, _) | ty::TySlice(ety) => {
+ // FIXME: handle panics
+ let drop_fn = monomorphize::resolve_drop_in_place(
+ bcx.ccx.shared(), ety);
+ let drop_fn = callee::get_fn(bcx.ccx, drop_fn);
+ let bcx = tvec::slice_for_each(
+ &bcx,
+ lvalue.project_index(&bcx, C_uint(bcx.ccx, 0u64)),
+ ety,
+ lvalue.len(bcx.ccx),
+ |bcx, llval, loop_bb| {
+ self.set_debug_loc(&bcx, terminator.source_info);
+ if let Some(unwind) = unwind {
+ bcx.invoke(
+ drop_fn,
+ &[llval],
+ loop_bb,
+ llblock(self, unwind),
+ cleanup_bundle
+ );
+ } else {
+ bcx.call(drop_fn, &[llval], cleanup_bundle);
+ bcx.br(loop_bb);
+ }
+ });
+ funclet_br(self, bcx, target);
+ return
+ }
+ _ => (callee::get_fn(bcx.ccx, drop_fn), lvalue.has_extra())
+ };
+ let args = &[lvalue.llval, lvalue.llextra][..1 + need_extra as usize];
if let Some(unwind) = unwind {
bcx.invoke(
drop_fn,
// Obtain the panic entry point.
let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
- let callee = Callee::def(bcx.ccx, def_id,
- bcx.ccx.empty_substs_for_def_id(def_id));
- let llfn = callee.reify(bcx.ccx);
+ let instance = ty::Instance::mono(bcx.tcx(), def_id);
+ let llfn = callee::get_fn(bcx.ccx, instance);
// Translate the actual panic invoke/call.
if let Some(unwind) = cleanup {
// Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
let callee = self.trans_operand(&bcx, func);
- let (mut callee, sig) = match callee.ty.sty {
+ let (instance, mut llfn, sig) = match callee.ty.sty {
ty::TyFnDef(def_id, substs, sig) => {
- (Callee::def(bcx.ccx, def_id, substs), sig)
+ (Some(monomorphize::resolve(bcx.ccx.shared(), def_id, substs)),
+ None,
+ sig)
}
ty::TyFnPtr(sig) => {
- (Callee {
- data: Fn(callee.immediate()),
- ty: callee.ty
- }, sig)
+ (None,
+ Some(callee.immediate()),
+ sig)
}
_ => bug!("{} is not callable", callee.ty)
};
-
+ let def = instance.map(|i| i.def);
let sig = bcx.tcx().erase_late_bound_regions_and_normalize(&sig);
let abi = sig.abi;
// Handle intrinsics old trans wants Expr's for, ourselves.
- let intrinsic = match (&callee.ty.sty, &callee.data) {
- (&ty::TyFnDef(def_id, ..), &Intrinsic) => {
- Some(bcx.tcx().item_name(def_id).as_str())
- }
+ let intrinsic = match def {
+ Some(ty::InstanceDef::Intrinsic(def_id))
+ => Some(bcx.tcx().item_name(def_id).as_str()),
_ => None
};
- let mut intrinsic = intrinsic.as_ref().map(|s| &s[..]);
+ let intrinsic = intrinsic.as_ref().map(|s| &s[..]);
if intrinsic == Some("move_val_init") {
let &(_, target) = destination.as_ref().unwrap();
let op_ty = op_arg.ty(&self.mir, bcx.tcx());
self.monomorphize(&op_ty)
}).collect::<Vec<_>>();
- let fn_ty = callee.direct_fn_type(bcx.ccx, &extra_args);
-
- if intrinsic == Some("drop_in_place") {
- let &(_, target) = destination.as_ref().unwrap();
- let ty = if let ty::TyFnDef(_, substs, _) = callee.ty.sty {
- substs.type_at(0)
- } else {
- bug!("Unexpected ty: {}", callee.ty);
- };
- // Double check for necessity to drop
- if !bcx.ccx.shared().type_needs_drop(ty) {
+ let fn_ty = match def {
+ Some(ty::InstanceDef::Virtual(..)) => {
+ FnType::new_vtable(bcx.ccx, sig, &extra_args)
+ }
+ Some(ty::InstanceDef::DropGlue(_, None)) => {
+ // empty drop glue - a nop.
+ let &(_, target) = destination.as_ref().unwrap();
funclet_br(self, bcx, target);
return;
}
-
- let drop_fn = glue::get_drop_glue(bcx.ccx, ty);
- let llty = fn_ty.llvm_type(bcx.ccx).ptr_to();
- callee.data = Fn(bcx.pointercast(drop_fn, llty));
- intrinsic = None;
- }
+ _ => FnType::new(bcx.ccx, sig, &extra_args)
+ };
// The arguments we'll be passing. Plus one to account for outptr, if used.
let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
// Prepare the return value destination
let ret_dest = if let Some((ref dest, _)) = *destination {
- let is_intrinsic = if let Intrinsic = callee.data {
- true
- } else {
- false
- };
- self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs, is_intrinsic)
+ let is_intrinsic = intrinsic.is_some();
+ self.make_return_dest(&bcx, dest, &fn_ty.ret, &mut llargs,
+ is_intrinsic)
} else {
ReturnDest::Nothing
};
let op = self.trans_operand(&bcx, arg);
self.trans_argument(&bcx, op, &mut llargs, &fn_ty,
- &mut idx, &mut callee.data);
+ &mut idx, &mut llfn, &def);
}
if let Some(tup) = untuple {
self.trans_arguments_untupled(&bcx, tup, &mut llargs, &fn_ty,
- &mut idx, &mut callee.data)
+ &mut idx, &mut llfn, &def)
}
- let fn_ptr = match callee.data {
- NamedTupleConstructor(_) => {
- // FIXME translate this like mir::Rvalue::Aggregate.
- callee.reify(bcx.ccx)
- }
- Intrinsic => {
- use intrinsic::trans_intrinsic_call;
-
- let (dest, llargs) = match ret_dest {
- _ if fn_ty.ret.is_indirect() => {
- (llargs[0], &llargs[1..])
- }
- ReturnDest::Nothing => {
- (C_undef(fn_ty.ret.original_ty.ptr_to()), &llargs[..])
- }
- ReturnDest::IndirectOperand(dst, _) |
- ReturnDest::Store(dst) => (dst, &llargs[..]),
- ReturnDest::DirectOperand(_) =>
- bug!("Cannot use direct operand with an intrinsic call")
- };
-
- trans_intrinsic_call(&bcx, callee.ty, &fn_ty, &llargs, dest,
- terminator.source_info.span);
+ if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
+ use intrinsic::trans_intrinsic_call;
- if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
- // Make a fake operand for store_return
- let op = OperandRef {
- val: Ref(dst, Alignment::AbiAligned),
- ty: sig.output(),
- };
- self.store_return(&bcx, ret_dest, fn_ty.ret, op);
+ let (dest, llargs) = match ret_dest {
+ _ if fn_ty.ret.is_indirect() => {
+ (llargs[0], &llargs[1..])
}
-
- if let Some((_, target)) = *destination {
- funclet_br(self, bcx, target);
- } else {
- bcx.unreachable();
+ ReturnDest::Nothing => {
+ (C_undef(fn_ty.ret.original_ty.ptr_to()), &llargs[..])
}
+ ReturnDest::IndirectOperand(dst, _) |
+ ReturnDest::Store(dst) => (dst, &llargs[..]),
+ ReturnDest::DirectOperand(_) =>
+ bug!("Cannot use direct operand with an intrinsic call")
+ };
- return;
+ let callee_ty = common::instance_ty(
+ bcx.ccx.shared(), instance.as_ref().unwrap());
+ trans_intrinsic_call(&bcx, callee_ty, &fn_ty, &llargs, dest,
+ terminator.source_info.span);
+
+ if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
+ // Make a fake operand for store_return
+ let op = OperandRef {
+ val: Ref(dst, Alignment::AbiAligned),
+ ty: sig.output(),
+ };
+ self.store_return(&bcx, ret_dest, fn_ty.ret, op);
+ }
+
+ if let Some((_, target)) = *destination {
+ funclet_br(self, bcx, target);
+ } else {
+ bcx.unreachable();
}
- Fn(f) => f,
- Virtual(_) => bug!("Virtual fn ptr not extracted")
+
+ return;
+ }
+
+ let fn_ptr = match (llfn, instance) {
+ (Some(llfn), _) => llfn,
+ (None, Some(instance)) => callee::get_fn(bcx.ccx, instance),
+ _ => span_bug!(span, "no llfn for call"),
};
// Many different ways to call a function handled here
llargs: &mut Vec<ValueRef>,
fn_ty: &FnType,
next_idx: &mut usize,
- callee: &mut CalleeData) {
+ llfn: &mut Option<ValueRef>,
+ def: &Option<ty::InstanceDef<'tcx>>) {
if let Pair(a, b) = op.val {
// Treat the values in a fat pointer separately.
if common::type_is_fat_ptr(bcx.ccx, op.ty) {
let (ptr, meta) = (a, b);
if *next_idx == 0 {
- if let Virtual(idx) = *callee {
- let llfn = meth::get_virtual_method(bcx, meta, idx);
+ if let Some(ty::InstanceDef::Virtual(_, idx)) = *def {
+ let llmeth = meth::VirtualIndex::from_index(idx).get_fn(bcx, meta);
let llty = fn_ty.llvm_type(bcx.ccx).ptr_to();
- *callee = Fn(bcx.pointercast(llfn, llty));
+ *llfn = Some(bcx.pointercast(llmeth, llty));
}
}
// We won't be checking the type again.
ty: bcx.tcx().types.err
};
- self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, callee);
- self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, callee);
+ self.trans_argument(bcx, imm_op(ptr), llargs, fn_ty, next_idx, llfn, def);
+ self.trans_argument(bcx, imm_op(meta), llargs, fn_ty, next_idx, llfn, def);
return;
}
}
llargs: &mut Vec<ValueRef>,
fn_ty: &FnType,
next_idx: &mut usize,
- callee: &mut CalleeData) {
+ llfn: &mut Option<ValueRef>,
+ def: &Option<ty::InstanceDef<'tcx>>) {
let tuple = self.trans_operand(bcx, operand);
let arg_types = match tuple.ty.sty {
val: val,
ty: ty
};
- self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
+ self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
}
}
val: Immediate(elem),
ty: ty
};
- self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
+ self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
}
}
Pair(a, b) => {
val: Immediate(elem),
ty: ty
};
- self.trans_argument(bcx, op, llargs, fn_ty, next_idx, callee);
+ self.trans_argument(bcx, op, llargs, fn_ty, next_idx, llfn, def);
}
}
}
return block;
}
+ let block = self.blocks[target_bb];
+ let landing_pad = self.landing_pad_uncached(block);
+ self.landing_pads[target_bb] = Some(landing_pad);
+ landing_pad
+ }
+
+ fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
if base::wants_msvc_seh(self.ccx.sess()) {
- return self.blocks[target_bb];
+ return target_bb;
}
- let target = self.get_builder(target_bb);
-
let bcx = self.new_block("cleanup");
- self.landing_pads[target_bb] = Some(bcx.llbb());
let ccx = bcx.ccx;
let llpersonality = self.ccx.eh_personality();
bcx.set_cleanup(llretval);
let slot = self.get_personality_slot(&bcx);
bcx.store(llretval, slot, None);
- bcx.br(target.llbb());
+ bcx.br(target_bb);
bcx.llbb()
}
use rustc::ty::subst::{Kind, Substs, Subst};
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use {abi, adt, base, Disr, machine};
-use callee::Callee;
+use callee;
use builder::Builder;
use common::{self, CrateContext, const_get_elt, val_ty};
use common::{C_array, C_bool, C_bytes, C_floating_f64, C_integral, C_big_integral};
use common::{C_null, C_struct, C_str_slice, C_undef, C_uint, C_vector, is_undef};
use common::const_to_opt_u128;
use consts;
-use monomorphize::{self, Instance};
+use monomorphize;
use type_of;
use type_::Type;
use value::Value;
}
fn trans_def(ccx: &'a CrateContext<'a, 'tcx>,
- instance: Instance<'tcx>,
+ def_id: DefId,
+ substs: &'tcx Substs<'tcx>,
args: IndexVec<mir::Local, Const<'tcx>>)
-> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
- let instance = instance.resolve_const(ccx.shared());
- let mir = ccx.tcx().item_mir(instance.def);
+ let instance = monomorphize::resolve(ccx.shared(), def_id, substs);
+ let mir = ccx.tcx().instance_mir(instance.def);
MirConstContext::new(ccx, &mir, instance.substs, args).trans()
}
mir::TerminatorKind::Call { ref func, ref args, ref destination, .. } => {
let fn_ty = func.ty(self.mir, tcx);
let fn_ty = self.monomorphize(&fn_ty);
- let instance = match fn_ty.sty {
- ty::TyFnDef(def_id, substs, _) => {
- Instance::new(def_id, substs)
- }
+ let (def_id, substs) = match fn_ty.sty {
+ ty::TyFnDef(def_id, substs, _) => (def_id, substs),
_ => span_bug!(span, "calling {:?} (of type {}) in constant",
func, fn_ty)
};
}
}
if let Some((ref dest, target)) = *destination {
- match MirConstContext::trans_def(self.ccx, instance, const_args) {
+ match MirConstContext::trans_def(self.ccx, def_id, substs, const_args) {
Ok(value) => self.store(dest, value, span),
Err(err) => if failure.is_ok() { failure = Err(err); }
}
}
let substs = self.monomorphize(&substs);
- let instance = Instance::new(def_id, substs);
- MirConstContext::trans_def(self.ccx, instance, IndexVec::new())
+ MirConstContext::trans_def(self.ccx, def_id, substs, IndexVec::new())
}
mir::Literal::Promoted { index } => {
let mir = &self.mir.promoted[index];
mir::CastKind::ReifyFnPointer => {
match operand.ty.sty {
ty::TyFnDef(def_id, substs, _) => {
- Callee::def(self.ccx, def_id, substs)
- .reify(self.ccx)
+ callee::resolve_and_get_fn(self.ccx, def_id, substs)
}
_ => {
span_bug!(span, "{} cannot be reified to a fn ptr",
// Now create its substs [Closure, Tuple]
let input = tcx.closure_type(def_id)
.subst(tcx, substs.substs).input(0);
- let substs = tcx.mk_substs([operand.ty, input.skip_binder()]
+ let input = tcx.erase_late_bound_regions_and_normalize(&input);
+ let substs = tcx.mk_substs([operand.ty, input]
.iter().cloned().map(Kind::from));
- Callee::def(self.ccx, call_once, substs)
- .reify(self.ccx)
+ callee::resolve_and_get_fn(self.ccx, call_once, substs)
}
_ => {
bug!("{} cannot be cast to a fn ptr", operand.ty)
}
let substs = self.monomorphize(&substs);
- let instance = Instance::new(def_id, substs);
- MirConstContext::trans_def(bcx.ccx, instance, IndexVec::new())
+ MirConstContext::trans_def(bcx.ccx, def_id, substs, IndexVec::new())
}
mir::Literal::Promoted { index } => {
let mir = &self.mir.promoted[index];
def_id: DefId)
-> Result<ValueRef, ConstEvalErr<'tcx>>
{
- let instance = Instance::mono(ccx.shared(), def_id);
- MirConstContext::trans_def(ccx, instance, IndexVec::new()).map(|c| c.llval)
+ MirConstContext::trans_def(ccx, def_id, Substs::empty(), IndexVec::new())
+ .map(|c| c.llval)
}
/// Construct a constant value, suitable for initializing a
use std::ops;
use super::{MirContext, LocalRef};
-use super::operand::OperandValue;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Alignment {
LvalueRef::new_sized(llval, LvalueTy::from_ty(ty), alignment)
}
- pub fn new_unsized_ty(llval: ValueRef, llextra: ValueRef, ty: Ty<'tcx>, alignment: Alignment)
- -> LvalueRef<'tcx> {
- LvalueRef {
- llval: llval,
- llextra: llextra,
- ty: LvalueTy::from_ty(ty),
- alignment: alignment,
- }
- }
-
pub fn alloca(bcx: &Builder<'a, 'tcx>, ty: Ty<'tcx>, name: &str) -> LvalueRef<'tcx> {
debug!("alloca({:?}: {:?})", name, ty);
let tmp = bcx.alloca(type_of::type_of(bcx.ccx, ty), name);
_ => bug!("element access in type without elements: {} represented as {:#?}", t, l)
}
}
+
+ pub fn project_index(&self, bcx: &Builder<'a, 'tcx>, llindex: ValueRef) -> ValueRef {
+ if let ty::TySlice(_) = self.ty.to_ty(bcx.tcx()).sty {
+ // Slices already point to the array element type.
+ bcx.inbounds_gep(self.llval, &[llindex])
+ } else {
+ let zero = common::C_uint(bcx.ccx, 0u64);
+ bcx.inbounds_gep(self.llval, &[zero, llindex])
+ }
+ }
}
impl<'a, 'tcx> MirContext<'a, 'tcx> {
elem: mir::ProjectionElem::Deref
}) => {
// Load the pointer from its location.
- let ptr = self.trans_consume(bcx, base);
- let projected_ty = LvalueTy::from_ty(ptr.ty)
- .projection_ty(tcx, &mir::ProjectionElem::Deref);
- let projected_ty = self.monomorphize(&projected_ty);
- let (llptr, llextra) = match ptr.val {
- OperandValue::Immediate(llptr) => (llptr, ptr::null_mut()),
- OperandValue::Pair(llptr, llextra) => (llptr, llextra),
- OperandValue::Ref(..) => bug!("Deref of by-Ref type {:?}", ptr.ty)
- };
- LvalueRef {
- llval: llptr,
- llextra: llextra,
- ty: projected_ty,
- alignment: Alignment::AbiAligned,
- }
+ self.trans_consume(bcx, base).deref()
}
mir::Lvalue::Projection(ref projection) => {
let tr_base = self.trans_lvalue(bcx, &projection.base);
let projected_ty = self.monomorphize(&projected_ty);
let align = tr_base.alignment;
- let project_index = |llindex| {
- let element = if let ty::TySlice(_) = tr_base.ty.to_ty(tcx).sty {
- // Slices already point to the array element type.
- bcx.inbounds_gep(tr_base.llval, &[llindex])
- } else {
- let zero = common::C_uint(bcx.ccx, 0u64);
- bcx.inbounds_gep(tr_base.llval, &[zero, llindex])
- };
- (element, align)
- };
-
let ((llprojected, align), llextra) = match projection.elem {
mir::ProjectionElem::Deref => bug!(),
mir::ProjectionElem::Field(ref field, _) => {
}
mir::ProjectionElem::Index(ref index) => {
let index = self.trans_operand(bcx, index);
- (project_index(self.prepare_index(bcx, index.immediate())), ptr::null_mut())
+ let llindex = self.prepare_index(bcx, index.immediate());
+ ((tr_base.project_index(bcx, llindex), align), ptr::null_mut())
}
mir::ProjectionElem::ConstantIndex { offset,
from_end: false,
min_length: _ } => {
let lloffset = C_uint(bcx.ccx, offset);
- (project_index(lloffset), ptr::null_mut())
+ ((tr_base.project_index(bcx, lloffset), align), ptr::null_mut())
}
mir::ProjectionElem::ConstantIndex { offset,
from_end: true,
let lloffset = C_uint(bcx.ccx, offset);
let lllen = tr_base.len(bcx.ccx);
let llindex = bcx.sub(lllen, lloffset);
- (project_index(llindex), ptr::null_mut())
+ ((tr_base.project_index(bcx, llindex), align), ptr::null_mut())
}
mir::ProjectionElem::Subslice { from, to } => {
- let llindex = C_uint(bcx.ccx, from);
- let (llbase, align) = project_index(llindex);
+ let llbase = tr_base.project_index(bcx, C_uint(bcx.ccx, from));
let base_ty = tr_base.ty.to_ty(bcx.tcx());
match base_ty.sty {
// except according to those terms.
use llvm::ValueRef;
-use rustc::ty::Ty;
+use rustc::ty::{self, Ty};
use rustc::ty::layout::Layout;
use rustc::mir;
+use rustc::mir::tcx::LvalueTy;
use rustc_data_structures::indexed_vec::Idx;
use base;
use type_::Type;
use std::fmt;
+use std::ptr;
use super::{MirContext, LocalRef};
-use super::lvalue::Alignment;
+use super::lvalue::{Alignment, LvalueRef};
/// The representation of a Rust value. The enum variant is in fact
/// uniquely determined by the value's type, but is kept as a
}
}
+ pub fn deref(self) -> LvalueRef<'tcx> {
+ let projected_ty = self.ty.builtin_deref(true, ty::NoPreference)
+ .unwrap().ty;
+ let (llptr, llextra) = match self.val {
+ OperandValue::Immediate(llptr) => (llptr, ptr::null_mut()),
+ OperandValue::Pair(llptr, llextra) => (llptr, llextra),
+ OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self)
+ };
+ LvalueRef {
+ llval: llptr,
+ llextra: llextra,
+ ty: LvalueTy::from_ty(projected_ty),
+ alignment: Alignment::AbiAligned,
+ }
+ }
+
/// If this operand is a Pair, we return an
/// Immediate aggregate with the two values.
pub fn pack_if_pair(mut self, bcx: &Builder<'a, 'tcx>) -> OperandRef<'tcx> {
}
mir::Operand::Constant(ref constant) => {
- let val = self.trans_constant(bcx, constant);
+ let val = self.trans_constant(&bcx, constant);
let operand = val.to_operand(bcx.ccx);
if let OperandValue::Ref(ptr, align) = operand.val {
// If this is a OperandValue::Ref to an immediate constant, load it.
use rustc::ty::{self, Ty};
use rustc::ty::cast::{CastTy, IntTy};
use rustc::ty::layout::Layout;
-use rustc::ty::subst::{Kind, Subst};
use rustc::mir::tcx::LvalueTy;
use rustc::mir;
use middle::lang_items::ExchangeMallocFnLangItem;
use base;
use builder::Builder;
-use callee::Callee;
+use callee;
use common::{self, val_ty, C_bool, C_null, C_uint};
use common::{C_integral};
use adt;
use machine;
+use monomorphize;
use type_::Type;
use type_of;
use tvec;
let size = count.as_u64(bcx.tcx().sess.target.uint_type);
let size = C_uint(bcx.ccx, size);
let base = base::get_dataptr(&bcx, dest.llval);
- tvec::slice_for_each(&bcx, base, tr_elem.ty, size, |bcx, llslot| {
+ tvec::slice_for_each(&bcx, base, tr_elem.ty, size, |bcx, llslot, loop_bb| {
self.store_operand(bcx, llslot, dest.alignment.to_align(), tr_elem);
+ bcx.br(loop_bb);
})
}
match operand.ty.sty {
ty::TyFnDef(def_id, substs, _) => {
OperandValue::Immediate(
- Callee::def(bcx.ccx, def_id, substs)
- .reify(bcx.ccx))
+ callee::resolve_and_get_fn(bcx.ccx, def_id, substs))
}
_ => {
bug!("{} cannot be reified to a fn ptr", operand.ty)
mir::CastKind::ClosureFnPointer => {
match operand.ty.sty {
ty::TyClosure(def_id, substs) => {
- // Get the def_id for FnOnce::call_once
- let fn_once = bcx.tcx().lang_items.fn_once_trait().unwrap();
- let call_once = bcx.tcx()
- .global_tcx().associated_items(fn_once)
- .find(|it| it.kind == ty::AssociatedKind::Method)
- .unwrap().def_id;
- // Now create its substs [Closure, Tuple]
- let input = bcx.tcx().closure_type(def_id)
- .subst(bcx.tcx(), substs.substs).input(0);
- let substs = bcx.tcx().mk_substs([operand.ty, input.skip_binder()]
- .iter().cloned().map(Kind::from));
- OperandValue::Immediate(
- Callee::def(bcx.ccx, call_once, substs)
- .reify(bcx.ccx))
+ let instance = monomorphize::resolve_closure(
+ bcx.ccx.shared(), def_id, substs, ty::ClosureKind::FnOnce);
+ OperandValue::Immediate(callee::get_fn(bcx.ccx, instance))
}
_ => {
bug!("{} cannot be cast to a fn ptr", operand.ty)
bcx.sess().fatal(&format!("allocation of `{}` {}", box_ty, s));
}
};
- let r = Callee::def(bcx.ccx, def_id, bcx.tcx().intern_substs(&[]))
- .reify(bcx.ccx);
+ let instance = ty::Instance::mono(bcx.tcx(), def_id);
+ let r = callee::get_fn(bcx.ccx, instance);
let val = bcx.pointercast(bcx.call(r, &[llsize, llalign], None), llty_ptr);
let operand = OperandRef {
};
(bcx, operand)
}
-
mir::Rvalue::Use(ref operand) => {
let operand = self.trans_operand(&bcx, operand);
(bcx, operand)
mir::Rvalue::UnaryOp(..) |
mir::Rvalue::Discriminant(..) |
mir::Rvalue::Box(..) |
- mir::Rvalue::Use(..) =>
+ mir::Rvalue::Use(..) => // (*)
true,
mir::Rvalue::Repeat(..) |
mir::Rvalue::Aggregate(..) =>
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use abi::Abi;
use common::*;
+use glue;
+
use rustc::hir::def_id::DefId;
use rustc::infer::TransNormalize;
-use rustc::traits;
+use rustc::middle::lang_items::DropInPlaceFnLangItem;
+use rustc::traits::{self, SelectionContext, Reveal};
+use rustc::ty::adjustment::CustomCoerceUnsized;
use rustc::ty::fold::{TypeFolder, TypeFoldable};
-use rustc::ty::subst::{Subst, Substs};
+use rustc::ty::subst::{Kind, Subst, Substs};
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::util::ppaux;
use rustc::util::common::MemoizationMap;
-use syntax::codemap::DUMMY_SP;
+use syntax::ast;
+use syntax::codemap::{Span, DUMMY_SP};
+
+pub use rustc::ty::Instance;
+
+fn fn_once_adapter_instance<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ closure_did: DefId,
+ substs: ty::ClosureSubsts<'tcx>,
+ ) -> Instance<'tcx> {
+ debug!("fn_once_adapter_shim({:?}, {:?})",
+ closure_did,
+ substs);
+ let fn_once = tcx.lang_items.fn_once_trait().unwrap();
+ let call_once = tcx.associated_items(fn_once)
+ .find(|it| it.kind == ty::AssociatedKind::Method)
+ .unwrap().def_id;
+ let def = ty::InstanceDef::ClosureOnceShim { call_once };
-use std::fmt;
+ let self_ty = tcx.mk_closure_from_closure_substs(
+ closure_did, substs);
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
-pub struct Instance<'tcx> {
- pub def: DefId,
- pub substs: &'tcx Substs<'tcx>,
+ let sig = tcx.closure_type(closure_did).subst(tcx, substs.substs);
+ let sig = tcx.erase_late_bound_regions_and_normalize(&sig);
+ assert_eq!(sig.inputs().len(), 1);
+ let substs = tcx.mk_substs([
+ Kind::from(self_ty),
+ Kind::from(sig.inputs()[0]),
+ ].iter().cloned());
+
+ debug!("fn_once_adapter_shim: self_ty={:?} sig={:?}", self_ty, sig);
+ Instance { def, substs }
}
-impl<'tcx> fmt::Display for Instance<'tcx> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- ppaux::parameterized(f, &self.substs, self.def, &[])
+fn needs_fn_once_adapter_shim(actual_closure_kind: ty::ClosureKind,
+ trait_closure_kind: ty::ClosureKind)
+ -> Result<bool, ()>
+{
+ match (actual_closure_kind, trait_closure_kind) {
+ (ty::ClosureKind::Fn, ty::ClosureKind::Fn) |
+ (ty::ClosureKind::FnMut, ty::ClosureKind::FnMut) |
+ (ty::ClosureKind::FnOnce, ty::ClosureKind::FnOnce) => {
+ // No adapter needed.
+ Ok(false)
+ }
+ (ty::ClosureKind::Fn, ty::ClosureKind::FnMut) => {
+ // The closure fn `llfn` is a `fn(&self, ...)`. We want a
+ // `fn(&mut self, ...)`. In fact, at trans time, these are
+ // basically the same thing, so we can just return llfn.
+ Ok(false)
+ }
+ (ty::ClosureKind::Fn, ty::ClosureKind::FnOnce) |
+ (ty::ClosureKind::FnMut, ty::ClosureKind::FnOnce) => {
+ // The closure fn `llfn` is a `fn(&self, ...)` or `fn(&mut
+ // self, ...)`. We want a `fn(self, ...)`. We can produce
+ // this by doing something like:
+ //
+ // fn call_once(self, ...) { call_mut(&self, ...) }
+ // fn call_once(mut self, ...) { call_mut(&mut self, ...) }
+ //
+ // These are both the same at trans time.
+ Ok(true)
+ }
+ _ => Err(()),
}
}
-impl<'a, 'tcx> Instance<'tcx> {
- pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>)
- -> Instance<'tcx> {
- assert!(substs.regions().all(|&r| r == ty::ReErased));
- Instance { def: def_id, substs: substs }
- }
+pub fn resolve_closure<'a, 'tcx> (
+ scx: &SharedCrateContext<'a, 'tcx>,
+ def_id: DefId,
+ substs: ty::ClosureSubsts<'tcx>,
+ requested_kind: ty::ClosureKind)
+ -> Instance<'tcx>
+{
+ let actual_kind = scx.tcx().closure_kind(def_id);
- pub fn mono(scx: &SharedCrateContext<'a, 'tcx>, def_id: DefId) -> Instance<'tcx> {
- Instance::new(def_id, scx.empty_substs_for_def_id(def_id))
+ match needs_fn_once_adapter_shim(actual_kind, requested_kind) {
+ Ok(true) => fn_once_adapter_instance(scx.tcx(), def_id, substs),
+ _ => Instance::new(def_id, substs.substs)
}
+}
- /// For associated constants from traits, return the impl definition.
- pub fn resolve_const(&self, scx: &SharedCrateContext<'a, 'tcx>) -> Self {
- if let Some(trait_id) = scx.tcx().trait_of_item(self.def) {
- let trait_ref = ty::TraitRef::new(trait_id, self.substs);
- let trait_ref = ty::Binder(trait_ref);
- let vtable = fulfill_obligation(scx, DUMMY_SP, trait_ref);
- if let traits::VtableImpl(vtable_impl) = vtable {
- let name = scx.tcx().item_name(self.def);
- let ac = scx.tcx().associated_items(vtable_impl.impl_def_id)
- .find(|item| item.kind == ty::AssociatedKind::Const && item.name == name);
- if let Some(ac) = ac {
- return Instance::new(ac.def_id, vtable_impl.substs);
+/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
+/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
+/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
+fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
+ span: Span,
+ trait_ref: ty::PolyTraitRef<'tcx>)
+ -> traits::Vtable<'tcx, ()>
+{
+ let tcx = scx.tcx();
+
+ // Remove any references to regions; this helps improve caching.
+ let trait_ref = tcx.erase_regions(&trait_ref);
+
+ scx.trait_cache().memoize(trait_ref, || {
+ debug!("trans::fulfill_obligation(trait_ref={:?}, def_id={:?})",
+ trait_ref, trait_ref.def_id());
+
+ // Do the initial selection for the obligation. This yields the
+ // shallow result we are looking for -- that is, what specific impl.
+ tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
+ let mut selcx = SelectionContext::new(&infcx);
+
+ let obligation_cause = traits::ObligationCause::misc(span,
+ ast::DUMMY_NODE_ID);
+ let obligation = traits::Obligation::new(obligation_cause,
+ trait_ref.to_poly_trait_predicate());
+
+ let selection = match selcx.select(&obligation) {
+ Ok(Some(selection)) => selection,
+ Ok(None) => {
+ // Ambiguity can happen when monomorphizing during trans
+ // expands to some humongo type that never occurred
+ // statically -- this humongo type can then overflow,
+ // leading to an ambiguous result. So report this as an
+ // overflow bug, since I believe this is the only case
+ // where ambiguity can result.
+ debug!("Encountered ambiguity selecting `{:?}` during trans, \
+ presuming due to overflow",
+ trait_ref);
+ tcx.sess.span_fatal(span,
+ "reached the recursion limit during monomorphization \
+ (selection ambiguity)");
}
+ Err(e) => {
+ span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans",
+ e, trait_ref)
+ }
+ };
+
+ debug!("fulfill_obligation: selection={:?}", selection);
+
+ // Currently, we use a fulfillment context to completely resolve
+ // all nested obligations. This is because they can inform the
+ // inference of the impl's type parameters.
+ let mut fulfill_cx = traits::FulfillmentContext::new();
+ let vtable = selection.map(|predicate| {
+ debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate);
+ fulfill_cx.register_predicate_obligation(&infcx, predicate);
+ });
+ let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);
+
+ info!("Cache miss: {:?} => {:?}", trait_ref, vtable);
+ vtable
+ })
+ })
+}
+
+fn resolve_associated_item<'a, 'tcx>(
+ scx: &SharedCrateContext<'a, 'tcx>,
+ trait_item: &ty::AssociatedItem,
+ trait_id: DefId,
+ rcvr_substs: &'tcx Substs<'tcx>
+) -> Instance<'tcx> {
+ let tcx = scx.tcx();
+ let def_id = trait_item.def_id;
+ debug!("resolve_associated_item(trait_item={:?}, \
+ trait_id={:?}, \
+ rcvr_substs={:?})",
+ def_id, trait_id, rcvr_substs);
+
+ let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_substs);
+ let vtbl = fulfill_obligation(scx, DUMMY_SP, ty::Binder(trait_ref));
+
+ // Now that we know which impl is being used, we can dispatch to
+ // the actual function:
+ match vtbl {
+ traits::VtableImpl(impl_data) => {
+ let (def_id, substs) = traits::find_associated_item(
+ tcx, trait_item, rcvr_substs, &impl_data);
+ let substs = tcx.erase_regions(&substs);
+ ty::Instance::new(def_id, substs)
+ }
+ traits::VtableClosure(closure_data) => {
+ let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap();
+ resolve_closure(scx, closure_data.closure_def_id, closure_data.substs,
+ trait_closure_kind)
+ }
+ traits::VtableFnPointer(ref data) => {
+ Instance {
+ def: ty::InstanceDef::FnPtrShim(trait_item.def_id, data.fn_ty),
+ substs: rcvr_substs
}
}
+ traits::VtableObject(ref data) => {
+ let index = tcx.get_vtable_index_of_object_method(data, def_id);
+ Instance {
+ def: ty::InstanceDef::Virtual(def_id, index),
+ substs: rcvr_substs
+ }
+ }
+ _ => {
+ bug!("static call to invalid vtable: {:?}", vtbl)
+ }
+ }
+}
- *self
+/// The point where linking happens. Resolve a (def_id, substs)
+/// pair to an instance.
+pub fn resolve<'a, 'tcx>(
+ scx: &SharedCrateContext<'a, 'tcx>,
+ def_id: DefId,
+ substs: &'tcx Substs<'tcx>
+) -> Instance<'tcx> {
+ debug!("resolve(def_id={:?}, substs={:?})",
+ def_id, substs);
+ let result = if let Some(trait_def_id) = scx.tcx().trait_of_item(def_id) {
+ debug!(" => associated item, attempting to find impl");
+ let item = scx.tcx().associated_item(def_id);
+ resolve_associated_item(scx, &item, trait_def_id, substs)
+ } else {
+ let item_type = def_ty(scx, def_id, substs);
+ let def = match item_type.sty {
+ ty::TyFnDef(_, _, f) if
+ f.abi() == Abi::RustIntrinsic ||
+ f.abi() == Abi::PlatformIntrinsic =>
+ {
+ debug!(" => intrinsic");
+ ty::InstanceDef::Intrinsic(def_id)
+ }
+ _ => {
+ if Some(def_id) == scx.tcx().lang_items.drop_in_place_fn() {
+ let ty = substs.type_at(0);
+ if glue::needs_drop_glue(scx, ty) {
+ debug!(" => nontrivial drop glue");
+ ty::InstanceDef::DropGlue(def_id, Some(ty))
+ } else {
+ debug!(" => trivial drop glue");
+ ty::InstanceDef::DropGlue(def_id, None)
+ }
+ } else {
+ debug!(" => free item");
+ ty::InstanceDef::Item(def_id)
+ }
+ }
+ };
+ Instance { def, substs }
+ };
+ debug!("resolve(def_id={:?}, substs={:?}) = {}",
+ def_id, substs, result);
+ result
+}
+
+pub fn resolve_drop_in_place<'a, 'tcx>(
+ scx: &SharedCrateContext<'a, 'tcx>,
+ ty: Ty<'tcx>)
+ -> ty::Instance<'tcx>
+{
+ let def_id = scx.tcx().require_lang_item(DropInPlaceFnLangItem);
+ let substs = scx.tcx().intern_substs(&[Kind::from(ty)]);
+ resolve(scx, def_id, substs)
+}
+
+pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx>,
+ source_ty: Ty<'tcx>,
+ target_ty: Ty<'tcx>)
+ -> CustomCoerceUnsized {
+ let trait_ref = ty::Binder(ty::TraitRef {
+ def_id: scx.tcx().lang_items.coerce_unsized_trait().unwrap(),
+ substs: scx.tcx().mk_substs_trait(source_ty, &[target_ty])
+ });
+
+ match fulfill_obligation(scx, DUMMY_SP, trait_ref) {
+ traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
+ scx.tcx().custom_coerce_unsized_kind(impl_def_id)
+ }
+ vtable => {
+ bug!("invalid CoerceUnsized vtable: {:?}", vtable);
+ }
}
}
AssociatedTypeNormalizer::new(scx).fold(&substituted)
}
-
/// Returns the normalized type of a struct field
pub fn field_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_substs: &Substs<'tcx>,
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathData;
use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER;
-use rustc::ty::TyCtxt;
+use rustc::ty::{self, TyCtxt};
use rustc::ty::item_path::characteristic_def_id_of_type;
use rustc_incremental::IchHasher;
use std::cmp::Ordering;
symbol_name.hash(&mut state);
let exported = match item {
TransItem::Fn(ref instance) => {
- let node_id = scx.tcx().hir.as_local_node_id(instance.def);
+ let node_id =
+ scx.tcx().hir.as_local_node_id(instance.def_id());
node_id.map(|node_id| exported_symbols.contains(&node_id))
.unwrap_or(false)
}
TransItem::Static(node_id) => {
exported_symbols.contains(&node_id)
}
- TransItem::DropGlue(..) => false,
};
exported.hash(&mut state);
}
fn local_node_id(tcx: TyCtxt, trans_item: TransItem) -> Option<NodeId> {
match trans_item {
TransItem::Fn(instance) => {
- tcx.hir.as_local_node_id(instance.def)
+ tcx.hir.as_local_node_id(instance.def_id())
}
TransItem::Static(node_id) => Some(node_id),
- TransItem::DropGlue(_) => None,
}
}
}
match trans_item {
TransItem::Fn(..) |
TransItem::Static(..) => llvm::ExternalLinkage,
- TransItem::DropGlue(..) => unreachable!(),
}
}
};
let tcx = scx.tcx();
match trans_item {
TransItem::Fn(instance) => {
+ let def_id = match instance.def {
+ ty::InstanceDef::Item(def_id) => def_id,
+ ty::InstanceDef::FnPtrShim(..) |
+ ty::InstanceDef::ClosureOnceShim { .. } |
+ ty::InstanceDef::Intrinsic(..) |
+ ty::InstanceDef::DropGlue(..) |
+ ty::InstanceDef::Virtual(..) => return None
+ };
+
// If this is a method, we want to put it into the same module as
// its self-type. If the self-type does not provide a characteristic
// DefId, we use the location of the impl after all.
- if tcx.trait_of_item(instance.def).is_some() {
+ if tcx.trait_of_item(def_id).is_some() {
let self_ty = instance.substs.type_at(0);
// This is an implementation of a trait method.
- return characteristic_def_id_of_type(self_ty).or(Some(instance.def));
+ return characteristic_def_id_of_type(self_ty).or(Some(def_id));
}
- if let Some(impl_def_id) = tcx.impl_of_method(instance.def) {
+ if let Some(impl_def_id) = tcx.impl_of_method(def_id) {
// This is a method within an inherent impl, find out what the
// self-type is:
let impl_self_ty = common::def_ty(scx, impl_def_id, instance.substs);
}
}
- Some(instance.def)
+ Some(def_id)
}
- TransItem::DropGlue(dg) => characteristic_def_id_of_type(dg.ty()),
TransItem::Static(node_id) => Some(tcx.hir.local_def_id(node_id)),
}
}
trans_item: TransItem<'tcx>) -> Option<Span> {
match trans_item {
TransItem::Fn(Instance { def, .. }) => {
- tcx.hir.as_local_node_id(def)
+ tcx.hir.as_local_node_id(def.def_id())
}
TransItem::Static(node_id) => Some(node_id),
- TransItem::DropGlue(_) => None,
}.map(|node_id| {
tcx.hir.span(node_id)
})
//! item-path. This is used for unit testing the code that generates
//! paths etc in all kinds of annoying scenarios.
+use back::symbol_names;
use rustc::hir;
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use syntax::ast;
for attr in tcx.get_attrs(def_id).iter() {
if attr.check_name(SYMBOL_NAME) {
// for now, can only use on monomorphic names
- let instance = Instance::mono(self.scx, def_id);
- let name = instance.symbol_name(self.scx);
+ let instance = Instance::mono(tcx, def_id);
+ let name = symbol_names::symbol_name(instance, self.scx);
tcx.sess.span_err(attr.span, &format!("symbol-name({})", name));
} else if attr.check_name(ITEM_PATH) {
let path = tcx.item_path_str(def_id);
intravisit::walk_impl_item(self, ii)
}
}
-
use context::{CrateContext, SharedCrateContext};
use common;
use declare;
-use glue::DropGlueKind;
use llvm;
use monomorphize::Instance;
use rustc::dep_graph::DepNode;
use syntax::ast::{self, NodeId};
use syntax::attr;
use type_of;
-use glue;
-use abi::{Abi, FnType};
use back::symbol_names;
use std::fmt::Write;
use std::iter;
#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
pub enum TransItem<'tcx> {
- DropGlue(DropGlueKind<'tcx>),
Fn(Instance<'tcx>),
Static(NodeId)
}
}
TransItem::Fn(instance) => {
let _task = ccx.tcx().dep_graph.in_task(
- DepNode::TransCrateItem(instance.def)); // (*)
+ DepNode::TransCrateItem(instance.def_id())); // (*)
base::trans_instance(&ccx, instance);
}
- TransItem::DropGlue(dg) => {
- glue::implement_drop_glue(&ccx, dg);
- }
}
debug!("END IMPLEMENTING '{} ({})' in cgu {}",
TransItem::Fn(instance) => {
TransItem::predefine_fn(ccx, instance, linkage, &symbol_name);
}
- TransItem::DropGlue(dg) => {
- TransItem::predefine_drop_glue(ccx, dg, linkage, &symbol_name);
- }
}
debug!("END PREDEFINING '{} ({})' in cgu {}",
linkage: llvm::Linkage,
symbol_name: &str) {
let def_id = ccx.tcx().hir.local_def_id(node_id);
- let ty = common::def_ty(ccx.shared(), def_id, Substs::empty());
+ let instance = Instance::mono(ccx.tcx(), def_id);
+ let ty = common::instance_ty(ccx.shared(), &instance);
let llty = type_of::type_of(ccx, ty);
let g = declare::define_global(ccx, symbol_name, llty).unwrap_or_else(|| {
unsafe { llvm::LLVMRustSetLinkage(g, linkage) };
- let instance = Instance::mono(ccx.shared(), def_id);
ccx.instances().borrow_mut().insert(instance, g);
ccx.statics().borrow_mut().insert(g, def_id);
}
assert!(!instance.substs.needs_infer() &&
!instance.substs.has_param_types());
- let mono_ty = common::def_ty(ccx.shared(), instance.def, instance.substs);
- let attrs = ccx.tcx().get_attrs(instance.def);
+ let mono_ty = common::instance_ty(ccx.shared(), &instance);
+ let attrs = instance.def.attrs(ccx.tcx());
let lldecl = declare::declare_fn(ccx, symbol_name, mono_ty);
unsafe { llvm::LLVMRustSetLinkage(lldecl, linkage) };
base::set_link_section(ccx, lldecl, &attrs);
llvm::SetUniqueComdat(ccx.llmod(), lldecl);
}
- if let ty::TyClosure(..) = mono_ty.sty {
- // set an inline hint for all closures
+ debug!("predefine_fn: mono_ty = {:?} instance = {:?}", mono_ty, instance);
+ if common::is_inline_instance(ccx.tcx(), &instance) {
attributes::inline(lldecl, attributes::InlineAttr::Hint);
}
-
attributes::from_fn_attrs(ccx, &attrs, lldecl);
ccx.instances().borrow_mut().insert(instance, lldecl);
}
- fn predefine_drop_glue(ccx: &CrateContext<'a, 'tcx>,
- dg: glue::DropGlueKind<'tcx>,
- linkage: llvm::Linkage,
- symbol_name: &str) {
- let tcx = ccx.tcx();
- assert_eq!(dg.ty(), glue::get_drop_glue_type(ccx.shared(), dg.ty()));
- let t = dg.ty();
-
- let sig = tcx.mk_fn_sig(
- iter::once(tcx.mk_mut_ptr(t)),
- tcx.mk_nil(),
- false,
- hir::Unsafety::Normal,
- Abi::Rust
- );
-
- debug!("predefine_drop_glue: sig={}", sig);
-
- let fn_ty = FnType::new(ccx, sig, &[]);
- let llfnty = fn_ty.llvm_type(ccx);
-
- assert!(declare::get_defined_value(ccx, symbol_name).is_none());
- let llfn = declare::declare_cfn(ccx, symbol_name, llfnty);
- unsafe { llvm::LLVMRustSetLinkage(llfn, linkage) };
- if linkage == llvm::Linkage::LinkOnceODRLinkage ||
- linkage == llvm::Linkage::WeakODRLinkage {
- llvm::SetUniqueComdat(ccx.llmod(), llfn);
- }
- attributes::set_frame_pointer_elimination(ccx, llfn);
- ccx.drop_glues().borrow_mut().insert(dg, (llfn, fn_ty));
- }
-
pub fn compute_symbol_name(&self,
scx: &SharedCrateContext<'a, 'tcx>) -> String {
match *self {
- TransItem::Fn(instance) => instance.symbol_name(scx),
+ TransItem::Fn(instance) => symbol_names::symbol_name(instance, scx),
TransItem::Static(node_id) => {
let def_id = scx.tcx().hir.local_def_id(node_id);
- Instance::mono(scx, def_id).symbol_name(scx)
- }
- TransItem::DropGlue(dg) => {
- let prefix = match dg {
- DropGlueKind::Ty(_) => "drop",
- DropGlueKind::TyContents(_) => "drop_contents",
- };
- symbol_names::exported_name_from_type_and_prefix(scx, dg.ty(), prefix)
+ symbol_names::symbol_name(Instance::mono(scx.tcx(), def_id), scx)
}
}
}
- pub fn is_from_extern_crate(&self) -> bool {
- match *self {
- TransItem::Fn(ref instance) => !instance.def.is_local(),
- TransItem::DropGlue(..) |
- TransItem::Static(..) => false,
- }
- }
-
pub fn instantiation_mode(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> InstantiationMode {
match *self {
TransItem::Fn(ref instance) => {
if self.explicit_linkage(tcx).is_none() &&
- (common::is_closure(tcx, instance.def) ||
- attr::requests_inline(&tcx.get_attrs(instance.def)[..])) {
+ common::requests_inline(tcx, instance)
+ {
InstantiationMode::LocalCopy
} else {
InstantiationMode::GloballyShared
}
}
- TransItem::DropGlue(..) => InstantiationMode::LocalCopy,
TransItem::Static(..) => InstantiationMode::GloballyShared,
}
}
TransItem::Fn(ref instance) => {
instance.substs.types().next().is_some()
}
- TransItem::DropGlue(..) |
TransItem::Static(..) => false,
}
}
pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<llvm::Linkage> {
let def_id = match *self {
- TransItem::Fn(ref instance) => instance.def,
+ TransItem::Fn(ref instance) => instance.def_id(),
TransItem::Static(node_id) => tcx.hir.local_def_id(node_id),
- TransItem::DropGlue(..) => return None,
};
let attributes = tcx.get_attrs(def_id);
let hir_map = &tcx.hir;
return match *self {
- TransItem::DropGlue(dg) => {
- let mut s = String::with_capacity(32);
- match dg {
- DropGlueKind::Ty(_) => s.push_str("drop-glue "),
- DropGlueKind::TyContents(_) => s.push_str("drop-glue-contents "),
- };
- let printer = DefPathBasedNames::new(tcx, false, false);
- printer.push_type_name(dg.ty(), &mut s);
- s
- }
TransItem::Fn(instance) => {
to_string_internal(tcx, "fn ", instance)
},
pub fn to_raw_string(&self) -> String {
match *self {
- TransItem::DropGlue(dg) => {
- let prefix = match dg {
- DropGlueKind::Ty(_) => "Ty",
- DropGlueKind::TyContents(_) => "TyContents",
- };
- format!("DropGlue({}: {})", prefix, dg.ty() as *const _ as usize)
- }
TransItem::Fn(instance) => {
format!("Fn({:?}, {})",
instance.def,
pub fn push_instance_as_string(&self,
instance: Instance<'tcx>,
output: &mut String) {
- self.push_def_path(instance.def, output);
+ self.push_def_path(instance.def_id(), output);
self.push_type_params(instance.substs, iter::empty(), output);
}
}
use llvm;
use builder::Builder;
-use llvm::ValueRef;
+use llvm::{BasicBlockRef, ValueRef};
use common::*;
use rustc::ty::Ty;
unit_ty: Ty<'tcx>,
len: ValueRef,
f: F
-) -> Builder<'a, 'tcx> where F: FnOnce(&Builder<'a, 'tcx>, ValueRef) {
+) -> Builder<'a, 'tcx> where F: FnOnce(&Builder<'a, 'tcx>, ValueRef, BasicBlockRef) {
// Special-case vectors with elements of size 0 so they don't go out of bounds (#9890)
let zst = type_is_zero_size(bcx.ccx, unit_ty);
let add = |bcx: &Builder, a, b| if zst {
let keep_going = header_bcx.icmp(llvm::IntNE, current, end);
header_bcx.cond_br(keep_going, body_bcx.llbb(), next_bcx.llbb());
- f(&body_bcx, if zst { data_ptr } else { current });
let next = add(&body_bcx, current, C_uint(bcx.ccx, 1usize));
+ f(&body_bcx, if zst { data_ptr } else { current }, header_bcx.llbb());
header_bcx.add_incoming_to_phi(current, next, body_bcx.llbb());
- body_bcx.br(header_bcx.llbb());
next_bcx
}
// Call the generic checker.
let expected_arg_tys =
- self.expected_types_for_fn_args(call_expr.span,
+ self.expected_inputs_for_expected_output(call_expr.span,
expected,
fn_sig.output(),
fn_sig.inputs());
// do know the types expected for each argument and the return
// type.
- let expected_arg_tys = self.expected_types_for_fn_args(call_expr.span,
+ let expected_arg_tys = self.expected_inputs_for_expected_output(call_expr.span,
expected,
fn_sig.output().clone(),
fn_sig.inputs());
"unchecked_div" | "unchecked_rem" =>
(1, vec![param(0), param(0)], param(0)),
+ "unchecked_shl" | "unchecked_shr" =>
+ (1, vec![param(0), param(0)], param(0)),
"overflowing_add" | "overflowing_sub" | "overflowing_mul" =>
(1, vec![param(0), param(0)], param(0)),
match method_fn_ty.sty {
ty::TyFnDef(def_id, .., ref fty) => {
// HACK(eddyb) ignore self in the definition (see above).
- let expected_arg_tys = self.expected_types_for_fn_args(
+ let expected_arg_tys = self.expected_inputs_for_expected_output(
sp,
expected,
fty.0.output(),
TypeAndSubsts { substs: substs, ty: substd_ty }
}
- /// Unifies the return type with the expected type early, for more coercions
- /// and forward type information on the argument expressions.
- fn expected_types_for_fn_args(&self,
- call_span: Span,
- expected_ret: Expectation<'tcx>,
- formal_ret: Ty<'tcx>,
- formal_args: &[Ty<'tcx>])
- -> Vec<Ty<'tcx>> {
+ /// Unifies the output type with the expected type early, for more coercions
+ /// and forward type information on the input expressions.
+ fn expected_inputs_for_expected_output(&self,
+ call_span: Span,
+ expected_ret: Expectation<'tcx>,
+ formal_ret: Ty<'tcx>,
+ formal_args: &[Ty<'tcx>])
+ -> Vec<Ty<'tcx>> {
let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| {
self.fudge_regions_if_ok(&RegionVariableOrigin::Coercion(call_span), || {
// Attempt to apply a subtyping relationship between the formal
}).collect())
}).ok()
}).unwrap_or(vec![]);
- debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})",
+ debug!("expected_inputs_for_expected_output(formal={:?} -> {:?}, expected={:?} -> {:?})",
formal_args, formal_ret,
expected_args, expected_ret);
expected_args
fn check_expr_struct_fields(&self,
adt_ty: Ty<'tcx>,
+ expected: Expectation<'tcx>,
expr_id: ast::NodeId,
span: Span,
variant: &'tcx ty::VariantDef,
ast_fields: &'gcx [hir::Field],
check_completeness: bool) {
let tcx = self.tcx;
- let (substs, adt_kind, kind_name) = match adt_ty.sty {
- ty::TyAdt(adt, substs) => (substs, adt.adt_kind(), adt.variant_descr()),
+
+ let adt_ty_hint =
+ self.expected_inputs_for_expected_output(span, expected, adt_ty, &[adt_ty])
+ .get(0).cloned().unwrap_or(adt_ty);
+
+ let (substs, hint_substs, adt_kind, kind_name) = match (&adt_ty.sty, &adt_ty_hint.sty) {
+ (&ty::TyAdt(adt, substs), &ty::TyAdt(_, hint_substs)) => {
+ (substs, hint_substs, adt.adt_kind(), adt.variant_descr())
+ }
_ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
};
// Typecheck each field.
for field in ast_fields {
- let expected_field_type;
+ let final_field_type;
+ let field_type_hint;
if let Some(v_field) = remaining_fields.remove(&field.name.node) {
- expected_field_type = self.field_ty(field.span, v_field, substs);
+ final_field_type = self.field_ty(field.span, v_field, substs);
+ field_type_hint = self.field_ty(field.span, v_field, hint_substs);
seen_fields.insert(field.name.node, field.span);
}
} else {
error_happened = true;
- expected_field_type = tcx.types.err;
+ final_field_type = tcx.types.err;
+ field_type_hint = tcx.types.err;
if let Some(_) = variant.find_field_named(field.name.node) {
let mut err = struct_span_err!(self.tcx.sess,
field.name.span,
// Make sure to give a type to the field even if there's
// an error, so we can continue typechecking
- self.check_expr_coercable_to_type(&field.expr, expected_field_type);
+ let ty = self.check_expr_with_hint(&field.expr, field_type_hint);
+ self.demand_coerce(&field.expr, ty, final_field_type);
}
// Make sure the programmer specified correct number of fields.
fn check_expr_struct(&self,
expr: &hir::Expr,
+ expected: Expectation<'tcx>,
qpath: &hir::QPath,
fields: &'gcx [hir::Field],
base_expr: &'gcx Option<P<hir::Expr>>) -> Ty<'tcx>
hir::QPath::TypeRelative(ref qself, _) => qself.span
};
- self.check_expr_struct_fields(struct_ty, expr.id, path_span, variant, fields,
+ self.check_expr_struct_fields(struct_ty, expected, expr.id, path_span, variant, fields,
base_expr.is_none());
if let &Some(ref base_expr) = base_expr {
self.check_expr_has_type(base_expr, struct_ty);
}
}
hir::ExprStruct(ref qpath, ref fields, ref base_expr) => {
- self.check_expr_struct(expr, qpath, fields, base_expr)
+ self.check_expr_struct(expr, expected, qpath, fields, base_expr)
}
hir::ExprField(ref base, ref field) => {
self.check_field(expr, lvalue_pref, &base, field)
use rustc::hir;
+use std::{mem, slice, vec};
use std::path::PathBuf;
use std::rc::Rc;
-use std::slice;
use std::sync::Arc;
use std::u32;
-use std::mem;
use core::DocContext;
use doctree;
pub struct ListAttributesIter<'a> {
attrs: slice::Iter<'a, ast::Attribute>,
- current_list: slice::Iter<'a, ast::NestedMetaItem>,
+ current_list: vec::IntoIter<ast::NestedMetaItem>,
name: &'a str
}
impl<'a> Iterator for ListAttributesIter<'a> {
- type Item = &'a ast::NestedMetaItem;
+ type Item = ast::NestedMetaItem;
fn next(&mut self) -> Option<Self::Item> {
if let Some(nested) = self.current_list.next() {
}
for attr in &mut self.attrs {
- if let Some(ref list) = attr.meta_item_list() {
+ if let Some(list) = attr.meta_item_list() {
if attr.check_name(self.name) {
- self.current_list = list.iter();
+ self.current_list = list.into_iter();
if let Some(nested) = self.current_list.next() {
return Some(nested);
}
fn lists<'a>(&'a self, name: &'a str) -> ListAttributesIter<'a> {
ListAttributesIter {
attrs: self.iter(),
- current_list: [].iter(),
+ current_list: Vec::new().into_iter(),
name: name
}
}
fn has_word(self, &str) -> bool;
}
-impl<'a, I: IntoIterator<Item=&'a ast::NestedMetaItem>> NestedAttributesExt for I {
+impl<I: IntoIterator<Item=ast::NestedMetaItem>> NestedAttributesExt for I {
fn has_word(self, word: &str) -> bool {
self.into_iter().any(|attr| attr.is_word() && attr.check_name(word))
}
decl: decl,
abi: sig.abi(),
- // trait methods canot (currently, at least) be const
+ // trait methods cannot (currently, at least) be const
constness: hir::Constness::NotConst,
})
} else {
AngleBracketed {
lifetimes: Vec<Lifetime>,
types: Vec<Type>,
- bindings: Vec<TypeBinding>
+ bindings: Vec<TypeBinding>,
},
Parenthesized {
inputs: Vec<Type>,
- output: Option<Type>
+ output: Option<Type>,
}
}
data.lifetimes.clean(cx)
},
types: data.types.clean(cx),
- bindings: data.bindings.clean(cx)
+ bindings: data.bindings.clean(cx),
}
}
hir::ParenthesizedParameters(ref data) => {
PathParameters::Parenthesized {
inputs: data.inputs.clean(cx),
- output: data.output.clean(cx)
+ output: data.output.clean(cx),
}
}
}
// #[doc(no_inline)] attribute is present.
// Don't inline doc(hidden) imports so they can be stripped at a later stage.
let denied = self.vis != hir::Public || self.attrs.iter().any(|a| {
- a.name() == "doc" && match a.meta_item_list() {
- Some(l) => attr::list_contains_name(l, "no_inline") ||
- attr::list_contains_name(l, "hidden"),
+ a.name().unwrap() == "doc" && match a.meta_item_list() {
+ Some(l) => attr::list_contains_name(&l, "no_inline") ||
+ attr::list_contains_name(&l, "hidden"),
None => false,
}
});
if is_not_debug {
write!(w, "{:#}{:#}", HRef::new(did, &last.name), last.params)?;
} else {
- write!(w, "{:?}{:?}", HRef::new(did, &last.name), last.params)?;
+ write!(w, "{:?}{}", HRef::new(did, &last.name), last.params)?;
}
} else {
if is_not_debug {
} else {
format!("{:?}", HRef::new(did, &last.name))
};
- write!(w, "{}{:?}", path, last.params)?;
+ write!(w, "{}{}", path, last.params)?;
}
}
Ok(())
use rustc::middle::stability;
use rustc::hir;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
+use rustc::session::config::nightly_options::is_nightly_build;
use rustc_data_structures::flock;
use clean::{self, AttributesExt, GetDefId, SelfTy, Mutability};
}
};
// FIXME(#24111): remove when `const_fn` is stabilized
- let vis_constness = match UnstableFeatures::from_environment() {
- UnstableFeatures::Allow => constness,
- _ => hir::Constness::NotConst
+ let vis_constness = if is_nightly_build() {
+ constness
+ } else {
+ hir::Constness::NotConst
};
let prefix = format!("{}{}{:#}fn {}{:#}",
ConstnessSpace(vis_constness),
let mut attrs = String::new();
for attr in &it.attrs.other_attrs {
- let name = attr.name();
+ let name = attr.name().unwrap();
if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) {
continue;
}
- if let Some(s) = render_attribute(attr.meta()) {
+ if let Some(s) = render_attribute(&attr.meta().unwrap()) {
attrs.push_str(&format!("#[{}]\n", s));
}
}
attrs: Vec::new(),
};
- let attrs = krate.attrs.iter()
- .filter(|a| a.check_name("doc"))
- .filter_map(|a| a.meta_item_list())
- .flat_map(|l| l)
- .filter(|a| a.check_name("test"))
- .filter_map(|a| a.meta_item_list())
- .flat_map(|l| l);
+ let test_attrs: Vec<_> = krate.attrs.iter()
+ .filter(|a| a.check_name("doc"))
+ .flat_map(|a| a.meta_item_list().unwrap_or_else(Vec::new))
+ .filter(|a| a.check_name("test"))
+ .collect();
+ let attrs = test_attrs.iter().flat_map(|a| a.meta_item_list().unwrap_or(&[]));
+
for attr in attrs {
if attr.check_name("no_crate_inject") {
opts.no_crate_inject = true;
if item.vis == hir::Public && self.inside_public_path {
let please_inline = item.attrs.iter().any(|item| {
match item.meta_item_list() {
- Some(list) if item.check_name("doc") => {
+ Some(ref list) if item.check_name("doc") => {
list.iter().any(|i| i.check_name("inline"))
}
_ => false,
/// resistance against HashDoS attacks. The algorithm is randomly seeded, and a
/// reasonable best-effort is made to generate this seed from a high quality,
/// secure source of randomness provided by the host without blocking the
-/// program. Because of this, the randomness of the seed is dependant on the
-/// quality of the system's random number generator at the time it is created.
+/// program. Because of this, the randomness of the seed depends on the output
+/// quality of the system's random number generator when the seed is created.
/// In particular, seeds generated when the system's entropy pool is abnormally
/// low such as during system boot may be of a lower quality.
///
fn next(&mut self) -> Option<(SafeHash, K, V)> {
self.iter.next().map(|bucket| {
unsafe {
- (**self.table).size -= 1;
+ (*self.table.as_mut_ptr()).size -= 1;
let (k, v) = ptr::read(bucket.pair);
(SafeHash { hash: ptr::replace(bucket.hash, EMPTY_BUCKET) }, k, v)
}
}
}
+#[stable(feature = "never_error", since = "1.18.0")]
+impl Error for ! {
+ fn description(&self) -> &str { *self }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl Error for str::ParseBoolError {
fn description(&self) -> &str { "failed to parse bool" }
/// error conditions for when a directory is being created (after it is
/// determined to not exist) are outlined by `fs::create_dir`.
///
+/// A notable exception is made for situations where any of the directories
+/// specified in `path` could not be created because they were created
+/// concurrently by another thread or process. Such cases are considered
+/// successful: calling `create_dir_all` concurrently from multiple threads
+/// or processes is guaranteed not to fail due to the race itself.
+///
/// # Examples
///
/// ```
}
fn create_dir_all(&self, path: &Path) -> io::Result<()> {
- if path == Path::new("") || path.is_dir() { return Ok(()) }
- if let Some(p) = path.parent() {
- self.create_dir_all(p)?
+ if path == Path::new("") {
+ return Ok(())
+ }
+
+ match self.inner.mkdir(path) {
+ Ok(()) => return Ok(()),
+ Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}
+ Err(_) if path.is_dir() => return Ok(()),
+ Err(e) => return Err(e),
+ }
+ match path.parent() {
+ Some(p) => try!(self.create_dir_all(p)),
+ None => return Err(io::Error::new(io::ErrorKind::Other, "failed to create whole tree")),
+ }
+ match self.inner.mkdir(path) {
+ Ok(()) => Ok(()),
+ Err(_) if path.is_dir() => Ok(()),
+ Err(e) => Err(e),
}
- self.inner.mkdir(path)
}
}
use rand::{StdRng, Rng};
use str;
use sys_common::io::test::{TempDir, tmpdir};
+ use thread;
#[cfg(windows)]
use os::windows::fs::{symlink_dir, symlink_file};
assert!(result.is_err());
}
+ #[test]
+ fn concurrent_recursive_mkdir() {
+ for _ in 0..100 {
+ let dir = tmpdir();
+ let mut dir = dir.join("a");
+ for _ in 0..40 {
+ dir = dir.join("a");
+ }
+ let mut join = vec!();
+ for _ in 0..8 {
+ let dir = dir.clone();
+ join.push(thread::spawn(move || {
+ check!(fs::create_dir_all(&dir));
+ }))
+ }
+
+ // No `Display` on result of `join()`
+ join.drain(..).map(|join| join.join().unwrap()).count();
+ }
+ }
+
#[test]
fn recursive_mkdir_slash() {
check!(fs::create_dir_all(&Path::new("/")));
}
+ #[test]
+ fn recursive_mkdir_dot() {
+ check!(fs::create_dir_all(&Path::new(".")));
+ }
+
+ #[test]
+ fn recursive_mkdir_empty() {
+ check!(fs::create_dir_all(&Path::new("")));
+ }
+
#[test]
fn recursive_rmdir() {
let tmpdir = tmpdir();
#![feature(char_escape_debug)]
#![feature(char_internals)]
#![feature(collections)]
-#![feature(collections_bound)]
#![feature(collections_range)]
#![feature(compiler_builtins_lib)]
#![feature(const_fn)]
#![feature(linkage)]
#![feature(macro_reexport)]
#![feature(needs_panic_runtime)]
+#![feature(never_type)]
#![feature(num_bits_bytes)]
#![feature(old_wrapping)]
#![feature(on_unimplemented)]
#[stable(feature = "ip_u32", since = "1.1.0")]
impl From<Ipv4Addr> for u32 {
+ /// Performs the conversion in network byte order (big-endian).
fn from(ip: Ipv4Addr) -> u32 {
let ip = ip.octets();
((ip[0] as u32) << 24) + ((ip[1] as u32) << 16) + ((ip[2] as u32) << 8) + (ip[3] as u32)
#[stable(feature = "ip_u32", since = "1.1.0")]
impl From<u32> for Ipv4Addr {
+ /// Performs the conversion in network byte order (big-endian).
fn from(ip: u32) -> Ipv4Addr {
Ipv4Addr::new((ip >> 24) as u8, (ip >> 16) as u8, (ip >> 8) as u8, ip as u8)
}
/// assert_eq!(path.to_string_lossy(), "foo.txt");
/// ```
///
- /// Had `os_str` contained invalid unicode, the `to_string_lossy` call might
+ /// Had `path` contained invalid unicode, the `to_string_lossy` call might
/// have returned `"fo�.txt"`.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_string_lossy(&self) -> Cow<str> {
/// will be run. If a clean shutdown is needed it is recommended to only call
/// this function at a known point where there are no more destructors left
/// to run.
-#[unstable(feature = "process_abort", issue = "37838")]
+#[stable(feature = "process_abort", since = "1.17.0")]
pub fn abort() -> ! {
unsafe { ::sys::abort_internal() };
}
/// An RAII implementation of a "scoped lock" of a mutex. When this structure is
/// dropped (falls out of scope), the lock will be unlocked.
///
-/// The data protected by the mutex can be access through this guard via its
+/// The data protected by the mutex can be accessed through this guard via its
/// [`Deref`] and [`DerefMut`] implementations.
///
/// This structure is created by the [`lock`] and [`try_lock`] methods on
pub segments: Vec<PathSegment>,
}
+impl<'a> PartialEq<&'a str> for Path {
+ fn eq(&self, string: &&'a str) -> bool {
+ self.segments.len() == 1 && self.segments[0].identifier.name == *string
+ }
+}
+
impl fmt::Debug for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "path({})", pprust::path_to_string(self))
pub struct Attribute {
pub id: AttrId,
pub style: AttrStyle,
- pub value: MetaItem,
+ pub path: Path,
+ pub tokens: TokenStream,
pub is_sugared_doc: bool,
pub span: Span,
}
pub use self::IntType::*;
use ast;
-use ast::{AttrId, Attribute, Name};
+use ast::{AttrId, Attribute, Name, Ident};
use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
-use ast::{Lit, Expr, Item, Local, Stmt, StmtKind};
+use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind};
use codemap::{Spanned, spanned, dummy_spanned, mk_sp};
use syntax_pos::{Span, BytePos, DUMMY_SP};
use errors::Handler;
use feature_gate::{Features, GatedCfg};
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
-use parse::ParseSess;
+use parse::parser::Parser;
+use parse::{self, ParseSess, PResult};
+use parse::token::{self, Token};
use ptr::P;
use symbol::Symbol;
+use tokenstream::{TokenStream, TokenTree, Delimited};
use util::ThinVec;
use std::cell::{RefCell, Cell};
+use std::iter;
thread_local! {
static USED_ATTRS: RefCell<Vec<u64>> = RefCell::new(Vec::new());
impl Attribute {
pub fn check_name(&self, name: &str) -> bool {
- let matches = self.name() == name;
+ let matches = self.path == name;
if matches {
mark_used(self);
}
matches
}
- pub fn name(&self) -> Name { self.meta().name() }
+ pub fn name(&self) -> Option<Name> {
+ match self.path.segments.len() {
+ 1 => Some(self.path.segments[0].identifier.name),
+ _ => None,
+ }
+ }
pub fn value_str(&self) -> Option<Symbol> {
- self.meta().value_str()
+ self.meta().and_then(|meta| meta.value_str())
}
- pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> {
- self.meta().meta_item_list()
+ pub fn meta_item_list(&self) -> Option<Vec<NestedMetaItem>> {
+ match self.meta() {
+ Some(MetaItem { node: MetaItemKind::List(list), .. }) => Some(list),
+ _ => None
+ }
}
- pub fn is_word(&self) -> bool { self.meta().is_word() }
+ pub fn is_word(&self) -> bool {
+ self.path.segments.len() == 1 && self.tokens.is_empty()
+ }
- pub fn span(&self) -> Span { self.meta().span }
+ pub fn span(&self) -> Span {
+ self.span
+ }
pub fn is_meta_item_list(&self) -> bool {
self.meta_item_list().is_some()
match self.node {
MetaItemKind::NameValue(ref v) => {
match v.node {
- ast::LitKind::Str(ref s, _) => Some((*s).clone()),
+ LitKind::Str(ref s, _) => Some((*s).clone()),
_ => None,
}
},
impl Attribute {
/// Extract the MetaItem from inside this Attribute.
- pub fn meta(&self) -> &MetaItem {
- &self.value
+ pub fn meta(&self) -> Option<MetaItem> {
+ let mut tokens = self.tokens.trees().peekable();
+ Some(MetaItem {
+ name: match self.path.segments.len() {
+ 1 => self.path.segments[0].identifier.name,
+ _ => return None,
+ },
+ node: if let Some(node) = MetaItemKind::from_tokens(&mut tokens) {
+ if tokens.peek().is_some() {
+ return None;
+ }
+ node
+ } else {
+ return None;
+ },
+ span: self.span,
+ })
+ }
+
+ pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T>
+ where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+ {
+ let mut parser = Parser::new(sess, self.tokens.clone(), None, false);
+ let result = f(&mut parser)?;
+ if parser.token != token::Eof {
+ parser.unexpected()?;
+ }
+ Ok(result)
+ }
+
+ pub fn parse_list<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, Vec<T>>
+ where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
+ {
+ if self.tokens.is_empty() {
+ return Ok(Vec::new());
+ }
+ self.parse(sess, |parser| {
+ parser.expect(&token::OpenDelim(token::Paren))?;
+ let mut list = Vec::new();
+ while !parser.eat(&token::CloseDelim(token::Paren)) {
+ list.push(f(parser)?);
+ if !parser.eat(&token::Comma) {
+ parser.expect(&token::CloseDelim(token::Paren))?;
+ break
+ }
+ }
+ Ok(list)
+ })
+ }
+
+ pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> {
+ if self.path.segments.len() > 1 {
+ sess.span_diagnostic.span_err(self.path.span, "expected ident, found path");
+ }
+
+ Ok(MetaItem {
+ name: self.path.segments.last().unwrap().identifier.name,
+ node: self.parse(sess, |parser| parser.parse_meta_item_kind())?,
+ span: self.span,
+ })
}
/// Convert self to a normal #[doc="foo"] comment, if it is a
/* Constructors */
pub fn mk_name_value_item_str(name: Name, value: Symbol) -> MetaItem {
- let value_lit = dummy_spanned(ast::LitKind::Str(value, ast::StrStyle::Cooked));
+ let value_lit = dummy_spanned(LitKind::Str(value, ast::StrStyle::Cooked));
mk_spanned_name_value_item(DUMMY_SP, name, value_lit)
}
Attribute {
id: id,
style: ast::AttrStyle::Inner,
- value: item,
+ path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)),
+ tokens: item.node.tokens(item.span),
is_sugared_doc: false,
span: sp,
}
Attribute {
id: id,
style: ast::AttrStyle::Outer,
- value: item,
+ path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)),
+ tokens: item.node.tokens(item.span),
is_sugared_doc: false,
span: sp,
}
pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos)
-> Attribute {
let style = doc_comment_style(&text.as_str());
- let lit = spanned(lo, hi, ast::LitKind::Str(text, ast::StrStyle::Cooked));
+ let lit = spanned(lo, hi, LitKind::Str(text, ast::StrStyle::Cooked));
Attribute {
id: id,
style: style,
- value: MetaItem {
- span: mk_sp(lo, hi),
- name: Symbol::intern("doc"),
- node: MetaItemKind::NameValue(lit),
- },
+ path: ast::Path::from_ident(mk_sp(lo, hi), ast::Ident::from_str("doc")),
+ tokens: MetaItemKind::NameValue(lit).tokens(mk_sp(lo, hi)),
is_sugared_doc: true,
span: mk_sp(lo, hi),
}
}
pub fn list_contains_name(items: &[NestedMetaItem], name: &str) -> bool {
- debug!("attr::list_contains_name (name={})", name);
items.iter().any(|item| {
- debug!(" testing: {:?}", item.name());
item.check_name(name)
})
}
pub fn contains_name(attrs: &[Attribute], name: &str) -> bool {
- debug!("attr::contains_name (name={})", name);
attrs.iter().any(|item| {
- debug!(" testing: {}", item.name());
item.check_name(name)
})
}
/// Determine what `#[inline]` attribute is present in `attrs`, if any.
pub fn find_inline_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> InlineAttr {
attrs.iter().fold(InlineAttr::None, |ia, attr| {
- match attr.value.node {
- _ if attr.value.name != "inline" => ia,
+ if attr.path != "inline" {
+ return ia;
+ }
+ let meta = match attr.meta() {
+ Some(meta) => meta.node,
+ None => return ia,
+ };
+ match meta {
MetaItemKind::Word => {
mark_used(attr);
InlineAttr::Hint
let mut rustc_depr: Option<RustcDeprecation> = None;
'outer: for attr in attrs_iter {
- let tag = attr.name();
- if tag != "rustc_deprecated" && tag != "unstable" && tag != "stable" {
+ if attr.path != "rustc_deprecated" && attr.path != "unstable" && attr.path != "stable" {
continue // not a stability level
}
mark_used(attr);
- if let Some(metas) = attr.meta_item_list() {
+ let meta = attr.meta();
+ if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta {
+ let meta = meta.as_ref().unwrap();
let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
if item.is_some() {
handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
}
};
- match &*tag.as_str() {
+ match &*meta.name.as_str() {
"rustc_deprecated" => {
if rustc_depr.is_some() {
span_err!(diagnostic, item_sp, E0540,
let mut depr: Option<Deprecation> = None;
'outer: for attr in attrs_iter {
- if attr.name() != "deprecated" {
+ if attr.path != "deprecated" {
continue
}
/// structure layout, and `packed` to remove padding.
pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec<ReprAttr> {
let mut acc = Vec::new();
- match attr.value.node {
- ast::MetaItemKind::List(ref items) if attr.value.name == "repr" => {
+ if attr.path == "repr" {
+ if let Some(items) = attr.meta_item_list() {
mark_used(attr);
for item in items {
if !item.is_meta_item() {
}
}
}
- // Not a "repr" hint: ignore.
- _ => { }
}
acc
}
}
}
+impl MetaItem {
+ fn tokens(&self) -> TokenStream {
+ let ident = TokenTree::Token(self.span, Token::Ident(Ident::with_empty_ctxt(self.name)));
+ TokenStream::concat(vec![ident.into(), self.node.tokens(self.span)])
+ }
+
+ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
+ where I: Iterator<Item = TokenTree>,
+ {
+ let (mut span, name) = match tokens.next() {
+ Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
+ Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt {
+ token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
+ _ => None,
+ },
+ _ => return None,
+ };
+ let node = match MetaItemKind::from_tokens(tokens) {
+ Some(node) => node,
+ _ => return None,
+ };
+ if let Some(last_span) = node.last_span() {
+ span.hi = last_span.hi;
+ }
+ Some(MetaItem { name: name, span: span, node: node })
+ }
+}
+
+impl MetaItemKind {
+ fn last_span(&self) -> Option<Span> {
+ match *self {
+ MetaItemKind::Word => None,
+ MetaItemKind::List(ref list) => list.last().map(NestedMetaItem::span),
+ MetaItemKind::NameValue(ref lit) => Some(lit.span),
+ }
+ }
+
+ pub fn tokens(&self, span: Span) -> TokenStream {
+ match *self {
+ MetaItemKind::Word => TokenStream::empty(),
+ MetaItemKind::NameValue(ref lit) => {
+ TokenStream::concat(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
+ }
+ MetaItemKind::List(ref list) => {
+ let mut tokens = Vec::new();
+ for (i, item) in list.iter().enumerate() {
+ if i > 0 {
+ tokens.push(TokenTree::Token(span, Token::Comma).into());
+ }
+ tokens.push(item.node.tokens());
+ }
+ TokenTree::Delimited(span, Delimited {
+ delim: token::Paren,
+ tts: TokenStream::concat(tokens).into(),
+ }).into()
+ }
+ }
+ }
+
+ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
+ where I: Iterator<Item = TokenTree>,
+ {
+ let delimited = match tokens.peek().cloned() {
+ Some(TokenTree::Token(_, token::Eq)) => {
+ tokens.next();
+ return if let Some(TokenTree::Token(span, token)) = tokens.next() {
+ LitKind::from_token(token)
+ .map(|lit| MetaItemKind::NameValue(Spanned { node: lit, span: span }))
+ } else {
+ None
+ };
+ }
+ Some(TokenTree::Delimited(_, ref delimited)) if delimited.delim == token::Paren => {
+ tokens.next();
+ delimited.stream()
+ }
+ _ => return Some(MetaItemKind::Word),
+ };
+
+ let mut tokens = delimited.into_trees().peekable();
+ let mut result = Vec::new();
+ while let Some(..) = tokens.peek() {
+ match NestedMetaItemKind::from_tokens(&mut tokens) {
+ Some(item) => result.push(Spanned { span: item.span(), node: item }),
+ None => return None,
+ }
+ match tokens.next() {
+ None | Some(TokenTree::Token(_, Token::Comma)) => {}
+ _ => return None,
+ }
+ }
+ Some(MetaItemKind::List(result))
+ }
+}
+
+impl NestedMetaItemKind {
+ fn span(&self) -> Span {
+ match *self {
+ NestedMetaItemKind::MetaItem(ref item) => item.span,
+ NestedMetaItemKind::Literal(ref lit) => lit.span,
+ }
+ }
+
+ fn tokens(&self) -> TokenStream {
+ match *self {
+ NestedMetaItemKind::MetaItem(ref item) => item.tokens(),
+ NestedMetaItemKind::Literal(ref lit) => lit.tokens(),
+ }
+ }
+
+ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItemKind>
+ where I: Iterator<Item = TokenTree>,
+ {
+ if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
+ if let Some(node) = LitKind::from_token(token) {
+ tokens.next();
+ return Some(NestedMetaItemKind::Literal(Spanned { node: node, span: span }));
+ }
+ }
+
+ MetaItem::from_tokens(tokens).map(NestedMetaItemKind::MetaItem)
+ }
+}
+
+impl Lit {
+ fn tokens(&self) -> TokenStream {
+ TokenTree::Token(self.span, self.node.token()).into()
+ }
+}
+
+impl LitKind {
+ fn token(&self) -> Token {
+ use std::ascii;
+
+ match *self {
+ LitKind::Str(string, ast::StrStyle::Cooked) => {
+ let mut escaped = String::new();
+ for ch in string.as_str().chars() {
+ escaped.extend(ch.escape_unicode());
+ }
+ Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None)
+ }
+ LitKind::Str(string, ast::StrStyle::Raw(n)) => {
+ Token::Literal(token::Lit::StrRaw(string, n), None)
+ }
+ LitKind::ByteStr(ref bytes) => {
+ let string = bytes.iter().cloned().flat_map(ascii::escape_default)
+ .map(Into::<char>::into).collect::<String>();
+ Token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None)
+ }
+ LitKind::Byte(byte) => {
+ let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
+ Token::Literal(token::Lit::Byte(Symbol::intern(&string)), None)
+ }
+ LitKind::Char(ch) => {
+ let string: String = ch.escape_default().map(Into::<char>::into).collect();
+ Token::Literal(token::Lit::Char(Symbol::intern(&string)), None)
+ }
+ LitKind::Int(n, ty) => {
+ let suffix = match ty {
+ ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())),
+ ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
+ ast::LitIntType::Unsuffixed => None,
+ };
+ Token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
+ }
+ LitKind::Float(symbol, ty) => {
+ Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
+ }
+ LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
+ LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value {
+ true => "true",
+ false => "false",
+ }))),
+ }
+ }
+
+ fn from_token(token: Token) -> Option<LitKind> {
+ match token {
+ Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)),
+ Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)),
+ Token::Interpolated(ref nt) => match **nt {
+ token::NtExpr(ref v) => match v.node {
+ ExprKind::Lit(ref lit) => Some(lit.node.clone()),
+ _ => None,
+ },
+ _ => None,
+ },
+ Token::Literal(lit, suf) => {
+ let (suffix_illegal, result) = parse::lit_token(lit, suf, None);
+ if suffix_illegal && suf.is_some() {
+ return None;
+ }
+ result
+ }
+ _ => None,
+ }
+ }
+}
+
pub trait HasAttrs: Sized {
fn attrs(&self) -> &[ast::Attribute];
fn map_attrs<F: FnOnce(Vec<ast::Attribute>) -> Vec<ast::Attribute>>(self, f: F) -> Self;
use {fold, attr};
use ast;
use codemap::Spanned;
-use parse::ParseSess;
-use ptr::P;
+use parse::{token, ParseSess};
+use syntax_pos::Span;
+use ptr::P;
use util::small_vector::SmallVector;
/// A folder that strips out items that do not belong in the current configuration.
return Some(attr);
}
- let attr_list = match attr.meta_item_list() {
- Some(attr_list) => attr_list,
- None => {
- let msg = "expected `#[cfg_attr(<cfg pattern>, <attr>)]`";
- self.sess.span_diagnostic.span_err(attr.span, msg);
- return None;
- }
- };
-
- let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) {
- (2, Some(cfg), Some(mi)) => (cfg, mi),
- _ => {
- let msg = "expected `#[cfg_attr(<cfg pattern>, <attr>)]`";
- self.sess.span_diagnostic.span_err(attr.span, msg);
+ let (cfg, path, tokens, span) = match attr.parse(self.sess, |parser| {
+ parser.expect(&token::OpenDelim(token::Paren))?;
+ let cfg = parser.parse_meta_item()?;
+ parser.expect(&token::Comma)?;
+ let lo = parser.span.lo;
+ let (path, tokens) = parser.parse_path_and_tokens()?;
+ parser.expect(&token::CloseDelim(token::Paren))?;
+ Ok((cfg, path, tokens, Span { lo: lo, ..parser.prev_span }))
+ }) {
+ Ok(result) => result,
+ Err(mut e) => {
+ e.emit();
return None;
}
};
- use attr::cfg_matches;
- match (cfg.meta_item(), mi.meta_item()) {
- (Some(cfg), Some(mi)) =>
- if cfg_matches(&cfg, self.sess, self.features) {
- self.process_cfg_attr(ast::Attribute {
- id: attr::mk_attr_id(),
- style: attr.style,
- value: mi.clone(),
- is_sugared_doc: false,
- span: mi.span,
- })
- } else {
- None
- },
- _ => {
- let msg = "unexpected literal(s) in `#[cfg_attr(<cfg pattern>, <attr>)]`";
- self.sess.span_diagnostic.span_err(attr.span, msg);
- None
- }
+ if attr::cfg_matches(&cfg, self.sess, self.features) {
+ self.process_cfg_attr(ast::Attribute {
+ id: attr::mk_attr_id(),
+ style: attr.style,
+ path: path,
+ tokens: tokens,
+ is_sugared_doc: false,
+ span: span,
+ })
+ } else {
+ None
}
}
return false;
}
- let mis = match attr.value.node {
- ast::MetaItemKind::List(ref mis) if is_cfg(&attr) => mis,
- _ => return true
+ let mis = if !is_cfg(&attr) {
+ return true;
+ } else if let Some(mis) = attr.meta_item_list() {
+ mis
+ } else {
+ return true;
};
if mis.len() != 1 {
use {ast, codemap};
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
+use parse::parser::PathStyle;
use symbol::Symbol;
use syntax_pos::Span;
-pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<(Symbol, Span)> {
+pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
let mut result = Vec::new();
attrs.retain(|attr| {
- if attr.name() != "derive" {
+ if attr.path != "derive" {
return true;
}
- if attr.value_str().is_some() {
- cx.span_err(attr.span, "unexpected value in `derive`");
- return false;
- }
-
- let traits = attr.meta_item_list().unwrap_or(&[]).to_owned();
- if traits.is_empty() {
- cx.span_warn(attr.span, "empty trait list in `derive`");
- return false;
- }
-
- for titem in traits {
- if titem.word().is_none() {
- cx.span_err(titem.span, "malformed `derive` entry");
- return false;
+ match attr.parse_list(cx.parse_sess, |parser| parser.parse_path(PathStyle::Mod)) {
+ Ok(ref traits) if traits.is_empty() => {
+ cx.span_warn(attr.span, "empty trait list in `derive`");
+ false
+ }
+ Ok(traits) => {
+ result.extend(traits);
+ true
+ }
+ Err(mut e) => {
+ e.emit();
+ false
}
- result.push((titem.name().unwrap(), titem.span));
}
-
- true
});
result
}
}
}
-pub fn add_derived_markers<T: HasAttrs>(cx: &mut ExtCtxt, traits: &[(Symbol, Span)], item: T) -> T {
+pub fn add_derived_markers<T: HasAttrs>(cx: &mut ExtCtxt, traits: &[ast::Path], item: T) -> T {
let span = match traits.get(0) {
- Some(&(_, span)) => span,
+ Some(path) => path.span,
None => return item,
};
item.map_attrs(|mut attrs| {
- if traits.iter().any(|&(name, _)| name == "PartialEq") &&
- traits.iter().any(|&(name, _)| name == "Eq") {
+ if traits.iter().any(|path| *path == "PartialEq") &&
+ traits.iter().any(|path| *path == "Eq") {
let span = allow_unstable(cx, span, "derive(PartialEq, Eq)");
let meta = cx.meta_word(span, Symbol::intern("structural_match"));
attrs.push(cx.attribute(span, meta));
}
- if traits.iter().any(|&(name, _)| name == "Copy") &&
- traits.iter().any(|&(name, _)| name == "Clone") {
+ if traits.iter().any(|path| *path == "Copy") &&
+ traits.iter().any(|path| *path == "Clone") {
let span = allow_unstable(cx, span, "derive(Copy, Clone)");
let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker"));
attrs.push(cx.attribute(span, meta));
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ast::{self, Block, Ident, PatKind};
-use ast::{Name, MacStmtStyle, StmtKind, ItemKind};
+use ast::{self, Block, Ident, PatKind, Path};
+use ast::{MacStmtStyle, StmtKind, ItemKind};
use attr::{self, HasAttrs};
use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
use config::{is_test_or_bench, StripUnconfigured};
use std_inject;
use symbol::Symbol;
use symbol::keywords;
-use syntax_pos::{self, Span, ExpnId};
+use syntax_pos::{Span, ExpnId, DUMMY_SP};
use tokenstream::TokenStream;
use util::small_vector::SmallVector;
use visit::Visitor;
},
Attr {
attr: Option<ast::Attribute>,
- traits: Vec<(Symbol, Span)>,
+ traits: Vec<Path>,
item: Annotatable,
},
Derive {
- name: Symbol,
- span: Span,
+ path: Path,
item: Annotatable,
},
}
match self.kind {
InvocationKind::Bang { span, .. } => span,
InvocationKind::Attr { attr: Some(ref attr), .. } => attr.span,
- InvocationKind::Attr { attr: None, .. } => syntax_pos::DUMMY_SP,
- InvocationKind::Derive { span, .. } => span,
+ InvocationKind::Attr { attr: None, .. } => DUMMY_SP,
+ InvocationKind::Derive { ref path, .. } => path.span,
}
}
}
self.collect_invocations(expansion, &[])
} else if let InvocationKind::Attr { attr: None, traits, item } = invoc.kind {
let item = item
- .map_attrs(|mut attrs| { attrs.retain(|a| a.name() != "derive"); attrs });
+ .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs });
let item_with_markers =
add_derived_markers(&mut self.cx, &traits, item.clone());
let derives = derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new);
- for &(name, span) in &traits {
+ for path in &traits {
let mark = Mark::fresh();
derives.push(mark);
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
let item = match self.cx.resolver.resolve_macro(
- Mark::root(), &path, MacroKind::Derive, false) {
+ Mark::root(), path, MacroKind::Derive, false) {
Ok(ext) => match *ext {
SyntaxExtension::BuiltinDerive(..) => item_with_markers.clone(),
_ => item.clone(),
_ => item.clone(),
};
invocations.push(Invocation {
- kind: InvocationKind::Derive { name: name, span: span, item: item },
+ kind: InvocationKind::Derive { path: path.clone(), item: item },
expansion_kind: invoc.expansion_kind,
expansion_data: ExpansionData {
mark: mark,
};
attr::mark_used(&attr);
- let name = attr.name();
self.cx.bt_push(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
- format: MacroAttribute(name),
+ format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))),
span: Some(attr.span),
allow_internal_unstable: false,
}
match *ext {
MultiModifier(ref mac) => {
- let item = mac.expand(self.cx, attr.span, &attr.value, item);
+ let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+ let item = mac.expand(self.cx, attr.span, &meta, item);
kind.expect_from_annotatables(item)
}
MultiDecorator(ref mac) => {
let mut items = Vec::new();
- mac.expand(self.cx, attr.span, &attr.value, &item,
- &mut |item| items.push(item));
+ let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+ mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
items.push(item);
kind.expect_from_annotatables(items)
}
SyntaxExtension::AttrProcMacro(ref mac) => {
- let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess);
let item_toks = stream_for_item(&item, &self.cx.parse_sess);
let span = Span {
expn_id: self.cx.codemap().record_expansion(ExpnInfo {
call_site: attr.span,
callee: NameAndSpan {
- format: MacroAttribute(name),
+ format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))),
span: None,
allow_internal_unstable: false,
},
..attr.span
};
- let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
- self.parse_expansion(tok_result, kind, name, span)
+ let tok_result = mac.expand(self.cx, attr.span, attr.tokens.clone(), item_toks);
+ self.parse_expansion(tok_result, kind, &attr.path, span)
}
SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => {
- self.cx.span_err(attr.span, &format!("`{}` is a derive mode", name));
+ self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path));
kind.dummy(attr.span)
}
_ => {
- let msg = &format!("macro `{}` may not be used in attributes", name);
+ let msg = &format!("macro `{}` may not be used in attributes", attr.path);
self.cx.span_err(attr.span, &msg);
kind.dummy(attr.span)
}
};
let path = &mac.node.path;
- let extname = path.segments.last().unwrap().identifier.name;
let ident = ident.unwrap_or(keywords::Invalid.ident());
let marked_tts =
noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None });
NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
if ident.name != keywords::Invalid.name() {
let msg =
- format!("macro {}! expects no ident argument, given '{}'", extname, ident);
+ format!("macro {}! expects no ident argument, given '{}'", path, ident);
self.cx.span_err(path.span, &msg);
return kind.dummy(span);
}
self.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
- format: MacroBang(extname),
+ format: MacroBang(Symbol::intern(&format!("{}", path))),
span: exp_span,
allow_internal_unstable: allow_internal_unstable,
},
IdentTT(ref expander, tt_span, allow_internal_unstable) => {
if ident.name == keywords::Invalid.name() {
self.cx.span_err(path.span,
- &format!("macro {}! expects an ident argument", extname));
+ &format!("macro {}! expects an ident argument", path));
return kind.dummy(span);
};
self.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
- format: MacroBang(extname),
+ format: MacroBang(Symbol::intern(&format!("{}", path))),
span: tt_span,
allow_internal_unstable: allow_internal_unstable,
}
MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
self.cx.span_err(path.span,
- &format!("`{}` can only be used in attributes", extname));
+ &format!("`{}` can only be used in attributes", path));
return kind.dummy(span);
}
SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => {
- self.cx.span_err(path.span, &format!("`{}` is a derive mode", extname));
+ self.cx.span_err(path.span, &format!("`{}` is a derive mode", path));
return kind.dummy(span);
}
SyntaxExtension::ProcMacro(ref expandfun) => {
if ident.name != keywords::Invalid.name() {
let msg =
- format!("macro {}! expects no ident argument, given '{}'", extname, ident);
+ format!("macro {}! expects no ident argument, given '{}'", path, ident);
self.cx.span_err(path.span, &msg);
return kind.dummy(span);
}
self.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
- format: MacroBang(extname),
+ format: MacroBang(Symbol::intern(&format!("{}", path))),
// FIXME procedural macros do not have proper span info
// yet, when they do, we should use it here.
span: None,
});
let tok_result = expandfun.expand(self.cx, span, marked_tts);
- Some(self.parse_expansion(tok_result, kind, extname, span))
+ Some(self.parse_expansion(tok_result, kind, path, span))
}
};
/// Expand a derive invocation. Returns the result of expansion.
fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) -> Expansion {
let Invocation { expansion_kind: kind, .. } = invoc;
- let (name, span, item) = match invoc.kind {
- InvocationKind::Derive { name, span, item } => (name, span, item),
+ let (path, item) = match invoc.kind {
+ InvocationKind::Derive { path, item } => (path, item),
_ => unreachable!(),
};
- let mitem = ast::MetaItem { name: name, span: span, node: ast::MetaItemKind::Word };
- let pretty_name = Symbol::intern(&format!("derive({})", name));
+ let pretty_name = Symbol::intern(&format!("derive({})", path));
+ let span = path.span;
+ let attr = ast::Attribute {
+ path: path, tokens: TokenStream::empty(), span: span,
+ // irrelevant:
+ id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false,
+ };
self.cx.bt_push(ExpnInfo {
call_site: span,
callee: NameAndSpan {
format: MacroAttribute(pretty_name),
- span: Some(span),
+ span: None,
allow_internal_unstable: false,
}
});
}),
..span
};
- return kind.expect_from_annotatables(ext.expand(self.cx, span, &mitem, item));
+ let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this
+ name: keywords::Invalid.name(),
+ span: DUMMY_SP,
+ node: ast::MetaItemKind::Word,
+ };
+ return kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item));
}
SyntaxExtension::BuiltinDerive(func) => {
let span = Span {
..span
};
let mut items = Vec::new();
- func(self.cx, span, &mitem, &item, &mut |a| {
- items.push(a)
- });
+ func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a));
return kind.expect_from_annotatables(items);
}
_ => {
- let msg = &format!("macro `{}` may not be used for derive attributes", name);
+ let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
self.cx.span_err(span, &msg);
kind.dummy(span)
}
}
}
- fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span)
+ fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, path: &Path, span: Span)
-> Expansion {
let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::<Vec<_>>());
let expansion = match parser.parse_expansion(kind, false) {
return kind.dummy(span);
}
};
- parser.ensure_complete_parse(name, kind.name(), span);
+ parser.ensure_complete_parse(path, kind.name(), span);
// FIXME better span info
expansion.fold_with(&mut ChangeSpan { span: span })
}
})
}
- pub fn ensure_complete_parse(&mut self, macro_name: ast::Name, kind_name: &str, span: Span) {
+ pub fn ensure_complete_parse(&mut self, macro_path: &Path, kind_name: &str, span: Span) {
if self.token != token::Eof {
let msg = format!("macro expansion ignores token `{}` and any following",
self.this_token_to_string());
let mut err = self.diagnostic().struct_span_err(self.span, &msg);
let msg = format!("caused by the macro expansion here; the usage \
of `{}!` is likely invalid in {} context",
- macro_name, kind_name);
+ macro_path, kind_name);
err.span_note(span, &msg).emit();
}
}
fn collect_attr(&mut self,
attr: Option<ast::Attribute>,
- traits: Vec<(Symbol, Span)>,
+ traits: Vec<Path>,
item: Annotatable,
kind: ExpansionKind)
-> Expansion {
if !traits.is_empty() &&
(kind == ExpansionKind::TraitItems || kind == ExpansionKind::ImplItems) {
- self.cx.span_err(traits[0].1, "`derive` can be only be applied to items");
+ self.cx.span_err(traits[0].span, "`derive` can be only be applied to items");
return kind.expect_from_annotatables(::std::iter::once(item));
}
self.collect(kind, InvocationKind::Attr { attr: attr, traits: traits, item: item })
}
// If `item` is an attr invocation, remove and return the macro attribute.
- fn classify_item<T>(&mut self, mut item: T) -> (Option<ast::Attribute>, Vec<(Symbol, Span)>, T)
+ fn classify_item<T>(&mut self, mut item: T) -> (Option<ast::Attribute>, Vec<Path>, T)
where T: HasAttrs,
{
let (mut attr, mut traits) = (None, Vec::new());
string_to_stream(text, parse_sess)
}
-fn stream_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> TokenStream {
- use ast::MetaItemKind::*;
- use print::pp::Breaks;
- use print::pprust::PrintState;
-
- let token_string = match attr.value.node {
- // For `#[foo]`, an empty token
- Word => return TokenStream::empty(),
- // For `#[foo(bar, baz)]`, returns `(bar, baz)`
- List(ref items) => pprust::to_string(|s| {
- s.popen()?;
- s.commasep(Breaks::Consistent,
- &items[..],
- |s, i| s.print_meta_list_item(&i))?;
- s.pclose()
- }),
- // For `#[foo = "bar"]`, returns `= "bar"`
- NameValue(ref lit) => pprust::to_string(|s| {
- s.word_space("=")?;
- s.print_literal(lit)
- }),
- };
-
- string_to_stream(token_string, parse_sess)
-}
-
fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
let filename = String::from("<macro expansion>");
filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
// Detect if this is an inline module (`mod m { ... }` as opposed to `mod m;`).
// In the non-inline case, `inner` is never the dummy span (c.f. `parse_item_mod`).
// Thus, if `inner` is the dummy span, we know the module is inline.
- let inline_module = item.span.contains(inner) || inner == syntax_pos::DUMMY_SP;
+ let inline_module = item.span.contains(inner) || inner == DUMMY_SP;
if inline_module {
if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") {
}
impl ToTokens for ast::Attribute {
- fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
+ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
let mut r = vec![];
// FIXME: The spans could be better
r.push(TokenTree::Token(self.span, token::Pound));
if self.style == ast::AttrStyle::Inner {
r.push(TokenTree::Token(self.span, token::Not));
}
+ let mut inner = Vec::new();
+ for (i, segment) in self.path.segments.iter().enumerate() {
+ if i > 0 {
+ inner.push(TokenTree::Token(self.span, token::Colon).into());
+ }
+ inner.push(TokenTree::Token(self.span, token::Ident(segment.identifier)).into());
+ }
+ inner.push(self.tokens.clone());
+
r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
- delim: token::Bracket,
- tts: self.value.to_tokens(cx).into_iter().collect::<TokenStream>().into(),
+ delim: token::Bracket, tts: TokenStream::concat(inner).into()
}));
r
}
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
match name {
"tt" => {
- return token::NtTT(panictry!(p.parse_token_tree()));
+ return token::NtTT(p.parse_token_tree());
}
_ => {}
}
}
// Make sure we don't have any tokens left to parse so we don't silently drop anything.
- parser.ensure_complete_parse(macro_ident.name, kind.name(), site_span);
+ let path = ast::Path::from_ident(site_span, macro_ident);
+ parser.ensure_complete_parse(&path, kind.name(), site_span);
expansion
}
}
// Allows the `catch {...}` expression
(active, catch_expr, "1.17.0", Some(31436)),
+
+ // See rust-lang/rfcs#1414. Allows code like `let x: &'static u32 = &42` to work.
+ (active, rvalue_static_promotion, "1.15.1", Some(38865)),
);
declare_features! (
impl<'a> Context<'a> {
fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
debug!("check_attribute(attr = {:?})", attr);
- let name = &*attr.name().as_str();
+ let name = unwrap_or!(attr.name(), return);
+
for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
- if n == name {
+ if name == n {
if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
gate_feature_fn!(self, has_feature, attr.span, name, desc);
}
- debug!("check_attribute: {:?} is builtin, {:?}, {:?}", name, ty, gateage);
+ debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
return;
}
}
for &(ref n, ref ty) in self.plugin_attributes {
- if n == name {
+ if attr.path == &**n {
// Plugins can't gate attributes, so we don't check for it
// unlike the code above; we only use this loop to
// short-circuit to avoid the checks below
- debug!("check_attribute: {:?} is registered by a plugin, {:?}", name, ty);
+ debug!("check_attribute: {:?} is registered by a plugin, {:?}", attr.path, ty);
return;
}
}
- if name.starts_with("rustc_") {
+ if name.as_str().starts_with("rustc_") {
gate_feature!(self, rustc_attrs, attr.span,
"unless otherwise specified, attributes \
with the prefix `rustc_` \
are reserved for internal compiler diagnostics");
- } else if name.starts_with("derive_") {
+ } else if name.as_str().starts_with("derive_") {
gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE);
- } else if attr::is_known(attr) {
- debug!("check_attribute: {:?} is known", name);
- } else {
+ } else if !attr::is_known(attr) {
// Only run the custom attribute lint during regular
// feature gate checking. Macro gating runs
// before the plugin attributes are registered
unknown to the compiler and \
may have meaning \
added to it in the future",
- name));
+ attr.path));
}
}
}
self.context.check_attribute(attr, false);
}
- if contains_novel_literal(&attr.value) {
+ if self.context.features.proc_macro && attr::is_known(attr) {
+ return
+ }
+
+ let meta = panictry!(attr.parse_meta(&self.context.parse_sess));
+ if contains_novel_literal(&meta) {
gate_feature_post!(&self, attr_literals, attr.span,
"non-string literals in attributes, or string \
literals in top-level positions, are experimental");
`#[repr(simd)]` instead");
}
for attr in &i.attrs {
- if attr.name() == "repr" {
- for item in attr.meta_item_list().unwrap_or(&[]) {
+ if attr.path == "repr" {
+ for item in attr.meta_item_list().unwrap_or_else(Vec::new) {
if item.check_name("simd") {
gate_feature_post!(&self, repr_simd, i.span,
"SIMD types are experimental \
Some(Attribute {
id: attr.id,
style: attr.style,
- value: fld.fold_meta_item(attr.value),
+ path: fld.fold_path(attr.path),
+ tokens: fld.fold_tts(attr.tokens),
is_sugared_doc: attr.is_sugared_doc,
span: fld.new_span(attr.span),
})
token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
token::NtIdent(id) => token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}),
- token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
+ token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)),
token::NtPath(path) => token::NtPath(fld.fold_path(path)),
token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
matches_codepattern,
"matches_codepattern",
pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
- "#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
+ "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string());
}
// even inside macro defs....
})
}
+#[macro_export]
+macro_rules! unwrap_or {
+ ($opt:expr, $default:expr) => {
+ match $opt {
+ Some(x) => x,
+ None => $default,
+ }
+ }
+}
+
#[macro_use]
pub mod diagnostics {
#[macro_use]
use codemap::spanned;
use parse::common::SeqSep;
use parse::PResult;
-use parse::token;
-use parse::parser::{Parser, TokenType};
+use parse::token::{self, Nonterminal};
+use parse::parser::{Parser, TokenType, PathStyle};
+use tokenstream::TokenStream;
#[derive(PartialEq, Eq, Debug)]
enum InnerAttributeParsePolicy<'a> {
debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
inner_parse_policy,
self.token);
- let (span, value, mut style) = match self.token {
+ let (span, path, tokens, mut style) = match self.token {
token::Pound => {
let lo = self.span.lo;
self.bump();
};
self.expect(&token::OpenDelim(token::Bracket))?;
- let meta_item = self.parse_meta_item()?;
+ let (path, tokens) = self.parse_path_and_tokens()?;
self.expect(&token::CloseDelim(token::Bracket))?;
let hi = self.prev_span.hi;
- (mk_sp(lo, hi), meta_item, style)
+ (mk_sp(lo, hi), path, tokens, style)
}
_ => {
let token_str = self.this_token_to_string();
Ok(ast::Attribute {
id: attr::mk_attr_id(),
style: style,
- value: value,
+ path: path,
+ tokens: tokens,
is_sugared_doc: false,
span: span,
})
}
+ pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
+ let meta = match self.token {
+ token::Interpolated(ref nt) => match **nt {
+ Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
+ _ => None,
+ },
+ _ => None,
+ };
+ Ok(if let Some(meta) = meta {
+ self.bump();
+ (ast::Path::from_ident(meta.span, ast::Ident::with_empty_ctxt(meta.name)),
+ meta.node.tokens(meta.span))
+ } else {
+ (self.parse_path(PathStyle::Mod)?, self.parse_tokens())
+ })
+ }
+
/// Parse attributes that appear after the opening of an item. These should
/// be preceded by an exclamation mark, but we accept and warn about one
/// terminated by a semicolon.
let lo = self.span.lo;
let ident = self.parse_ident()?;
- let node = if self.eat(&token::Eq) {
+ let node = self.parse_meta_item_kind()?;
+ let hi = self.prev_span.hi;
+ Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
+ }
+
+ pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
+ Ok(if self.eat(&token::Eq) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
} else if self.token == token::OpenDelim(token::Paren) {
ast::MetaItemKind::List(self.parse_meta_seq()?)
} else {
+ self.eat(&token::OpenDelim(token::Paren));
ast::MetaItemKind::Word
- };
- let hi = self.prev_span.hi;
- Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
+ })
}
/// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;
base = 16;
num_digits = self.scan_digits(16, 16);
}
- '0'...'9' | '_' | '.' => {
+ '0'...'9' | '_' | '.' | 'e' | 'E' => {
num_digits = self.scan_digits(10, 10) + 1;
}
_ => {
s[1..].chars().all(|c| '0' <= c && c <= '9')
}
-fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, sd: &Handler, sp: Span)
- -> ast::LitKind {
+macro_rules! err {
+ ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
+ match $opt_diag {
+ Some(($span, $diag)) => { $($body)* }
+ None => return None,
+ }
+ }
+}
+
+pub fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
+ -> (bool /* suffix illegal? */, Option<ast::LitKind>) {
+ use ast::LitKind;
+
+ match lit {
+ token::Byte(i) => (true, Some(LitKind::Byte(byte_lit(&i.as_str()).0))),
+ token::Char(i) => (true, Some(LitKind::Char(char_lit(&i.as_str()).0))),
+
+ // There are some valid suffixes for integer and float literals,
+ // so all the handling is done internally.
+ token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)),
+ token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)),
+
+ token::Str_(s) => {
+ let s = Symbol::intern(&str_lit(&s.as_str()));
+ (true, Some(LitKind::Str(s, ast::StrStyle::Cooked)))
+ }
+ token::StrRaw(s, n) => {
+ let s = Symbol::intern(&raw_str_lit(&s.as_str()));
+ (true, Some(LitKind::Str(s, ast::StrStyle::Raw(n))))
+ }
+ token::ByteStr(i) => {
+ (true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str()))))
+ }
+ token::ByteStrRaw(i, _) => {
+ (true, Some(LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))))
+ }
+ }
+}
+
+fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+ -> Option<ast::LitKind> {
debug!("filtered_float_lit: {}, {:?}", data, suffix);
let suffix = match suffix {
Some(suffix) => suffix,
- None => return ast::LitKind::FloatUnsuffixed(data),
+ None => return Some(ast::LitKind::FloatUnsuffixed(data)),
};
- match &*suffix.as_str() {
+ Some(match &*suffix.as_str() {
"f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
"f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
suf => {
- if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
- // if it looks like a width, lets try to be helpful.
- sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..]))
- .help("valid widths are 32 and 64")
- .emit();
- } else {
- sd.struct_span_err(sp, &format!("invalid suffix `{}` for float literal", suf))
- .help("valid suffixes are `f32` and `f64`")
- .emit();
- }
+ err!(diag, |span, diag| {
+ if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
+ // if it looks like a width, lets try to be helpful.
+ let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+ diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
+ } else {
+ let msg = format!("invalid suffix `{}` for float literal", suf);
+ diag.struct_span_err(span, &msg)
+ .help("valid suffixes are `f32` and `f64`")
+ .emit();
+ }
+ });
ast::LitKind::FloatUnsuffixed(data)
}
- }
+ })
}
-pub fn float_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
+pub fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+ -> Option<ast::LitKind> {
debug!("float_lit: {:?}, {:?}", s, suffix);
// FIXME #2252: bounds checking float literals is deferred until trans
let s = s.chars().filter(|&c| c != '_').collect::<String>();
- filtered_float_lit(Symbol::intern(&s), suffix, sd, sp)
+ filtered_float_lit(Symbol::intern(&s), suffix, diag)
}
/// Parse a string representing a byte literal into its final form. Similar to `char_lit`
Rc::new(res)
}
-pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
+pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+ -> Option<ast::LitKind> {
// s can only be ascii, byte indexing is fine
let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
// 1f64 and 2f32 etc. are valid float literals.
if let Some(suf) = suffix {
if looks_like_width_suffix(&['f'], &suf.as_str()) {
- match base {
- 16 => sd.span_err(sp, "hexadecimal float literal is not supported"),
- 8 => sd.span_err(sp, "octal float literal is not supported"),
- 2 => sd.span_err(sp, "binary float literal is not supported"),
- _ => ()
+ let err = match base {
+ 16 => Some("hexadecimal float literal is not supported"),
+ 8 => Some("octal float literal is not supported"),
+ 2 => Some("binary float literal is not supported"),
+ _ => None,
+ };
+ if let Some(err) = err {
+ err!(diag, |span, diag| diag.span_err(span, err));
}
- return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp)
+ return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
}
}
}
if let Some(suf) = suffix {
- if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
+ if suf.as_str().is_empty() {
+ err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
+ }
ty = match &*suf.as_str() {
"isize" => ast::LitIntType::Signed(ast::IntTy::Is),
"i8" => ast::LitIntType::Signed(ast::IntTy::I8),
suf => {
// i<digits> and u<digits> look like widths, so lets
// give an error message along those lines
- if looks_like_width_suffix(&['i', 'u'], suf) {
- sd.struct_span_err(sp, &format!("invalid width `{}` for integer literal",
- &suf[1..]))
- .help("valid widths are 8, 16, 32, 64 and 128")
- .emit();
- } else {
- sd.struct_span_err(sp, &format!("invalid suffix `{}` for numeric literal", suf))
- .help("the suffix must be one of the integral types \
- (`u32`, `isize`, etc)")
- .emit();
- }
+ err!(diag, |span, diag| {
+ if looks_like_width_suffix(&['i', 'u'], suf) {
+ let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+ diag.struct_span_err(span, &msg)
+ .help("valid widths are 8, 16, 32, 64 and 128")
+ .emit();
+ } else {
+ let msg = format!("invalid suffix `{}` for numeric literal", suf);
+ diag.struct_span_err(span, &msg)
+ .help("the suffix must be one of the integral types \
+ (`u32`, `isize`, etc)")
+ .emit();
+ }
+ });
ty
}
debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);
- match u128::from_str_radix(s, base) {
+ Some(match u128::from_str_radix(s, base) {
Ok(r) => ast::LitKind::Int(r, ty),
Err(_) => {
// small bases are lexed as if they were base 10, e.g, the string
s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
if !already_errored {
- sd.span_err(sp, "int literal is too large");
+ err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
}
ast::LitKind::Int(0, ty)
}
- }
+ })
}
#[cfg(test)]
let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
let item = parse_item_from_source_str(name.clone(), source, &sess)
.unwrap().unwrap();
- let docs = item.attrs.iter().filter(|a| a.name() == "doc")
+ let docs = item.attrs.iter().filter(|a| a.path == "doc")
.map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
assert_eq!(&docs[..], b);
use std::collections::HashSet;
use std::{cmp, mem, slice};
use std::path::{Path, PathBuf};
-use std::rc::Rc;
bitflags! {
flags Restrictions: u8 {
self.parse_seq_to_before_tokens(kets,
SeqSep::none(),
- |p| p.parse_token_tree(),
+ |p| Ok(p.parse_token_tree()),
|mut e| handler.cancel(&mut e));
}
break;
}
token::OpenDelim(token::Brace) => {
- self.parse_token_tree()?;
+ self.parse_token_tree();
break;
}
_ => self.bump(),
_ => { return self.unexpected_last(&self.token); }
},
token::Literal(lit, suf) => {
- let (suffix_illegal, out) = match lit {
- token::Byte(i) => (true, LitKind::Byte(parse::byte_lit(&i.as_str()).0)),
- token::Char(i) => (true, LitKind::Char(parse::char_lit(&i.as_str()).0)),
-
- // there are some valid suffixes for integer and
- // float literals, so all the handling is done
- // internally.
- token::Integer(s) => {
- let diag = &self.sess.span_diagnostic;
- (false, parse::integer_lit(&s.as_str(), suf, diag, self.span))
- }
- token::Float(s) => {
- let diag = &self.sess.span_diagnostic;
- (false, parse::float_lit(&s.as_str(), suf, diag, self.span))
- }
-
- token::Str_(s) => {
- let s = Symbol::intern(&parse::str_lit(&s.as_str()));
- (true, LitKind::Str(s, ast::StrStyle::Cooked))
- }
- token::StrRaw(s, n) => {
- let s = Symbol::intern(&parse::raw_str_lit(&s.as_str()));
- (true, LitKind::Str(s, ast::StrStyle::Raw(n)))
- }
- token::ByteStr(i) => {
- (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str())))
- }
- token::ByteStrRaw(i, _) => {
- (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))
- }
- };
+ let diag = Some((self.span, &self.sess.span_diagnostic));
+ let (suffix_illegal, result) = parse::lit_token(lit, suf, diag);
if suffix_illegal {
let sp = self.span;
self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf)
}
- out
+ result.unwrap()
}
_ => { return self.unexpected_last(&self.token); }
};
fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> {
match self.token {
- token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
- TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()),
+ token::OpenDelim(delim) => match self.parse_token_tree() {
+ TokenTree::Delimited(_, delimited) => Ok((delim, delimited.stream().into())),
_ => unreachable!(),
- }),
+ },
_ => Err(self.fatal("expected open delimiter")),
}
}
}
/// parse a single token tree from the input.
- pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
+ pub fn parse_token_tree(&mut self) -> TokenTree {
match self.token {
token::OpenDelim(..) => {
let frame = mem::replace(&mut self.token_cursor.frame,
self.token_cursor.stack.pop().unwrap());
self.span = frame.span;
self.bump();
- return Ok(TokenTree::Delimited(frame.span, Delimited {
+ TokenTree::Delimited(frame.span, Delimited {
delim: frame.delim,
tts: frame.tree_cursor.original_stream().into(),
- }));
+ })
},
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {
let token = mem::replace(&mut self.token, token::Underscore);
- let res = Ok(TokenTree::Token(self.span, token));
self.bump();
- res
+ TokenTree::Token(self.prev_span, token)
}
}
}
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
- tts.push(self.parse_token_tree()?);
+ tts.push(self.parse_token_tree());
}
Ok(tts)
}
+ pub fn parse_tokens(&mut self) -> TokenStream {
+ let mut result = Vec::new();
+ loop {
+ match self.token {
+ token::Eof | token::CloseDelim(..) => break,
+ _ => result.push(self.parse_token_tree().into()),
+ }
+ }
+ TokenStream::concat(result)
+ }
+
/// Parse a prefix-unary-operator expr
pub fn parse_prefix_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
let attr = ast::Attribute {
id: attr::mk_attr_id(),
style: ast::AttrStyle::Outer,
- value: ast::MetaItem {
- name: Symbol::intern("warn_directory_ownership"),
- node: ast::MetaItemKind::Word,
- span: syntax_pos::DUMMY_SP,
- },
+ path: ast::Path::from_ident(syntax_pos::DUMMY_SP,
+ Ident::from_str("warn_directory_ownership")),
+ tokens: TokenStream::empty(),
is_sugared_doc: false,
span: syntax_pos::DUMMY_SP,
};
use ast::{self};
use ptr::P;
use symbol::keywords;
-use tokenstream;
+use tokenstream::TokenTree;
use std::fmt;
use std::rc::Rc;
/// Stuff inside brackets for attributes
NtMeta(ast::MetaItem),
NtPath(ast::Path),
- NtTT(tokenstream::TokenTree),
+ NtTT(TokenTree),
// These are not exposed to macros, but are used by quasiquote.
NtArm(ast::Arm),
NtImplItem(ast::ImplItem),
use std_inject;
use symbol::{Symbol, keywords};
use syntax_pos::DUMMY_SP;
-use tokenstream::{self, TokenTree};
+use tokenstream::{self, TokenStream, TokenTree};
use std::ascii;
use std::io::{self, Write, Read};
to_string(|s| s.print_tts(tts.iter().cloned().collect()))
}
+pub fn tokens_to_string(tokens: TokenStream) -> String {
+ to_string(|s| s.print_tts(tokens))
+}
+
pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
to_string(|s| s.print_stmt(stmt))
}
ast::AttrStyle::Inner => word(self.writer(), "#![")?,
ast::AttrStyle::Outer => word(self.writer(), "#[")?,
}
- self.print_meta_item(&attr.meta())?;
+ if let Some(mi) = attr.meta() {
+ self.print_meta_item(&mi)?
+ } else {
+ for (i, segment) in attr.path.segments.iter().enumerate() {
+ if i > 0 {
+ word(self.writer(), "::")?
+ }
+ if segment.identifier.name != keywords::CrateRoot.name() &&
+ segment.identifier.name != "$crate" {
+ word(self.writer(), &segment.identifier.name.as_str())?;
+ }
+ }
+ space(self.writer())?;
+ self.print_tts(attr.tokens.clone())?;
+ }
word(self.writer(), "]")
}
}
self.end()
}
+ /// This doesn't deserve to be called "pretty" printing, but it should be
+ /// meaning-preserving. A quick hack that might help would be to look at the
+ /// spans embedded in the TTs to decide where to put spaces and newlines.
+ /// But it'd be better to parse these according to the grammar of the
+ /// appropriate macro, transcribe back into the grammar we just parsed from,
+ /// and then pretty-print the resulting AST nodes (so, e.g., we print
+ /// expression arguments as expressions). It can be done! I think.
+ fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
+ match tt {
+ TokenTree::Token(_, ref tk) => {
+ word(self.writer(), &token_to_string(tk))?;
+ match *tk {
+ parse::token::DocComment(..) => {
+ hardbreak(self.writer())
+ }
+ _ => Ok(())
+ }
+ }
+ TokenTree::Delimited(_, ref delimed) => {
+ word(self.writer(), &token_to_string(&delimed.open_token()))?;
+ space(self.writer())?;
+ self.print_tts(delimed.stream())?;
+ space(self.writer())?;
+ word(self.writer(), &token_to_string(&delimed.close_token()))
+ },
+ }
+ }
+
+ fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> {
+ self.ibox(0)?;
+ for (i, tt) in tts.into_trees().enumerate() {
+ if i != 0 {
+ space(self.writer())?;
+ }
+ self.print_tt(tt)?;
+ }
+ self.end()
+ }
+
fn space_if_not_bol(&mut self) -> io::Result<()> {
if !self.is_bol() { space(self.writer())?; }
Ok(())
}
}
- /// This doesn't deserve to be called "pretty" printing, but it should be
- /// meaning-preserving. A quick hack that might help would be to look at the
- /// spans embedded in the TTs to decide where to put spaces and newlines.
- /// But it'd be better to parse these according to the grammar of the
- /// appropriate macro, transcribe back into the grammar we just parsed from,
- /// and then pretty-print the resulting AST nodes (so, e.g., we print
- /// expression arguments as expressions). It can be done! I think.
- pub fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
- match tt {
- TokenTree::Token(_, ref tk) => {
- word(&mut self.s, &token_to_string(tk))?;
- match *tk {
- parse::token::DocComment(..) => {
- hardbreak(&mut self.s)
- }
- _ => Ok(())
- }
- }
- TokenTree::Delimited(_, ref delimed) => {
- word(&mut self.s, &token_to_string(&delimed.open_token()))?;
- space(&mut self.s)?;
- self.print_tts(delimed.stream())?;
- space(&mut self.s)?;
- word(&mut self.s, &token_to_string(&delimed.close_token()))
- },
- }
- }
-
- pub fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> {
- self.ibox(0)?;
- for (i, tt) in tts.into_trees().enumerate() {
- if i != 0 {
- space(&mut self.s)?;
- }
- self.print_tt(tt)?;
- }
- self.end()
- }
-
pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
self.head("")?;
let generics = ast::Generics::default();
use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute};
use parse::ParseSess;
use ptr::P;
+use tokenstream::TokenStream;
/// Craft a span that will be ignored by the stability lint's
/// call to codemap's is_internal check.
krate.module.items.insert(0, P(ast::Item {
attrs: vec![ast::Attribute {
style: ast::AttrStyle::Outer,
- value: ast::MetaItem {
- name: Symbol::intern("prelude_import"),
- node: ast::MetaItemKind::Word,
- span: span,
- },
+ path: ast::Path::from_ident(span, ast::Ident::from_str("prelude_import")),
+ tokens: TokenStream::empty(),
id: attr::mk_attr_id(),
is_sugared_doc: false,
span: span,
}
}
+impl From<Token> for TokenStream {
+ fn from(token: Token) -> TokenStream {
+ TokenTree::Token(DUMMY_SP, token).into()
+ }
+}
+
impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
TokenStream::concat(iter.into_iter().map(Into::into).collect::<Vec<_>>())
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.write_str(&pprust::tts_to_string(&self.trees().collect::<Vec<_>>()))
+ f.write_str(&pprust::tokens_to_string(self.clone()))
}
}
impl<'a> Visitor<'a> for MarkAttrs<'a> {
fn visit_attribute(&mut self, attr: &Attribute) {
- if self.0.contains(&attr.name()) {
- mark_used(attr);
- mark_known(attr);
+ if let Some(name) = attr.name() {
+ if self.0.contains(&name) {
+ mark_used(attr);
+ mark_known(attr);
+ }
}
}
attrs.extend(item.attrs
.iter()
.filter(|a| {
- match &*a.name().as_str() {
+ a.name().is_some() && match &*a.name().unwrap().as_str() {
"allow" | "warn" | "deny" | "forbid" | "stable" | "unstable" => true,
_ => false,
}
fn visit_item(&mut self, item: &'a ast::Item) {
if let ast::ItemKind::MacroDef(..) = item.node {
if self.is_proc_macro_crate &&
- item.attrs.iter().any(|attr| attr.name() == "macro_export") {
+ item.attrs.iter().any(|attr| attr.path == "macro_export") {
let msg =
"cannot export macro_rules! macros from a `proc-macro` crate type currently";
self.handler.span_err(item.span, msg);
for attr in &item.attrs {
if is_proc_macro_attr(&attr) {
if let Some(prev_attr) = found_attr {
- let msg = if attr.name() == prev_attr.name() {
+ let msg = if attr.path == prev_attr.path {
format!("Only one `#[{}]` attribute is allowed on any given function",
- attr.name())
+ attr.path)
} else {
format!("`#[{}]` and `#[{}]` attributes cannot both be applied \
- to the same function", attr.name(), prev_attr.name())
+ to the same function", attr.path, prev_attr.path)
};
self.handler.struct_span_err(attr.span(), &msg)
if !is_fn {
let msg = format!("the `#[{}]` attribute may only be used on bare functions",
- attr.name());
+ attr.path);
self.handler.span_err(attr.span(), &msg);
return;
if !self.is_proc_macro_crate {
let msg = format!("the `#[{}]` attribute is only usable with crates of the \
- `proc-macro` crate type", attr.name());
+ `proc-macro` crate type", attr.path);
self.handler.span_err(attr.span(), &msg);
return;
-Subproject commit 859fb269364623b17e092efaba3f94e70ce97c5e
+Subproject commit d5ef27a79661d4f0d57d7b7d2cdbe9204f790a4a
// except according to those terms.
#include "rustllvm.h"
+#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/DiagnosticPrinter.h"
#include "llvm/IR/Instructions.h"
const char *LinkageName, LLVMRustMetadataRef File, unsigned LineNo,
LLVMRustMetadataRef Ty, bool IsLocalToUnit, LLVMValueRef V,
LLVMRustMetadataRef Decl = nullptr, uint32_t AlignInBits = 0) {
- Constant *InitVal = cast<Constant>(unwrap(V));
+ llvm::GlobalVariable *InitVal = cast<llvm::GlobalVariable>(unwrap(V));
#if LLVM_VERSION_GE(4, 0)
llvm::DIExpression *InitExpr = nullptr;
InitExpr = Builder->createConstantValueExpression(
FPVal->getValueAPF().bitcastToAPInt().getZExtValue());
}
-#endif
-#if LLVM_VERSION_GE(4, 0)
- return wrap(Builder->createGlobalVariableExpression(
+ llvm::DIGlobalVariableExpression *VarExpr = Builder->createGlobalVariableExpression(
+ unwrapDI<DIDescriptor>(Context), Name, LinkageName,
+ unwrapDI<DIFile>(File), LineNo, unwrapDI<DIType>(Ty), IsLocalToUnit,
+ InitExpr, unwrapDIPtr<MDNode>(Decl), AlignInBits);
+
+ InitVal->setMetadata("dbg", VarExpr);
+
+ return wrap(VarExpr);
#else
return wrap(Builder->createGlobalVariable(
-#endif
unwrapDI<DIDescriptor>(Context), Name, LinkageName,
unwrapDI<DIFile>(File), LineNo, unwrapDI<DIType>(Ty), IsLocalToUnit,
-#if LLVM_VERSION_GE(4, 0)
- InitExpr,
-#else
- InitVal,
-#endif
- unwrapDIPtr<MDNode>(Decl)
-#if LLVM_VERSION_GE(4, 0)
- ,
- AlignInBits
+ InitVal, unwrapDIPtr<MDNode>(Decl)));
#endif
- ));
}
extern "C" LLVMRustMetadataRef LLVMRustDIBuilderCreateVariable(
# If this file is modified, then llvm will be (optionally) cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2017-03-04
+2017-03-19
// This should not introduce a codegen item
let _ = cgu_generic_function::exported_but_not_generic(3);
}
-
-//~ TRANS_ITEM drop-glue i8
// ignore-tidy-linelength
// compile-flags:-Zprint-trans-items=eager
-//~ TRANS_ITEM drop-glue drop_in_place_intrinsic::StructWithDtor[0]
-//~ TRANS_ITEM drop-glue-contents drop_in_place_intrinsic::StructWithDtor[0]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<drop_in_place_intrinsic::StructWithDtor[0]> @@ drop_in_place_intrinsic.cgu-0[Internal]
struct StructWithDtor(u32);
impl Drop for StructWithDtor {
//~ TRANS_ITEM fn drop_in_place_intrinsic::main[0]
fn main() {
- //~ TRANS_ITEM drop-glue [drop_in_place_intrinsic::StructWithDtor[0]; 2]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<[drop_in_place_intrinsic::StructWithDtor[0]; 2]> @@ drop_in_place_intrinsic.cgu-0[Internal]
let x = [StructWithDtor(0), StructWithDtor(1)];
drop_slice_in_place(&x);
// This is the interesting thing in this test case: Normally we would
// not have drop-glue for the unsized [StructWithDtor]. This has to be
// generated though when the drop_in_place() intrinsic is used.
- //~ TRANS_ITEM drop-glue [drop_in_place_intrinsic::StructWithDtor[0]]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<[drop_in_place_intrinsic::StructWithDtor[0]]> @@ drop_in_place_intrinsic.cgu-0[Internal]
::std::ptr::drop_in_place(x as *const _ as *mut [StructWithDtor]);
}
}
//~ TRANS_ITEM fn function_as_argument::take_fn_once[0]<u32, &str, fn(u32, &str)>
//~ TRANS_ITEM fn function_as_argument::function[0]<u32, &str>
+ //~ TRANS_ITEM fn core::ops[0]::FnOnce[0]::call_once[0]<fn(u32, &str), (u32, &str)>
take_fn_once(function, 0u32, "abc");
//~ TRANS_ITEM fn function_as_argument::take_fn_once[0]<char, f64, fn(char, f64)>
//~ TRANS_ITEM fn function_as_argument::function[0]<char, f64>
+ //~ TRANS_ITEM fn core::ops[0]::FnOnce[0]::call_once[0]<fn(char, f64), (char, f64)>
take_fn_once(function, 'c', 0f64);
//~ TRANS_ITEM fn function_as_argument::take_fn_pointer[0]<i32, ()>
//~ TRANS_ITEM fn function_as_argument::function[0]<f32, i64>
take_fn_pointer(function, 0f32, 0i64);
}
-
-//~ TRANS_ITEM drop-glue i8
struct NonGenericNoDrop(i32);
struct NonGenericWithDrop(i32);
-//~ TRANS_ITEM drop-glue generic_drop_glue::NonGenericWithDrop[0]
-//~ TRANS_ITEM drop-glue-contents generic_drop_glue::NonGenericWithDrop[0]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<generic_drop_glue::NonGenericWithDrop[0]> @@ generic_drop_glue.cgu-0[Internal]
impl Drop for NonGenericWithDrop {
//~ TRANS_ITEM fn generic_drop_glue::{{impl}}[2]::drop[0]
//~ TRANS_ITEM fn generic_drop_glue::main[0]
fn main() {
- //~ TRANS_ITEM drop-glue generic_drop_glue::StructWithDrop[0]<i8, char>
- //~ TRANS_ITEM drop-glue-contents generic_drop_glue::StructWithDrop[0]<i8, char>
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<generic_drop_glue::StructWithDrop[0]<i8, char>> @@ generic_drop_glue.cgu-0[Internal]
//~ TRANS_ITEM fn generic_drop_glue::{{impl}}[0]::drop[0]<i8, char>
let _ = StructWithDrop { x: 0i8, y: 'a' }.x;
- //~ TRANS_ITEM drop-glue generic_drop_glue::StructWithDrop[0]<&str, generic_drop_glue::NonGenericNoDrop[0]>
- //~ TRANS_ITEM drop-glue-contents generic_drop_glue::StructWithDrop[0]<&str, generic_drop_glue::NonGenericNoDrop[0]>
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<generic_drop_glue::StructWithDrop[0]<&str, generic_drop_glue::NonGenericNoDrop[0]>> @@ generic_drop_glue.cgu-0[Internal]
//~ TRANS_ITEM fn generic_drop_glue::{{impl}}[0]::drop[0]<&str, generic_drop_glue::NonGenericNoDrop[0]>
let _ = StructWithDrop { x: "&str", y: NonGenericNoDrop(0) }.y;
// This is supposed to generate drop-glue because it contains a field that
// needs to be dropped.
- //~ TRANS_ITEM drop-glue generic_drop_glue::StructNoDrop[0]<generic_drop_glue::NonGenericWithDrop[0], f64>
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<generic_drop_glue::StructNoDrop[0]<generic_drop_glue::NonGenericWithDrop[0], f64>> @@ generic_drop_glue.cgu-0[Internal]
let _ = StructNoDrop { x: NonGenericWithDrop(0), y: 0f64 }.y;
- //~ TRANS_ITEM drop-glue generic_drop_glue::EnumWithDrop[0]<i32, i64>
- //~ TRANS_ITEM drop-glue-contents generic_drop_glue::EnumWithDrop[0]<i32, i64>
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<generic_drop_glue::EnumWithDrop[0]<i32, i64>> @@ generic_drop_glue.cgu-0[Internal]
//~ TRANS_ITEM fn generic_drop_glue::{{impl}}[1]::drop[0]<i32, i64>
let _ = match EnumWithDrop::A::<i32, i64>(0) {
EnumWithDrop::A(x) => x,
EnumWithDrop::B(x) => x as i32
};
- //~ TRANS_ITEM drop-glue generic_drop_glue::EnumWithDrop[0]<f64, f32>
- //~ TRANS_ITEM drop-glue-contents generic_drop_glue::EnumWithDrop[0]<f64, f32>
+ //~TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<generic_drop_glue::EnumWithDrop[0]<f64, f32>> @@ generic_drop_glue.cgu-0[Internal]
//~ TRANS_ITEM fn generic_drop_glue::{{impl}}[1]::drop[0]<f64, f32>
let _ = match EnumWithDrop::B::<f64, f32>(1.0) {
EnumWithDrop::A(x) => x,
EnumNoDrop::B(x) => x as f64
};
}
-
-//~ TRANS_ITEM drop-glue i8
fn main() {
let s1 = Struct { _a: 0u32 };
- //~ TRANS_ITEM drop-glue i8
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<instantiation_through_vtable::Struct[0]<u32>> @@ instantiation_through_vtable.cgu-0[Internal]
//~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::foo[0]<u32>
//~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::bar[0]<u32>
let _ = &s1 as &Trait;
let s1 = Struct { _a: 0u64 };
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<instantiation_through_vtable::Struct[0]<u64>> @@ instantiation_through_vtable.cgu-0[Internal]
//~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::foo[0]<u64>
//~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::bar[0]<u64>
let _ = &s1 as &Trait;
//~ TRANS_ITEM fn items_within_generic_items::generic_fn[0]<i8>
let _ = generic_fn(0i8);
}
-
-//~ TRANS_ITEM drop-glue i8
#![deny(dead_code)]
-//~ TRANS_ITEM drop-glue non_generic_drop_glue::StructWithDrop[0]
-//~ TRANS_ITEM drop-glue-contents non_generic_drop_glue::StructWithDrop[0]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<non_generic_drop_glue::StructWithDrop[0]> @@ non_generic_drop_glue.cgu-0[Internal]
struct StructWithDrop {
x: i32
}
x: i32
}
-//~ TRANS_ITEM drop-glue non_generic_drop_glue::EnumWithDrop[0]
-//~ TRANS_ITEM drop-glue-contents non_generic_drop_glue::EnumWithDrop[0]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<non_generic_drop_glue::EnumWithDrop[0]> @@ non_generic_drop_glue.cgu-0[Internal]
enum EnumWithDrop {
A(i32)
}
EnumNoDrop::A(x) => x
};
}
-
-//~ TRANS_ITEM drop-glue i8
let x = Struct { _x: 0 };
x.bar();
}
-
-//~ TRANS_ITEM drop-glue i8
&self.0
}
}
-
-//~ TRANS_ITEM drop-glue i8
fn main() { }
//~ TRANS_ITEM fn static_init::main[0]
-//~ TRANS_ITEM drop-glue i8
//~ TRANS_ITEM static statics_and_consts::foo[0]::STATIC2[2]
//~ TRANS_ITEM fn statics_and_consts::main[0]
-
-//~ TRANS_ITEM drop-glue i8
//~ TRANS_ITEM fn trait_implementations::{{impl}}[3]::bar[0]<&str, &str>
0f32.bar("&str", "&str");
}
-
-//~ TRANS_ITEM drop-glue i8
fn main() {
//~ TRANS_ITEM fn trait_method_as_argument::take_foo_once[0]<u32, fn(u32) -> u32>
//~ TRANS_ITEM fn trait_method_as_argument::{{impl}}[0]::foo[0]
+ //~ TRANS_ITEM fn core::ops[0]::FnOnce[0]::call_once[0]<fn(u32) -> u32, (u32)>
take_foo_once(Trait::foo, 0u32);
//~ TRANS_ITEM fn trait_method_as_argument::take_foo_once[0]<char, fn(char) -> char>
//~ TRANS_ITEM fn trait_method_as_argument::Trait[0]::foo[0]<char>
+ //~ TRANS_ITEM fn core::ops[0]::FnOnce[0]::call_once[0]<fn(char) -> char, (char)>
take_foo_once(Trait::foo, 'c');
//~ TRANS_ITEM fn trait_method_as_argument::take_foo[0]<u32, fn(u32) -> u32>
+ //~ TRANS_ITEM fn core::ops[0]::Fn[0]::call[0]<fn(u32) -> u32, (u32)>
take_foo(Trait::foo, 0u32);
//~ TRANS_ITEM fn trait_method_as_argument::take_foo[0]<char, fn(char) -> char>
+ //~ TRANS_ITEM fn core::ops[0]::Fn[0]::call[0]<fn(char) -> char, (char)>
take_foo(Trait::foo, 'c');
//~ TRANS_ITEM fn trait_method_as_argument::take_foo_mut[0]<u32, fn(u32) -> u32>
+ //~ TRANS_ITEM fn core::ops[0]::FnMut[0]::call_mut[0]<fn(char) -> char, (char)>
take_foo_mut(Trait::foo, 0u32);
//~ TRANS_ITEM fn trait_method_as_argument::take_foo_mut[0]<char, fn(char) -> char>
+ //~ TRANS_ITEM fn core::ops[0]::FnMut[0]::call_mut[0]<fn(u32) -> u32, (u32)>
take_foo_mut(Trait::foo, 'c');
}
-
-//~ TRANS_ITEM drop-glue i8
//~ TRANS_ITEM fn trait_method_default_impl::SomeGenericTrait[0]::bar[0]<u32, i16, ()>
0u32.bar(0i16, ());
}
-
-//~ TRANS_ITEM drop-glue i8
#![deny(dead_code)]
-//~ TRANS_ITEM drop-glue transitive_drop_glue::Root[0]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::Root[0]> @@ transitive_drop_glue.cgu-0[Internal]
struct Root(Intermediate);
-//~ TRANS_ITEM drop-glue transitive_drop_glue::Intermediate[0]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::Intermediate[0]> @@ transitive_drop_glue.cgu-0[Internal]
struct Intermediate(Leaf);
-//~ TRANS_ITEM drop-glue transitive_drop_glue::Leaf[0]
-//~ TRANS_ITEM drop-glue-contents transitive_drop_glue::Leaf[0]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::Leaf[0]> @@ transitive_drop_glue.cgu-0[Internal]
struct Leaf;
impl Drop for Leaf {
let _ = Root(Intermediate(Leaf));
- //~ TRANS_ITEM drop-glue transitive_drop_glue::RootGen[0]<u32>
- //~ TRANS_ITEM drop-glue transitive_drop_glue::IntermediateGen[0]<u32>
- //~ TRANS_ITEM drop-glue transitive_drop_glue::LeafGen[0]<u32>
- //~ TRANS_ITEM drop-glue-contents transitive_drop_glue::LeafGen[0]<u32>
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::RootGen[0]<u32>> @@ transitive_drop_glue.cgu-0[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::IntermediateGen[0]<u32>> @@ transitive_drop_glue.cgu-0[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::LeafGen[0]<u32>> @@ transitive_drop_glue.cgu-0[Internal]
//~ TRANS_ITEM fn transitive_drop_glue::{{impl}}[1]::drop[0]<u32>
let _ = RootGen(IntermediateGen(LeafGen(0u32)));
- //~ TRANS_ITEM drop-glue transitive_drop_glue::RootGen[0]<i16>
- //~ TRANS_ITEM drop-glue transitive_drop_glue::IntermediateGen[0]<i16>
- //~ TRANS_ITEM drop-glue transitive_drop_glue::LeafGen[0]<i16>
- //~ TRANS_ITEM drop-glue-contents transitive_drop_glue::LeafGen[0]<i16>
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::RootGen[0]<i16>> @@ transitive_drop_glue.cgu-0[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::IntermediateGen[0]<i16>> @@ transitive_drop_glue.cgu-0[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<transitive_drop_glue::LeafGen[0]<i16>> @@ transitive_drop_glue.cgu-0[Internal]
//~ TRANS_ITEM fn transitive_drop_glue::{{impl}}[1]::drop[0]<i16>
let _ = RootGen(IntermediateGen(LeafGen(0i16)));
}
#![deny(dead_code)]
-//~ TRANS_ITEM drop-glue tuple_drop_glue::Dropped[0]
-//~ TRANS_ITEM drop-glue-contents tuple_drop_glue::Dropped[0]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<tuple_drop_glue::Dropped[0]> @@ tuple_drop_glue.cgu-0[Internal]
struct Dropped;
impl Drop for Dropped {
//~ TRANS_ITEM fn tuple_drop_glue::main[0]
fn main() {
- //~ TRANS_ITEM drop-glue (u32, tuple_drop_glue::Dropped[0])
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<(u32, tuple_drop_glue::Dropped[0])> @@ tuple_drop_glue.cgu-0[Internal]
let x = (0u32, Dropped);
- //~ TRANS_ITEM drop-glue (i16, (tuple_drop_glue::Dropped[0], bool))
- //~ TRANS_ITEM drop-glue (tuple_drop_glue::Dropped[0], bool)
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<(i16, (tuple_drop_glue::Dropped[0], bool))> @@ tuple_drop_glue.cgu-0[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<(tuple_drop_glue::Dropped[0], bool)> @@ tuple_drop_glue.cgu-0[Internal]
let x = (0i16, (Dropped, true));
}
{
// simple case
let bool_sized = &true;
- //~ TRANS_ITEM drop-glue i8
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<bool> @@ unsizing.cgu-0[Internal]
//~ TRANS_ITEM fn unsizing::{{impl}}[0]::foo[0]
let _bool_unsized = bool_sized as &Trait;
- let char_sized = &true;
+ let char_sized = &'a';
+
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<char> @@ unsizing.cgu-0[Internal]
//~ TRANS_ITEM fn unsizing::{{impl}}[1]::foo[0]
let _char_unsized = char_sized as &Trait;
_b: 2,
_c: 3.0f64
};
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<f64> @@ unsizing.cgu-0[Internal]
//~ TRANS_ITEM fn unsizing::{{impl}}[2]::foo[0]
let _struct_unsized = struct_sized as &Struct<Trait>;
// custom coercion
let wrapper_sized = Wrapper(&0u32);
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<u32> @@ unsizing.cgu-0[Internal]
//~ TRANS_ITEM fn unsizing::{{impl}}[3]::foo[0]
let _wrapper_sized = wrapper_sized as Wrapper<Trait>;
}
// Only the non-generic methods should be instantiated:
//~ TRANS_ITEM fn unused_traits_and_generics::{{impl}}[3]::foo[0]
-//~ TRANS_ITEM drop-glue i8
// aux-build:cgu_extern_drop_glue.rs
extern crate cgu_extern_drop_glue;
-//~ TRANS_ITEM drop-glue cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[Internal] extern_drop_glue-mod1[Internal]
-//~ TRANS_ITEM drop-glue-contents cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[Internal] extern_drop_glue-mod1[Internal]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<cgu_extern_drop_glue::Struct[0]> @@ extern_drop_glue[Internal] extern_drop_glue-mod1[Internal]
struct LocalStruct(cgu_extern_drop_glue::Struct);
//~ TRANS_ITEM fn extern_drop_glue::user[0] @@ extern_drop_glue[External]
fn user()
{
- //~ TRANS_ITEM drop-glue extern_drop_glue::LocalStruct[0] @@ extern_drop_glue[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<extern_drop_glue::LocalStruct[0]> @@ extern_drop_glue[Internal]
let _ = LocalStruct(cgu_extern_drop_glue::Struct(0));
}
//~ TRANS_ITEM fn extern_drop_glue::mod1[0]::user[0] @@ extern_drop_glue-mod1[External]
fn user()
{
- //~ TRANS_ITEM drop-glue extern_drop_glue::mod1[0]::LocalStruct[0] @@ extern_drop_glue-mod1[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<extern_drop_glue::mod1[0]::LocalStruct[0]> @@ extern_drop_glue-mod1[Internal]
let _ = LocalStruct(cgu_extern_drop_glue::Struct(0));
}
}
// once for the current crate
//~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ cgu_generic_function.volatile[External]
//~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ cgu_generic_function.volatile[External]
-
-//~ TRANS_ITEM drop-glue i8
#![allow(dead_code)]
#![crate_type="lib"]
-//~ TRANS_ITEM drop-glue local_drop_glue::Struct[0] @@ local_drop_glue[Internal] local_drop_glue-mod1[Internal]
-//~ TRANS_ITEM drop-glue-contents local_drop_glue::Struct[0] @@ local_drop_glue[Internal] local_drop_glue-mod1[Internal]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<local_drop_glue::Struct[0]> @@ local_drop_glue[Internal] local_drop_glue-mod1[Internal]
struct Struct {
_a: u32
}
fn drop(&mut self) {}
}
-//~ TRANS_ITEM drop-glue local_drop_glue::Outer[0] @@ local_drop_glue[Internal]
+//~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<local_drop_glue::Outer[0]> @@ local_drop_glue[Internal]
struct Outer {
_a: Struct
}
{
use super::Struct;
- //~ TRANS_ITEM drop-glue local_drop_glue::mod1[0]::Struct2[0] @@ local_drop_glue-mod1[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<local_drop_glue::mod1[0]::Struct2[0]> @@ local_drop_glue-mod1[Internal]
struct Struct2 {
_a: Struct,
- //~ TRANS_ITEM drop-glue (u32, local_drop_glue::Struct[0]) @@ local_drop_glue-mod1[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<(u32, local_drop_glue::Struct[0])> @@ local_drop_glue-mod1[Internal]
_b: (u32, Struct),
}
static BAZ: u64 = 0;
}
}
-
-//~ TRANS_ITEM drop-glue i8
static BAR: u32 = 0;
}
}
-
-//~ TRANS_ITEM drop-glue i8
//~ TRANS_ITEM fn vtable_through_const::main[0] @@ vtable_through_const[External]
fn main() {
- //~ TRANS_ITEM drop-glue i8 @@ vtable_through_const[Internal]
+ //~ TRANS_ITEM fn core::ptr[0]::drop_in_place[0]<u32> @@ vtable_through_const[Internal]
// Since Trait1::do_something() is instantiated via its default implementation,
// it is considered a generic and is instantiated here only because it is
#[C] //~ ERROR: The attribute `C` is currently unknown to the compiler
#[B(D)]
#[B(E = "foo")]
+#[B arbitrary tokens] //~ expected one of `(` or `=`, found `arbitrary`
struct B;
fn main() {}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[allow(unused_variables)]
+fn main() {
+ let x: &'static u32 = &42; //~ error: does not live long enough
+ let y: &'static Option<u32> = &None; //~ error: does not live long enough
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[doc = $not_there] //~ error: unexpected token: `$`
+fn main() { }
globnar::brotz!(); //~ ERROR non-ident macro paths are experimental
::foo!(); //~ ERROR non-ident macro paths are experimental
foo::<T>!(); //~ ERROR type parameters are not allowed on macros
+ #[derive(foo::Bar)] struct T; //~ ERROR non-ident macro paths are experimental
}
// except according to those terms.
#[derive(Copy(Bad))]
-//~^ ERROR malformed `derive` entry
+//~^ ERROR expected one of `)`, `,`, or `::`, found `(`
struct Test1;
#[derive(Copy="bad")]
-//~^ ERROR malformed `derive` entry
+//~^ ERROR expected one of `)`, `,`, or `::`, found `=`
struct Test2;
#[derive()]
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(attr_literals)]
+
+#[path = 1usize] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1u8] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1u16] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1u32] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1u64] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1isize] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1i8] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1i16] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1i32] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1i64] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes
+#[path = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes
+fn main() { }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z parse-only
-
-// error-pattern:expected one of `=` or `]`
-
// asterisk is bogus
-#[attr*]
+#[path*] //~ ERROR expected one of `(` or `=`
mod m {}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -Z parse-only
-
-#[doc = $not_there] //~ error: unexpected token: `$`
-fn main() { }
+++ /dev/null
-// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -Z parse-only
-
-#[foo = 1usize] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1u8] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1u16] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1u32] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1u64] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1isize] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1i8] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1i16] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1i32] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1i64] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes
-#[foo = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes
-fn main() { }
_ => false,
};
conds.push(if is_else || input.peek().is_none() {
- qquote!({ unquote rhs })
+ quote!({ $rhs })
} else {
- qquote!(if unquote(test.unwrap()) { unquote rhs } else)
+ let test = test.unwrap();
+ quote!(if $test { $rhs } else)
});
}
// This macro is not very interesting, but it does contain delimited tokens with
// no content - `()` and `{}` - which has caused problems in the past.
+// Also, it tests that we can escape `$` via `$$`.
fn hello(_: TokenStream) -> TokenStream {
- qquote!({ fn hello() {} hello(); })
+ quote!({
+ fn hello() {}
+ macro_rules! m { ($$($$t:tt)*) => { $$($$t)* } }
+ m!(hello());
+ })
}
}
fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream {
- qquote!(fn f1() -> bool { true })
+ quote!(fn f1() -> bool { true })
}
fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
- qquote!(unquote item)
+ quote!($item)
}
fn tru(_ts: TokenStream) -> TokenStream {
- qquote!(true)
+ quote!(true)
}
fn ret_tru(_ts: TokenStream) -> TokenStream {
- qquote!(return true;)
+ quote!(return true;)
}
fn identity(ts: TokenStream) -> TokenStream {
- qquote!(unquote ts)
+ quote!($ts)
}
use syntax::tokenstream::TokenTree;
fn main() {
- let true_tok = TokenTree::Token(syntax_pos::DUMMY_SP, token::Ident(Ident::from_str("true")));
- assert!(qquote!(true).eq_unspanned(&true_tok.into()));
+ let true_tok = token::Ident(Ident::from_str("true"));
+ assert!(quote!(true).eq_unspanned(&true_tok.into()));
}
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that a macro can emit delimiters with nothing inside - `()`, `{}`
-
-// aux-build:hello_macro.rs
-// ignore-stage1
-
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(hello_macro)]
-
-fn main() {
- hello!();
-}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that a macro can emit delimiters with nothing inside - `()`, `{}`
+
+// aux-build:hello_macro.rs
+// ignore-stage1
+
+#![feature(plugin)]
+#![feature(rustc_private)]
+#![plugin(hello_macro)]
+
+fn main() {
+ hello!();
+}
#[attr_with_args(text = "Hello, world!")]
fn foo() {}
-fn main() {
- assert_eq!(foo(), "Hello, world!");
-}
+#[::attr_args::identity
+ fn main() { assert_eq!(foo(), "Hello, world!"); }]
+struct Dummy;
fn foo() -> &'static str { "Hello, world!" }
"#.parse().unwrap()
}
+
+#[proc_macro_attribute]
+pub fn identity(attr_args: TokenStream, _: TokenStream) -> TokenStream {
+ attr_args
+}
#[proc_macro_derive(B, attributes(B, C))]
pub fn derive(input: TokenStream) -> TokenStream {
let input = input.to_string();
- assert!(input.contains("#[B]"));
+ assert!(input.contains("#[B arbitrary tokens]"));
assert!(input.contains("struct B {"));
assert!(input.contains("#[C]"));
"".parse().unwrap()
// aux-build:derive-b.rs
// ignore-stage1
-#[macro_use]
+#![feature(proc_macro)]
+
extern crate derive_b;
-#[derive(Debug, PartialEq, B, Eq, Copy, Clone)]
-#[B]
+#[derive(Debug, PartialEq, derive_b::B, Eq, Copy, Clone)]
+#[cfg_attr(all(), B arbitrary tokens)]
struct B {
#[C]
a: u64
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+macro_rules! m { () => { $crate::main(); } }
t!(format!("{:?}", -0.0), "-0");
t!(format!("{:?}", 0.0), "0");
+ // sign aware zero padding
+ t!(format!("{:<3}", 1), "1 ");
+ t!(format!("{:>3}", 1), " 1");
+ t!(format!("{:^3}", 1), " 1 ");
+ t!(format!("{:03}", 1), "001");
+ t!(format!("{:<03}", 1), "001");
+ t!(format!("{:>03}", 1), "001");
+ t!(format!("{:^03}", 1), "001");
+ t!(format!("{:+03}", 1), "+01");
+ t!(format!("{:<+03}", 1), "+01");
+ t!(format!("{:>+03}", 1), "+01");
+ t!(format!("{:^+03}", 1), "+01");
+ t!(format!("{:#05x}", 1), "0x001");
+ t!(format!("{:<#05x}", 1), "0x001");
+ t!(format!("{:>#05x}", 1), "0x001");
+ t!(format!("{:^#05x}", 1), "0x001");
+ t!(format!("{:05}", 1.2), "001.2");
+ t!(format!("{:<05}", 1.2), "001.2");
+ t!(format!("{:>05}", 1.2), "001.2");
+ t!(format!("{:^05}", 1.2), "001.2");
+ t!(format!("{:05}", -1.2), "-01.2");
+ t!(format!("{:<05}", -1.2), "-01.2");
+ t!(format!("{:>05}", -1.2), "-01.2");
+ t!(format!("{:^05}", -1.2), "-01.2");
+ t!(format!("{:+05}", 1.2), "+01.2");
+ t!(format!("{:<+05}", 1.2), "+01.2");
+ t!(format!("{:>+05}", 1.2), "+01.2");
+ t!(format!("{:^+05}", 1.2), "+01.2");
// Ergonomic format_args!
t!(format!("{0:x} {0:X}", 15), "f F");
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::panic;
+
+impl<'a> panic::UnwindSafe for Foo<'a> {}
+impl<'a> panic::RefUnwindSafe for Foo<'a> {}
+
struct Foo<'a>(&'a mut bool);
impl<'a> Drop for Foo<'a> {
f(x);
}
assert!(ran_drop);
-}
+ let mut ran_drop = false;
+ {
+ let x = Foo(&mut ran_drop);
+ let result = panic::catch_unwind(move || {
+ let x = move || { let _ = x; panic!() };
+ f(x);
+ });
+ assert!(result.is_err());
+ }
+ assert!(ran_drop);
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct Struct<K: 'static> {
+ pub field: K,
+}
+
+// Partial fix for #31260, doesn't work without {...}.
+static STRUCT: Struct<&'static [u8]> = Struct {
+ field: {&[1]}
+};
+
+fn main() {}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[repr(u8)]
+enum Foo {
+ Foo(u8),
+}
+
+fn main() {
+ match Foo::Foo(1) {
+ _ => ()
+ }
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ println!("{}", 0E+10);
+ println!("{}", 0e+10);
+ println!("{}", 00e+10);
+ println!("{}", 00E+10);
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-pretty issue #37195
+
+#![allow(dead_code)]
+
+include!("auxiliary/issue_40469.rs");
+fn f() { m!(); }
+
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(fn_traits)]
+#![feature(never_type)]
+
+use std::panic;
+
+fn foo(x: u32, y: u32) -> u32 { x/y }
+fn foo_diverges() -> ! { panic!() }
+
+fn test_fn_ptr<T>(mut t: T)
+ where T: Fn(u32, u32) -> u32,
+{
+ let as_fn = <T as Fn<(u32, u32)>>::call;
+ assert_eq!(as_fn(&t, (9, 3)), 3);
+ let as_fn_mut = <T as FnMut<(u32, u32)>>::call_mut;
+ assert_eq!(as_fn_mut(&mut t, (18, 3)), 6);
+ let as_fn_once = <T as FnOnce<(u32, u32)>>::call_once;
+ assert_eq!(as_fn_once(t, (24, 3)), 8);
+}
+
+fn assert_panics<F>(f: F) where F: FnOnce() {
+ let f = panic::AssertUnwindSafe(f);
+ let result = panic::catch_unwind(move || {
+ f.0()
+ });
+ if let Ok(..) = result {
+ panic!("diverging function returned");
+ }
+}
+
+fn test_fn_ptr_panic<T>(mut t: T)
+ where T: Fn() -> !
+{
+ let as_fn = <T as Fn<()>>::call;
+ assert_panics(|| as_fn(&t, ()));
+ let as_fn_mut = <T as FnMut<()>>::call_mut;
+ assert_panics(|| as_fn_mut(&mut t, ()));
+ let as_fn_once = <T as FnOnce<()>>::call_once;
+ assert_panics(|| as_fn_once(t, ()));
+}
+
+fn main() {
+ test_fn_ptr(foo);
+ test_fn_ptr(foo as fn(u32, u32) -> u32);
+ test_fn_ptr_panic(foo_diverges);
+ test_fn_ptr_panic(foo_diverges as fn() -> !);
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rvalue_static_promotion)]
+
+#[allow(unused_variables)]
+fn main() {
+ let x: &'static u32 = &42;
+ let y: &'static Option<u32> = &None;
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(associated_consts)]
+
+use std::marker::PhantomData;
+
+pub struct Foo<'a> {
+ f: PhantomData<&'a u32>,
+}
+
+pub struct ContentType {
+ pub ttype: Foo<'static>,
+ pub subtype: Foo<'static>,
+ pub params: Option<Foo<'static>>,
+}
+
+impl ContentType {
+ // @has const_doc/struct.ContentType.html
+ // @has - '//*[@class="docblock"]' 'Any: ContentType = ContentType{ttype: Foo{f: '
+ pub const Any: ContentType = ContentType { ttype: Foo { f: PhantomData, },
+ subtype: Foo { f: PhantomData, },
+ params: None, };
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type="lib"]
+
+#![feature(const_fn)]
+
+pub struct Foo;
+
+impl Foo {
+ // @has const/struct.Foo.html '//*[@id="new.v"]//code' 'const unsafe fn new'
+ pub const unsafe fn new() -> Foo {
+ Foo
+ }
+}
error: cannot borrow immutable field `z.x` as mutable
--> $DIR/issue-39544.rs:21:18
|
+20 | let z = Z { x: X::Y };
+ | - consider changing this to `mut z`
21 | let _ = &mut z.x;
- | ^^^
+ | ^^^ cannot mutably borrow immutable field
error: aborting due to previous error
--> $DIR/E0536.rs:11:7
|
11 | #[cfg(not())] //~ ERROR E0536
- | ^^^^^
+ | ^^^
error: aborting due to previous error
--> $DIR/E0537.rs:11:7
|
11 | #[cfg(unknown())] //~ ERROR E0537
- | ^^^^^^^^^
+ | ^^^^^^^
error: aborting due to previous error
use std::collections::HashSet;
use std::env;
use std::fmt;
-use std::fs::{self, File};
+use std::fs::{self, File, create_dir_all};
use std::io::prelude::*;
use std::io::{self, BufReader};
use std::path::{Path, PathBuf};
let out_dir = self.output_base_name().with_extension("pretty-out");
let _ = fs::remove_dir_all(&out_dir);
- self.create_dir_racy(&out_dir);
+ create_dir_all(&out_dir).unwrap();
// FIXME (#9639): This needs to handle non-utf8 paths
let mut args = vec!["-".to_owned(),
fn compose_and_run_compiler(&self, args: ProcArgs, input: Option<String>) -> ProcRes {
if !self.props.aux_builds.is_empty() {
- self.create_dir_racy(&self.aux_output_dir_name());
+ create_dir_all(&self.aux_output_dir_name()).unwrap();
}
let aux_dir = self.aux_output_dir_name();
input)
}
- // Like std::fs::create_dir_all, except handles concurrent calls among multiple
- // threads or processes.
- fn create_dir_racy(&self, path: &Path) {
- match fs::create_dir(path) {
- Ok(()) => return,
- Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return,
- Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}
- Err(e) => panic!("failed to create dir {:?}: {}", path, e),
- }
- self.create_dir_racy(path.parent().unwrap());
- match fs::create_dir(path) {
- Ok(()) => {}
- Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => {}
- Err(e) => panic!("failed to create dir {:?}: {}", path, e),
- }
- }
fn compose_and_run(&self,
ProcArgs{ args, prog }: ProcArgs,
let mir_dump_dir = self.get_mir_dump_dir();
- self.create_dir_racy(mir_dump_dir.as_path());
+ create_dir_all(mir_dump_dir.as_path()).unwrap();
let mut dir_opt = "dump-mir-dir=".to_string();
dir_opt.push_str(mir_dump_dir.to_str().unwrap());
debug!("dir_opt: {:?}", dir_opt);
let out_dir = self.output_base_name();
let _ = fs::remove_dir_all(&out_dir);
- self.create_dir_racy(&out_dir);
+ create_dir_all(&out_dir).unwrap();
let proc_res = self.document(&out_dir);
if !proc_res.status.success() {
if tmpdir.exists() {
self.aggressive_rm_rf(&tmpdir).unwrap();
}
- self.create_dir_racy(&tmpdir);
+ create_dir_all(&tmpdir).unwrap();
let host = &self.config.host;
let make = if host.contains("bitrig") || host.contains("dragonfly") ||