- set SCCACHE_ERROR_LOG=%CD%/sccache.log
test_script:
- - appveyor-retry sh -c 'git submodule deinit -f . && git submodule update --init'
+ - if not exist C:\cache\rustsrc\NUL mkdir C:\cache\rustsrc
+ - sh src/ci/init_repo.sh . /c/cache/rustsrc
- set SRC=.
- set NO_CCACHE=1
- sh src/ci/run.sh
on_failure:
- - cat %CD%/sccache.log
+ - cat %CD%\sccache.log
+ - cat C:\Users\appveyor\AppData\Local\Temp\1\build-cache-logs\*.log
cache:
+ - C:\cache\rustsrc
- "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
- "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
- "i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
cputype = 'i686'
elif cputype in {'xscale', 'arm'}:
cputype = 'arm'
- elif cputype in {'armv6l', 'armv7l', 'armv8l'}:
+ elif cputype == 'armv6l':
cputype = 'arm'
ostype += 'eabihf'
- elif cputype == 'armv7l':
+ elif cputype in {'armv7l', 'armv8l'}:
cputype = 'armv7'
ostype += 'eabihf'
elif cputype == 'aarch64':
copy(&build.src.join(item), &dst_src.join(item));
}
- // Get cargo-vendor installed, if it isn't already.
- let mut has_cargo_vendor = false;
- let mut cmd = Command::new(&build.cargo);
- for line in output(cmd.arg("install").arg("--list")).lines() {
- has_cargo_vendor |= line.starts_with("cargo-vendor ");
- }
- if !has_cargo_vendor {
+ // If we're building from git sources, we need to vendor a complete distribution.
+ if build.src_is_git {
+ // Get cargo-vendor installed, if it isn't already.
+ let mut has_cargo_vendor = false;
+ let mut cmd = Command::new(&build.cargo);
+ for line in output(cmd.arg("install").arg("--list")).lines() {
+ has_cargo_vendor |= line.starts_with("cargo-vendor ");
+ }
+ if !has_cargo_vendor {
+ let mut cmd = Command::new(&build.cargo);
+ cmd.arg("install")
+ .arg("--force")
+ .arg("--debug")
+ .arg("--vers").arg(CARGO_VENDOR_VERSION)
+ .arg("cargo-vendor")
+ .env("RUSTC", &build.rustc);
+ build.run(&mut cmd);
+ }
+
+ // Vendor all Cargo dependencies
let mut cmd = Command::new(&build.cargo);
- cmd.arg("install")
- .arg("--force")
- .arg("--debug")
- .arg("--vers").arg(CARGO_VENDOR_VERSION)
- .arg("cargo-vendor")
- .env("RUSTC", &build.rustc);
+ cmd.arg("vendor")
+ .current_dir(&dst_src.join("src"));
build.run(&mut cmd);
}
- // Vendor all Cargo dependencies
- let mut cmd = Command::new(&build.cargo);
- cmd.arg("vendor")
- .current_dir(&dst_src.join("src"));
- build.run(&mut cmd);
-
// Create source tarball in rust-installer format
let mut cmd = Command::new(SH_CMD);
cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
cxx: HashMap<String, gcc::Tool>,
crates: HashMap<String, Crate>,
is_sudo: bool,
+ src_is_git: bool,
}
#[derive(Debug)]
};
let rust_info = channel::GitInfo::new(&src);
let cargo_info = channel::GitInfo::new(&src.join("cargo"));
+ let src_is_git = src.join(".git").exists();
Build {
flags: flags,
lldb_version: None,
lldb_python_dir: None,
is_sudo: is_sudo,
+ src_is_git: src_is_git,
}
}
OutOfSync,
}
- if !self.config.submodules {
- return
- }
- if fs::metadata(self.src.join(".git")).is_err() {
+ if !self.src_is_git || !self.config.submodules {
return
}
let git = || {
// If we've got a git directory we're gona need git to update
// submodules and learn about various other aspects.
- if fs::metadata(build.src.join(".git")).is_ok() {
+ if build.src_is_git {
need_cmd("git".as_ref());
}
- Path and misc options > Patches origin = Bundled, then local
- Path and misc options > Local patch directory = /tmp/patches
- Target options > Target Architecture = powerpc
-- Target options > Emit assembly for CPU = power4 -- (+)
-- Target options > Tune for CPU = power6 -- (+)
+- Target options > Emit assembly for CPU = powerpc -- pure 32-bit PowerPC
- Operating System > Target OS = linux
- Operating System > Linux kernel version = 2.6.32.68 -- ~RHEL6 kernel
- C-library > glibc version = 2.12.2 -- ~RHEL6 glibc
- C compiler > gcc version = 4.9.3
-- C compiler > Core gcc extra config = --with-cpu-32=power4 --with-cpu=default32 -- (+)
-- C compiler > gcc extra config = --with-cpu-32=power4 --with-cpu=default32 -- (+)
- C compiler > C++ = ENABLE -- to cross compile LLVM
-(+) These CPU options match the configuration of the toolchains in RHEL6.
-
## `powerpc64-linux-gnu.config`
For targets: `powerpc64-unknown-linux-gnu`
cmake \
unzip \
expect \
- openjdk-9-jre \
+ openjdk-9-jre-headless \
sudo \
libstdc++6:i386 \
xz-utils \
# except according to those terms.
set -ex
-ANDROID_EMULATOR_FORCE_32BIT=true \
- nohup nohup emulator @arm-18 -no-window -partition-size 2047 \
- 0<&- &>/dev/null &
+
+# Setting SHELL to a file instead of a symlink helps the android
+# emulator identify the system
+export SHELL=/bin/bash
+nohup nohup emulator @arm-18 -no-window -partition-size 2047 0<&- &>/dev/null &
exec "$@"
xz-utils \
swig \
libedit-dev \
- libncurses5-dev
+ libncurses5-dev \
+ patch
RUN curl -L https://cmake.org/files/v3.8/cmake-3.8.0-rc1-Linux-x86_64.tar.gz | \
tar xzf - -C /usr/local --strip-components=1
WORKDIR /tmp
-COPY shared.sh build-toolchain.sh /tmp/
+COPY shared.sh build-toolchain.sh compiler-rt-dso-handle.patch /tmp/
RUN /tmp/build-toolchain.sh
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
# option. This file may not be copied, modified, or distributed
# except according to those terms.
+# ignore-tidy-linelength
+
set -ex
source shared.sh
# Download sources
SRCS=(
- "https://fuchsia.googlesource.com/magenta magenta ac69119"
- "https://fuchsia.googlesource.com/third_party/llvm llvm 5463083"
- "https://fuchsia.googlesource.com/third_party/clang llvm/tools/clang 4ff7b4b"
- "https://fuchsia.googlesource.com/third_party/lld llvm/tools/lld fd465a3"
- "https://fuchsia.googlesource.com/third_party/lldb llvm/tools/lldb 6bb11f8"
- "https://fuchsia.googlesource.com/third_party/compiler-rt llvm/runtimes/compiler-rt 52d4ecc"
- "https://fuchsia.googlesource.com/third_party/libcxx llvm/runtimes/libcxx e891cc8"
- "https://fuchsia.googlesource.com/third_party/libcxxabi llvm/runtimes/libcxxabi f0f0257"
- "https://fuchsia.googlesource.com/third_party/libunwind llvm/runtimes/libunwind 50bddc1"
+ "https://fuchsia.googlesource.com/magenta magenta d17073dc8de344ead3b65e8cc6a12280dec38c84"
+ "https://llvm.googlesource.com/llvm llvm 3f58a16d8eec385e2b3ebdfbb84ff9d3bf27e025"
+ "https://llvm.googlesource.com/clang llvm/tools/clang 727ea63e6e82677f6e10e05e08bc7d6bdbae3111"
+ "https://llvm.googlesource.com/lld llvm/tools/lld a31286c1366e5e89b8872803fded13805a1a084b"
+ "https://llvm.googlesource.com/lldb llvm/tools/lldb 0b2384abec4cb99ad66687712e07dee4dd9d187e"
+ "https://llvm.googlesource.com/compiler-rt llvm/runtimes/compiler-rt 9093a35c599fe41278606a20b51095ea8bd5a081"
+ "https://llvm.googlesource.com/libcxx llvm/runtimes/libcxx 607e0c71ec4f7fd377ad3f6c47b08dbe89f66eaa"
+ "https://llvm.googlesource.com/libcxxabi llvm/runtimes/libcxxabi 0a3a1a8a5ca5ef69e0f6b7d5b9d13e63e6fd2c19"
+ "https://llvm.googlesource.com/libunwind llvm/runtimes/libunwind e128003563d99d9ee62247c4cee40f07d21c03e3"
)
fetch() {
mkdir -p $2
pushd $2 > /dev/null
- curl -sL $1/+archive/$3.tar.gz | tar xzf -
+ git init
+ git remote add origin $1
+ git fetch --depth=1 origin $3
+ git reset --hard FETCH_HEAD
popd > /dev/null
}
fetch $i
done
+# Remove this once https://reviews.llvm.org/D28791 is resolved
+cd llvm/runtimes/compiler-rt
+patch -Np1 < /tmp/compiler-rt-dso-handle.patch
+cd ../../..
+
# Build toolchain
cd llvm
mkdir build
--- /dev/null
+diff --git a/lib/builtins/CMakeLists.txt b/lib/builtins/CMakeLists.txt
+index fc4384af2..b442264c0 100644
+--- a/lib/builtins/CMakeLists.txt
++++ b/lib/builtins/CMakeLists.txt
+@@ -194,6 +194,12 @@ if(APPLE)
+ atomic_thread_fence.c)
+ endif()
+
++if(FUCHSIA)
++ set(GENERIC_SOURCES
++ ${GENERIC_SOURCES}
++ dso_handle.c)
++endif()
++
+ if(NOT WIN32 OR MINGW)
+ set(GENERIC_SOURCES
+ ${GENERIC_SOURCES}
+diff --git a/lib/builtins/dso_handle.c b/lib/builtins/dso_handle.c
+new file mode 100644
+index 000000000..7766cd0aa
+--- /dev/null
++++ b/lib/builtins/dso_handle.c
+@@ -0,0 +1,18 @@
++/* ===-- dso_handle.c - Provide __dso_handle -------------------------------===
++ *
++ * The LLVM Compiler Infrastructure
++ *
++ * This file is dual licensed under the MIT and the University of Illinois Open
++ * Source Licenses. See LICENSE.TXT for details.
++ *
++ * ===----------------------------------------------------------------------===
++ */
++
++/* __dso_handle symbol is mandated by C++ ABI with a value which is an address
++ * in one of the object's segments, and as such this symbol has to be included
++ * statically and cannot be a part of a shared library. Traditionally, it has
++ * been defined in crtbegin.o but there's no principled reason for it to be
++ * there. We define this symbol in the builtin library which is built as a
++ * static library and always included in the final link.
++ */
++__attribute__((visibility("hidden"))) void *const __dso_handle;
CT_ARCH_DEFAULT_BE=y
CT_ARCH_DEFAULT_32=y
CT_ARCH_ABI=""
-CT_ARCH_CPU="power4"
-CT_ARCH_TUNE="power6"
+CT_ARCH_CPU="powerpc"
+CT_ARCH_TUNE=""
CT_ARCH_BE=y
# CT_ARCH_LE is not set
CT_ARCH_32=y
CT_CC_GCC_VERSION="4.9.3"
# CT_CC_LANG_FORTRAN is not set
CT_CC_GCC_ENABLE_CXX_FLAGS=""
-CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY="--with-cpu-32=power4 --with-cpu=default32"
-CT_CC_GCC_EXTRA_CONFIG_ARRAY="--with-cpu-32=power4 --with-cpu=default32"
+CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY=""
+CT_CC_GCC_EXTRA_CONFIG_ARRAY=""
CT_CC_GCC_EXTRA_ENV_ARRAY=""
CT_CC_GCC_STATIC_LIBSTDCXX=y
# CT_CC_GCC_SYSTEM_ZLIB is not set
# Wipe the cache if it's not valid, or mark it as invalid while we update it
if [ ! -f "$cache_valid_file" ]; then
- rm -rf "$CACHE_DIR" && mkdir "$CACHE_DIR"
+ rm -rf "$CACHE_DIR"
+ mkdir "$CACHE_DIR"
else
- rm "$cache_valid_file"
+ set +o errexit
+ stat_lines=$(cd "$cache_src_dir" && git status --porcelain | wc -l)
+ stat_ec=$(cd "$cache_src_dir" && git status >/dev/null 2>&1; echo $?)
+ set -o errexit
+ if [ ! -d "$cache_src_dir/.git" -o $stat_lines != 0 -o $stat_ec != 0 ]; then
+ # Something is badly wrong - the cache valid file is here, but something
+ # about the git repo is fishy. Nuke it all, just in case
+ echo "WARNING: $cache_valid_file exists but bad repo: l:$stat_lines, ec:$stat_ec"
+ rm -rf "$CACHE_DIR"
+ mkdir "$CACHE_DIR"
+ else
+ rm "$cache_valid_file"
+ fi
fi
# Update the cache (a pristine copy of the rust source master)
[#38487]: https://github.com/rust-lang/rust/issues/38487
------------------------
+
+In the MSP430 architecture, interrupt handlers have a special calling
+convention. You can use the `"msp430-interrupt"` ABI to make the compiler apply
+the right calling convention to the interrupt handlers you define.
+
+<!-- NOTE(ignore) this example is specific to the msp430 target -->
+
+``` rust,ignore
+#![feature(abi_msp430_interrupt)]
+#![no_std]
+
+// Place the interrupt handler at the appropriate memory address
+// (Alternatively, you can use `#[used]` and remove `pub` and `#[no_mangle]`)
+#[link_section = "__interrupt_vector_10"]
+#[no_mangle]
+pub static TIM0_VECTOR: extern "msp430-interrupt" fn() = tim0;
+
+// The interrupt handler
+extern "msp430-interrupt" fn tim0() {
+ // ..
+}
+```
+
+``` text
+$ msp430-elf-objdump -CD ./target/msp430/release/app
+Disassembly of section __interrupt_vector_10:
+
+0000fff2 <TIM0_VECTOR>:
+ fff2: 00 c0 interrupt service routine at 0xc000
+
+Disassembly of section .text:
+
+0000c000 <int::tim0>:
+ c000: 00 13 reti
+```
# `abi_ptx`
-The tracking issue for this feature is: None.
+The tracking issue for this feature is: [#38788]
+
+[#38788]: https://github.com/rust-lang/rust/issues/38788
------------------------
+
+When emitting PTX code, all vanilla Rust functions (`fn`) get translated to
+"device" functions. These functions are *not* callable from the host via the
+CUDA API so a crate with only device functions is not too useful!
+
+OTOH, "global" functions *can* be called by the host; you can think of them
+as the real public API of your crate. To produce a global function use the
+`"ptx-kernel"` ABI.
+
+<!-- NOTE(ignore) this example is specific to the nvptx targets -->
+
+``` rust,ignore
+#![feature(abi_ptx)]
+#![no_std]
+
+pub unsafe extern "ptx-kernel" fn global_function() {
+ device_function();
+}
+
+pub fn device_function() {
+ // ..
+}
+```
+
+``` text
+$ xargo rustc --target nvptx64-nvidia-cuda --release -- --emit=asm
+
+$ cat $(find -name '*.s')
+//
+// Generated by LLVM NVPTX Back-End
+//
+
+.version 3.2
+.target sm_20
+.address_size 64
+
+ // .globl _ZN6kernel15global_function17h46111ebe6516b382E
+
+.visible .entry _ZN6kernel15global_function17h46111ebe6516b382E()
+{
+
+
+ ret;
+}
+
+ // .globl _ZN6kernel15device_function17hd6a0e4993bbf3f78E
+.visible .func _ZN6kernel15device_function17hd6a0e4993bbf3f78E()
+{
+
+
+ ret;
+}
+```
# `compiler_builtins_lib`
-This feature is internal to the Rust compiler and is not intended for general use.
+The tracking issue for this feature is: None.
------------------------
+
+This feature is required to link to the `compiler_builtins` crate which contains
+"compiler intrinsics". Compiler intrinsics are software implementations of basic
+operations like multiplication of `u64`s. These intrinsics are only required on
+platforms where these operations don't directly map to a hardware instruction.
+
+You should never need to explicitly link to the `compiler_builtins` crate when
+building "std" programs as `compiler_builtins` is already in the dependency
+graph of `std`. But you may need it when building `no_std` **binary** crates. If
+you get a *linker* error like:
+
+``` text
+$PWD/src/main.rs:11: undefined reference to `__aeabi_lmul'
+$PWD/src/main.rs:11: undefined reference to `__aeabi_uldivmod'
+```
+
+That means that you need to link to this crate.
+
+When you link to this crate, make sure it only appears once in your crate
+dependency graph. Also, it doesn't matter where in the dependency graph you
+place the `compiler_builtins` crate.
+
+<!-- NOTE(ignore) doctests don't support `no_std` binaries -->
+
+``` rust,ignore
+#![feature(compiler_builtins_lib)]
+#![no_std]
+
+extern crate compiler_builtins;
+```
# `compiler_builtins`
-The tracking issue for this feature is: None.
+This feature is internal to the Rust compiler and is not intended for general use.
------------------------
-
// We can't use atomic_nand here because it can result in a bool with
// an invalid value. This happens because the atomic operation is done
// with an 8-bit integer internally, which would set the upper 7 bits.
- // So we just use a compare-exchange loop instead, which is what the
- // intrinsic actually expands to anyways on many platforms.
- let mut old = self.load(Relaxed);
- loop {
- let new = !(old && val);
- match self.compare_exchange_weak(old, new, order, Relaxed) {
- Ok(_) => break,
- Err(x) => old = x,
- }
+ // So we just use fetch_xor or swap instead.
+ if val {
+ // !(x & true) == !x
+ // We must invert the bool.
+ self.fetch_xor(true, order)
+ } else {
+ // !(x & false) == true
+ // We must set the bool to true.
+ self.swap(true, order)
}
- old
}
/// Logical "or" with a boolean value.
#[test]
fn bool_and() {
let a = AtomicBool::new(true);
- assert_eq!(a.fetch_and(false, SeqCst),true);
+ assert_eq!(a.fetch_and(false, SeqCst), true);
assert_eq!(a.load(SeqCst),false);
}
+#[test]
+fn bool_nand() {
+ let a = AtomicBool::new(false);
+ assert_eq!(a.fetch_nand(false, SeqCst), false);
+ assert_eq!(a.load(SeqCst), true);
+ assert_eq!(a.fetch_nand(false, SeqCst), true);
+ assert_eq!(a.load(SeqCst), true);
+ assert_eq!(a.fetch_nand(true, SeqCst), true);
+ assert_eq!(a.load(SeqCst), false);
+ assert_eq!(a.fetch_nand(true, SeqCst), false);
+ assert_eq!(a.load(SeqCst), true);
+}
+
#[test]
fn uint_and() {
let x = AtomicUsize::new(0xf731);
fn pat_ident_binding_mode(&mut self, span: Span, name: Name, bm: hir::BindingMode)
-> P<hir::Pat> {
let id = self.next_id();
- let parent_def = self.parent_def;
+ let parent_def = self.parent_def.unwrap();
let def_id = {
let defs = self.resolver.definitions();
let def_path_data = DefPathData::Binding(name.as_str());
}
}
- pub fn collect_root(&mut self) {
- let root = self.create_def_with_parent(None,
- CRATE_NODE_ID,
- DefPathData::CrateRoot,
- ITEM_LIKE_SPACE);
+ pub fn collect_root(&mut self, crate_name: &str, crate_disambiguator: &str) {
+ let root = self.definitions.create_root_def(crate_name,
+ crate_disambiguator);
assert_eq!(root, CRATE_DEF_INDEX);
self.parent_def = Some(root);
}
data: DefPathData,
address_space: DefIndexAddressSpace)
-> DefIndex {
- let parent_def = self.parent_def;
+ let parent_def = self.parent_def.unwrap();
debug!("create_def(node_id={:?}, data={:?}, parent_def={:?})", node_id, data, parent_def);
self.definitions.create_def_with_parent(parent_def, node_id, data, address_space)
}
- fn create_def_with_parent(&mut self,
- parent: Option<DefIndex>,
- node_id: NodeId,
- data: DefPathData,
- address_space: DefIndexAddressSpace)
- -> DefIndex {
- self.definitions.create_def_with_parent(parent, node_id, data, address_space)
- }
-
pub fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent_def: DefIndex, f: F) {
let parent = self.parent_def;
self.parent_def = Some(parent_def);
use rustc_data_structures::stable_hasher::StableHasher;
use serialize::{Encodable, Decodable, Encoder, Decoder};
use std::fmt::Write;
-use std::hash::{Hash, Hasher};
+use std::hash::Hash;
use syntax::ast;
use syntax::symbol::{Symbol, InternedString};
use ty::TyCtxt;
pub struct DefPathTable {
index_to_key: [Vec<DefKey>; 2],
key_to_index: FxHashMap<DefKey, DefIndex>,
+ def_path_hashes: [Vec<u64>; 2],
}
// Unfortunately we have to provide a manual impl of Clone because of the
index_to_key: [self.index_to_key[0].clone(),
self.index_to_key[1].clone()],
key_to_index: self.key_to_index.clone(),
+ def_path_hashes: [self.def_path_hashes[0].clone(),
+ self.def_path_hashes[1].clone()],
}
}
}
fn allocate(&mut self,
key: DefKey,
+ def_path_hash: u64,
address_space: DefIndexAddressSpace)
-> DefIndex {
let index = {
index
};
self.key_to_index.insert(key, index);
+ self.def_path_hashes[address_space.index()].push(def_path_hash);
+ debug_assert!(self.def_path_hashes[address_space.index()].len() ==
+ self.index_to_key[address_space.index()].len());
index
}
[index.as_array_index()].clone()
}
+ #[inline(always)]
+ pub fn def_path_hash(&self, index: DefIndex) -> u64 {
+ self.def_path_hashes[index.address_space().index()]
+ [index.as_array_index()]
+ }
+
#[inline(always)]
pub fn def_index_for_def_key(&self, key: &DefKey) -> Option<DefIndex> {
self.key_to_index.get(key).cloned()
impl Encodable for DefPathTable {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+ // Index to key
self.index_to_key[DefIndexAddressSpace::Low.index()].encode(s)?;
- self.index_to_key[DefIndexAddressSpace::High.index()].encode(s)
+ self.index_to_key[DefIndexAddressSpace::High.index()].encode(s)?;
+
+ // DefPath hashes
+ self.def_path_hashes[DefIndexAddressSpace::Low.index()].encode(s)?;
+ self.def_path_hashes[DefIndexAddressSpace::High.index()].encode(s)?;
+
+ Ok(())
}
}
impl Decodable for DefPathTable {
fn decode<D: Decoder>(d: &mut D) -> Result<DefPathTable, D::Error> {
let index_to_key_lo: Vec<DefKey> = Decodable::decode(d)?;
- let index_to_key_high: Vec<DefKey> = Decodable::decode(d)?;
+ let index_to_key_hi: Vec<DefKey> = Decodable::decode(d)?;
- let index_to_key = [index_to_key_lo, index_to_key_high];
+ let def_path_hashes_lo: Vec<u64> = Decodable::decode(d)?;
+ let def_path_hashes_hi: Vec<u64> = Decodable::decode(d)?;
+
+ let index_to_key = [index_to_key_lo, index_to_key_hi];
+ let def_path_hashes = [def_path_hashes_lo, def_path_hashes_hi];
let mut key_to_index = FxHashMap();
Ok(DefPathTable {
index_to_key: index_to_key,
key_to_index: key_to_index,
+ def_path_hashes: def_path_hashes,
})
}
}
pub disambiguated_data: DisambiguatedDefPathData,
}
+impl DefKey {
+ fn compute_stable_hash(&self, parent_hash: u64) -> u64 {
+ let mut hasher = StableHasher::new();
+
+ // We hash a 0u8 here to disambiguate between regular DefPath hashes,
+ // and the special "root_parent" below.
+ 0u8.hash(&mut hasher);
+ parent_hash.hash(&mut hasher);
+ self.disambiguated_data.hash(&mut hasher);
+ hasher.finish()
+ }
+
+ fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> u64 {
+ let mut hasher = StableHasher::new();
+ // Disambiguate this from a regular DefPath hash,
+ // see compute_stable_hash() above.
+ 1u8.hash(&mut hasher);
+ crate_name.hash(&mut hasher);
+ crate_disambiguator.hash(&mut hasher);
+ hasher.finish()
+ }
+}
+
/// Pair of `DefPathData` and an integer disambiguator. The integer is
/// normally 0, but in the event that there are multiple defs with the
/// same `parent` and `data`, we use this field to disambiguate
s
}
-
- pub fn deterministic_hash(&self, tcx: TyCtxt) -> u64 {
- debug!("deterministic_hash({:?})", self);
- let mut state = StableHasher::new();
- self.deterministic_hash_to(tcx, &mut state);
- state.finish()
- }
-
- pub fn deterministic_hash_to<H: Hasher>(&self, tcx: TyCtxt, state: &mut H) {
- tcx.original_crate_name(self.krate).as_str().hash(state);
- tcx.crate_disambiguator(self.krate).as_str().hash(state);
- self.data.hash(state);
- }
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
table: DefPathTable {
index_to_key: [vec![], vec![]],
key_to_index: FxHashMap(),
+ def_path_hashes: [vec![], vec![]],
},
node_to_def_index: NodeMap(),
def_index_to_node: [vec![], vec![]],
self.table.def_key(index)
}
+ #[inline(always)]
+ pub fn def_path_hash(&self, index: DefIndex) -> u64 {
+ self.table.def_path_hash(index)
+ }
+
pub fn def_index_for_def_key(&self, key: DefKey) -> Option<DefIndex> {
self.table.def_index_for_def_key(&key)
}
self.node_to_hir_id[node_id]
}
+ /// Creates the crate root definition, which (unlike all other definitions) has no parent.
+ pub fn create_root_def(&mut self,
+ crate_name: &str,
+ crate_disambiguator: &str)
+ -> DefIndex {
+ let key = DefKey {
+ parent: None,
+ disambiguated_data: DisambiguatedDefPathData {
+ data: DefPathData::CrateRoot,
+ disambiguator: 0
+ }
+ };
+
+ let parent_hash = DefKey::root_parent_stable_hash(crate_name,
+ crate_disambiguator);
+ let def_path_hash = key.compute_stable_hash(parent_hash);
+
+ // Create the definition.
+ let address_space = super::ITEM_LIKE_SPACE;
+ let index = self.table.allocate(key, def_path_hash, address_space);
+ assert!(self.def_index_to_node[address_space.index()].is_empty());
+ self.def_index_to_node[address_space.index()].push(ast::CRATE_NODE_ID);
+ self.node_to_def_index.insert(ast::CRATE_NODE_ID, index);
+
+ index
+ }
+
/// Add a definition with a parent definition.
pub fn create_def_with_parent(&mut self,
- parent: Option<DefIndex>,
+ parent: DefIndex,
node_id: ast::NodeId,
data: DefPathData,
- // is_owner: bool)
address_space: DefIndexAddressSpace)
-> DefIndex {
debug!("create_def_with_parent(parent={:?}, node_id={:?}, data={:?})",
data,
self.table.def_key(self.node_to_def_index[&node_id]));
- assert_eq!(parent.is_some(), data != DefPathData::CrateRoot);
+ // The root node must be created with create_root_def()
+ assert!(data != DefPathData::CrateRoot);
// Find a unique DefKey. This basically means incrementing the disambiguator
// until we get no match.
let mut key = DefKey {
- parent: parent,
+ parent: Some(parent),
disambiguated_data: DisambiguatedDefPathData {
data: data,
disambiguator: 0
key.disambiguated_data.disambiguator += 1;
}
+ let parent_hash = self.table.def_path_hash(parent);
+ let def_path_hash = key.compute_stable_hash(parent_hash);
+
debug!("create_def_with_parent: after disambiguation, key = {:?}", key);
// Create the definition.
- let index = self.table.allocate(key, address_space);
+ let index = self.table.allocate(key, def_path_hash, address_space);
assert_eq!(index.as_array_index(),
self.def_index_to_node[address_space.index()].len());
self.def_index_to_node[address_space.index()].push(node_id);
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use hir::def_id::DefId;
-use ty::TyCtxt;
-use util::nodemap::DefIdMap;
-
-pub struct DefPathHashes<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- data: DefIdMap<u64>,
-}
-
-impl<'a, 'tcx> DefPathHashes<'a, 'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
- DefPathHashes {
- tcx: tcx,
- data: DefIdMap()
- }
- }
-
- pub fn hash(&mut self, def_id: DefId) -> u64 {
- let tcx = self.tcx;
- *self.data.entry(def_id)
- .or_insert_with(|| {
- let def_path = tcx.def_path(def_id);
- def_path.deterministic_hash(tcx)
- })
- }
-}
use hir;
use hir::def_id::DefId;
-use ich::{self, CachingCodemapView, DefPathHashes};
+use ich::{self, CachingCodemapView};
use session::config::DebugInfoLevel::NoDebugInfo;
use ty;
/// things (e.g. each DefId/DefPath is only hashed once).
pub struct StableHashingContext<'a, 'tcx: 'a> {
tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
- def_path_hashes: DefPathHashes<'a, 'tcx>,
codemap: CachingCodemapView<'tcx>,
hash_spans: bool,
hash_bodies: bool,
StableHashingContext {
tcx: tcx,
- def_path_hashes: DefPathHashes::new(tcx),
codemap: CachingCodemapView::new(tcx),
hash_spans: hash_spans_initial,
hash_bodies: true,
#[inline]
pub fn def_path_hash(&mut self, def_id: DefId) -> u64 {
- self.def_path_hashes.hash(def_id)
+ self.tcx.def_path_hash(def_id)
}
#[inline]
//! ICH - Incremental Compilation Hash
pub use self::fingerprint::Fingerprint;
-pub use self::def_path_hash::DefPathHashes;
pub use self::caching_codemap_view::CachingCodemapView;
pub use self::hcx::{StableHashingContext, NodeIdHashingMode};
mod fingerprint;
-mod def_path_hash;
mod caching_codemap_view;
mod hcx;
fn stability(&self, def: DefId) -> Option<attr::Stability>;
fn deprecation(&self, def: DefId) -> Option<attr::Deprecation>;
fn visibility(&self, def: DefId) -> ty::Visibility;
- fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap<DefId>>;
+ fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>>;
fn item_generics_cloned(&self, def: DefId) -> ty::Generics;
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute>;
fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>;
-> Option<DefId>;
fn def_key(&self, def: DefId) -> DefKey;
fn def_path(&self, def: DefId) -> hir_map::DefPath;
+ fn def_path_hash(&self, def: DefId) -> u64;
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>;
fn item_children(&self, did: DefId) -> Vec<def::Export>;
fn load_macro(&self, did: DefId, sess: &Session) -> LoadedMacro;
fn stability(&self, def: DefId) -> Option<attr::Stability> { bug!("stability") }
fn deprecation(&self, def: DefId) -> Option<attr::Deprecation> { bug!("deprecation") }
fn visibility(&self, def: DefId) -> ty::Visibility { bug!("visibility") }
- fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap<DefId>> {
+ fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>> {
bug!("visible_parent_map")
}
fn item_generics_cloned(&self, def: DefId) -> ty::Generics
fn def_path(&self, def: DefId) -> hir_map::DefPath {
bug!("relative_def_path")
}
+ fn def_path_hash(&self, def: DefId) -> u64 {
+ bug!("wa")
+ }
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name> { bug!("struct_field_names") }
fn item_children(&self, did: DefId) -> Vec<def::Export> { bug!("item_children") }
fn load_macro(&self, did: DefId, sess: &Session) -> LoadedMacro { bug!("load_macro") }
}
}
+ #[inline]
+ pub fn def_path_hash(self, def_id: DefId) -> u64 {
+ if def_id.is_local() {
+ self.hir.definitions().def_path_hash(def_id.index)
+ } else {
+ self.sess.cstore.def_path_hash(def_id)
+ }
+ }
+
pub fn def_span(self, def_id: DefId) -> Span {
if let Some(id) = self.hir.as_local_node_id(def_id) {
self.hir.span(id)
use hir::def_id::{DefId, LOCAL_CRATE};
use hir::map::DefPathData;
use infer::InferCtxt;
-use hir::map as hir_map;
+// use hir::map as hir_map;
use traits::{self, Reveal};
use ty::{self, Ty, TyCtxt, TypeAndMut, TypeFlags, TypeFoldable};
use ty::ParameterEnvironment;
fn def_id(&mut self, did: DefId) {
// Hash the DefPath corresponding to the DefId, which is independent
- // of compiler internal state.
- let path = self.tcx.def_path(did);
- self.def_path(&path)
- }
-
- pub fn def_path(&mut self, def_path: &hir_map::DefPath) {
- def_path.deterministic_hash_to(self.tcx, &mut self.state);
+ // of compiler internal state. We already have a stable hash value of
+ // all DefPaths available via tcx.def_path_hash(), so we just feed that
+ // into the hasher.
+ let hash = self.tcx.def_path_hash(did);
+ self.hash(hash);
}
}
ALIASABLE(LV, MQ)
```
-### Checking mutability of immutable pointer types
+### Checking aliasability of immutable pointer types
Immutable pointer types like `&T` are aliasable, and hence can only be
borrowed immutably:
TYPE(LV) = &Ty
```
-### Checking mutability of mutable pointer types
+### Checking aliasability of mutable pointer types
`&mut T` can be frozen, so it is acceptable to borrow it as either imm or mut:
```rust
// Test region-reborrow-from-shorter-mut-ref.rs:
-fn copy_pointer<'a,'b,T>(x: &'a mut &'b mut T) -> &'b mut T {
+fn copy_borrowed_ptr<'a,'b,T>(x: &'a mut &'b mut T) -> &'b mut T {
&mut **p // ERROR due to clause (1)
}
fn main() {
let mut crate_loader = CrateLoader::new(sess, &cstore, crate_name);
crate_loader.preprocess(&krate);
let resolver_arenas = Resolver::arenas();
- let mut resolver =
- Resolver::new(sess, &krate, make_glob_map, &mut crate_loader, &resolver_arenas);
+ let mut resolver = Resolver::new(sess,
+ &krate,
+ crate_name,
+ make_glob_map,
+ &mut crate_loader,
+ &resolver_arenas);
resolver.whitelisted_legacy_custom_derives = whitelisted_legacy_custom_derives;
syntax_ext::register_builtins(&mut resolver, syntax_exts, sess.features.borrow().quote);
item_like: T)
where T: HashStable<StableHashingContext<'a, 'tcx>>
{
+ if !hash_bodies && !self.hcx.tcx().sess.opts.build_dep_graph() {
+ // If we just need the hashes in order to compute the SVH, we don't
+ // need to have two hashes per item; the one that also covers the
+ // item's body is sufficient.
+ return
+ }
+
let mut hasher = IchHasher::new();
self.hcx.while_hashing_hir_bodies(hash_bodies, |hcx| {
item_like.hash_stable(hcx, &mut hasher);
(item_dep_node, item_hash)
})
.collect();
- item_hashes.sort(); // avoid artificial dependencies on item ordering
+ item_hashes.sort_unstable(); // avoid artificial dependencies on item ordering
item_hashes.hash(&mut crate_state);
}
#![feature(staged_api)]
#![feature(rand)]
#![feature(conservative_impl_trait)]
+#![feature(sort_unstable)]
#![cfg_attr(stage0, feature(pub_restricted))]
extern crate graphviz;
.clone()
}
- pub fn lookup_def_path(&self, id: DefPathIndex) -> &DefPath {
- &self.directory.paths[id.index as usize]
- }
-
pub fn map(&mut self, node: &DepNode<DefId>) -> DepNode<DefPathIndex> {
node.map_def(|&def_id| Some(self.add(def_id))).unwrap()
}
index_map: FxHashMap()
};
- let mut def_id_hashes = FxHashMap();
-
for (index, target) in preds.reduced_graph.all_nodes().iter().enumerate() {
let index = NodeIndex(index);
let def_id = match *target.data {
_ => continue,
};
- let mut def_id_hash = |def_id: DefId| -> u64 {
- *def_id_hashes.entry(def_id)
- .or_insert_with(|| {
- let index = builder.add(def_id);
- let path = builder.lookup_def_path(index);
- path.deterministic_hash(tcx)
- })
- };
-
// To create the hash for each item `X`, we don't hash the raw
// bytes of the metadata (though in principle we
// could). Instead, we walk the predecessors of `MetaData(X)`
.map(|index| preds.reduced_graph.node_data(index))
.filter(|dep_node| HashContext::is_hashable(dep_node))
.map(|dep_node| {
- let hash_dep_node = dep_node.map_def(|&def_id| Some(def_id_hash(def_id)))
+ let hash_dep_node = dep_node.map_def(|&def_id| Some(tcx.def_path_hash(def_id)))
.unwrap();
let hash = preds.hashes[dep_node];
(hash_dep_node, hash)
predicates => { cdata.get_predicates(def_id.index, tcx) }
super_predicates => { cdata.get_super_predicates(def_id.index, tcx) }
trait_def => {
- tcx.alloc_trait_def(cdata.get_trait_def(def_id.index, tcx))
+ tcx.alloc_trait_def(cdata.get_trait_def(def_id.index))
}
adt_def => { cdata.get_adt_def(def_id.index, tcx) }
adt_destructor => {
self.get_crate_data(def.krate).def_path(def.index)
}
+ fn def_path_hash(&self, def: DefId) -> u64 {
+ self.get_crate_data(def.krate).def_path_hash(def.index)
+ }
+
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>
{
self.dep_graph.read(DepNode::MetaData(def));
/// Returns a map from a sufficiently visible external item (i.e. an external item that is
/// visible from at least one local module) to a sufficiently visible parent (considering
/// modules that re-export the external item to be parents).
- fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap<DefId>> {
- let mut visible_parent_map = self.visible_parent_map.borrow_mut();
- if !visible_parent_map.is_empty() { return visible_parent_map; }
+ fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>> {
+ {
+ let visible_parent_map = self.visible_parent_map.borrow();
+ if !visible_parent_map.is_empty() {
+ return visible_parent_map;
+ }
+ }
use std::collections::vec_deque::VecDeque;
use std::collections::hash_map::Entry;
+
+ let mut visible_parent_map = self.visible_parent_map.borrow_mut();
+
for cnum in (1 .. self.next_crate_num().as_usize()).map(CrateNum::new) {
let cdata = self.get_crate_data(cnum);
}
}
- visible_parent_map
+ drop(visible_parent_map);
+ self.visible_parent_map.borrow()
}
}
}
}
- pub fn get_trait_def(&self,
- item_id: DefIndex,
- tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> ty::TraitDef {
+ pub fn get_trait_def(&self, item_id: DefIndex) -> ty::TraitDef {
let data = match self.entry(item_id).kind {
EntryKind::Trait(data) => data.decode(self),
_ => bug!(),
let def = ty::TraitDef::new(self.local_def_id(item_id),
data.unsafety,
data.paren_sugar,
- self.def_path(item_id).deterministic_hash(tcx));
+ self.def_path_table.def_path_hash(item_id));
if data.has_default_impl {
def.record_has_default_impl();
}
}
+ #[inline]
pub fn def_key(&self, index: DefIndex) -> DefKey {
self.def_path_table.def_key(index)
}
DefPath::make(self.cnum, id, |parent| self.def_path_table.def_key(parent))
}
+ #[inline]
+ pub fn def_path_hash(&self, index: DefIndex) -> u64 {
+ self.def_path_table.def_path_hash(index)
+ }
+
/// Imports the codemap from an external crate into the codemap of the crate
/// currently being compiled (the "local crate").
///
impl<'a> Resolver<'a> {
pub fn new(session: &'a Session,
krate: &Crate,
+ crate_name: &str,
make_glob_map: MakeGlobMap,
crate_loader: &'a mut CrateLoader,
arenas: &'a ResolverArenas<'a>)
module_map.insert(DefId::local(CRATE_DEF_INDEX), graph_root);
let mut definitions = Definitions::new();
- DefCollector::new(&mut definitions).collect_root();
+ DefCollector::new(&mut definitions)
+ .collect_root(crate_name, &session.local_crate_disambiguator().as_str());
let mut invocations = FxHashMap();
invocations.insert(Mark::root(),
-> PathResolution {
let ns = source.namespace();
let is_expected = &|def| source.is_expected(def);
+ let is_enum_variant = &|def| if let Def::Variant(..) = def { true } else { false };
// Base error is amended with one short label and possibly some longer helps/notes.
let report_errors = |this: &mut Self, def: Option<Def>| {
if !candidates.is_empty() {
// Report import candidates as help and proceed searching for labels.
show_candidates(&mut err, &candidates, def.is_some());
+ } else if is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
+ let enum_candidates = this.lookup_import_candidates(name, ns, is_enum_variant);
+ let mut enum_candidates = enum_candidates.iter()
+ .map(|suggestion| import_candidate_to_paths(&suggestion)).collect::<Vec<_>>();
+ enum_candidates.sort();
+ for (sp, variant_path, enum_path) in enum_candidates {
+ let msg = format!("there is an enum variant `{}`, did you mean to use `{}`?",
+ variant_path,
+ enum_path);
+ if sp == DUMMY_SP {
+ err.help(&msg);
+ } else {
+ err.span_help(sp, &msg);
+ }
+ }
}
if path.len() == 1 && this.self_type_is_available() {
if let Some(candidate) = this.lookup_assoc_candidate(name, ns, is_expected) {
names_to_string(&path.segments.iter().map(|seg| seg.identifier).collect::<Vec<_>>())
}
+/// Given an `ImportSuggestion` for an enum variant, return its span together with the
+/// string paths of the variant and of its enclosing enum.
+fn import_candidate_to_paths(suggestion: &ImportSuggestion) -> (Span, String, String) {
+ let variant_path = &suggestion.path;
+ let variant_path_string = path_names_to_string(variant_path);
+
+ let path_len = suggestion.path.segments.len();
+ let enum_path = ast::Path {
+ span: suggestion.path.span,
+ segments: suggestion.path.segments[0..path_len - 1].to_vec(),
+ };
+ let enum_path_string = path_names_to_string(&enum_path);
+
+ (suggestion.path.span, variant_path_string, enum_path_string)
+}
+
+
/// When an entity with a given name is not available in scope, we search for
/// entities with that name in all crates. This method allows outputting the
/// results of this search in a programmer-friendly way
use monomorphize::Instance;
use rustc::middle::weak_lang_items;
-use rustc::hir::def_id::LOCAL_CRATE;
+use rustc::hir::def_id::DefId;
use rustc::hir::map as hir_map;
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::ty::fold::TypeVisitor;
use rustc::ty::item_path::{self, ItemPathBuffer, RootMode};
use rustc::ty::subst::Substs;
-use rustc::hir::map::definitions::{DefPath, DefPathData};
+use rustc::hir::map::definitions::DefPathData;
use rustc::util::common::record_time;
use syntax::attr;
fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
- // path to the item this name is for
- def_path: &DefPath,
+ // the DefId of the item this name is for
+ def_id: Option<DefId>,
// type of the item, without any generic
// parameters substituted; this is
// if any.
substs: Option<&'tcx Substs<'tcx>>)
-> String {
- debug!("get_symbol_hash(def_path={:?}, parameters={:?})",
- def_path, substs);
+ debug!("get_symbol_hash(def_id={:?}, parameters={:?})", def_id, substs);
let tcx = scx.tcx();
// the main symbol name is not necessarily unique; hash in the
// compiler's internal def-path, guaranteeing each symbol has a
// truly unique path
- hasher.def_path(def_path);
+ hasher.hash(def_id.map(|def_id| tcx.def_path_hash(def_id)));
// Include the main item-type. Note that, in this case, the
// assertions about `needs_subst` may not hold, but this item-type
return scx.tcx().item_name(def_id).as_str().to_string();
}
- let def_path = scx.tcx().def_path(def_id);
-
// We want to compute the "type" of this item. Unfortunately, some
// kinds of items (e.g., closures) don't have an entry in the
// item-type array. So walk back up the find the closest parent
// and should not matter anyhow.
let instance_ty = scx.tcx().erase_regions(&instance_ty);
- let hash = get_symbol_hash(scx, &def_path, instance_ty, Some(substs));
+ let hash = get_symbol_hash(scx, Some(def_id), instance_ty, Some(substs));
let mut buffer = SymbolPathBuffer {
- names: Vec::with_capacity(def_path.data.len())
+ names: Vec::new()
};
item_path::with_forced_absolute_paths(|| {
t: Ty<'tcx>,
prefix: &str)
-> String {
- let empty_def_path = DefPath {
- data: vec![],
- krate: LOCAL_CRATE,
- };
- let hash = get_symbol_hash(scx, &empty_def_path, t, None);
+ let hash = get_symbol_hash(scx, None, t, None);
let path = [Symbol::intern(prefix).as_str()];
mangle(path.iter().cloned(), &hash)
}
err.emit();
}
- let def_path_hash = tcx.def_path(def_id).deterministic_hash(tcx);
+ let def_path_hash = tcx.def_path_hash(def_id);
let def = ty::TraitDef::new(def_id, unsafety, paren_sugar, def_path_hash);
if tcx.hir.trait_is_auto(def_id) {
if (relatedDoc.is(".docblock")) {
if (relatedDoc.is(":visible")) {
if (animate === true) {
- relatedDoc.slideUp({duration: 'fast', easing: 'linear'});
- toggle.children(".toggle-label").fadeIn();
+ relatedDoc.slideUp({
+ duration: 'fast',
+ easing: 'linear',
+ complete: function() {
+ toggle.children(".toggle-label").fadeIn();
+ toggle.parent(".toggle-wrapper").addClass("collapsed");
+ toggle.children(".inner").text(labelForToggleButton(true));
+ },
+ });
} else {
relatedDoc.hide();
toggle.children(".toggle-label").show();
+ toggle.parent(".toggle-wrapper").addClass("collapsed");
+ toggle.children(".inner").text(labelForToggleButton(true));
}
- toggle.parent(".toggle-wrapper").addClass("collapsed");
- toggle.children(".inner").text(labelForToggleButton(true));
} else {
relatedDoc.slideDown({duration: 'fast', easing: 'linear'});
toggle.parent(".toggle-wrapper").removeClass("collapsed");
type Target = CStr;
fn deref(&self) -> &CStr {
- unsafe { mem::transmute(self.as_bytes_with_nul()) }
+ unsafe { CStr::from_bytes_with_nul_unchecked(self.as_bytes_with_nul()) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_ptr<'a>(ptr: *const c_char) -> &'a CStr {
let len = libc::strlen(ptr);
- mem::transmute(slice::from_raw_parts(ptr, len as usize + 1))
+ let ptr = ptr as *const u8;
+ CStr::from_bytes_with_nul_unchecked(slice::from_raw_parts(ptr, len as usize + 1))
}
/// Creates a C string wrapper from a byte slice.
// Allows the definition recursive static items.
(accepted, static_recursion, "1.17.0", Some(29719)),
// pub(restricted) visibilities (RFC 1422)
- (accepted, pub_restricted, "1.17.0", Some(32409)),
+ (accepted, pub_restricted, "1.18.0", Some(32409)),
// The #![windows_subsystem] attribute
(accepted, windows_subsystem, "1.18.0", Some(37499)),
);
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+enum Fruit {
+ Apple(i64),
+ //~^ HELP there is an enum variant `Fruit::Apple`, did you mean to use `Fruit`?
+ //~| HELP there is an enum variant `Fruit::Apple`, did you mean to use `Fruit`?
+ Orange(i64),
+}
+
+fn should_return_fruit() -> Apple {
+ //~^ ERROR cannot find type `Apple` in this scope
+ //~| NOTE not found in this scope
+ Apple(5)
+ //~^ ERROR cannot find function `Apple` in this scope
+ //~| NOTE not found in this scope
+ //~| HELP possible candidate is found in another module, you can import it into scope
+}
+
+fn should_return_fruit_too() -> Fruit::Apple {
+ //~^ ERROR expected type, found variant `Fruit::Apple`
+ //~| NOTE not a type
+ Apple(5)
+ //~^ ERROR cannot find function `Apple` in this scope
+ //~| NOTE not found in this scope
+ //~| HELP possible candidate is found in another module, you can import it into scope
+}
+
+fn foo() -> Ok {
+ //~^ ERROR expected type, found variant `Ok`
+ //~| NOTE not a type
+ //~| HELP there is an enum variant
+ //~| HELP there is an enum variant
+ Ok(())
+}
+
+fn bar() -> Variant3 {
+ //~^ ERROR cannot find type `Variant3` in this scope
+ //~| NOTE not found in this scope
+}
+
+fn qux() -> Some {
+ //~^ ERROR expected type, found variant `Some`
+ //~| NOTE not a type
+ //~| HELP there is an enum variant
+ //~| HELP there is an enum variant
+ Some(1)
+}
+
+fn main() {}
+
+mod x {
+ enum Enum {
+ Variant1,
+ Variant2(),
+ Variant3(usize),
+ //~^ HELP there is an enum variant `x::Enum::Variant3`, did you mean to use `x::Enum`?
+ Variant4 {},
+ }
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub struct Test;
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:issue_41053.rs
+
+pub trait Trait { fn foo(&self) {} }
+
+pub struct Foo;
+
+impl Iterator for Foo {
+ type Item = Box<Trait>;
+ fn next(&mut self) -> Option<Box<Trait>> {
+ extern crate issue_41053;
+ impl ::Trait for issue_41053::Test {
+ fn foo(&self) {}
+ }
+ Some(Box::new(issue_41053::Test))
+ }
+}
+
+fn main() {
+ Foo.next().unwrap().foo();
+}