LLVM: Update submodule to include SRet support patch for MSP430.
This patch is needed to fix #38824 on MSP430.
I know that LLVM 4 is coming soon, but it would be great to have at least one working nightly before the update.
cc @awygle
r? @alexcrichton
path = src/doc/nomicon
url = https://github.com/rust-lang-nursery/nomicon
[submodule "src/tools/cargo"]
- path = src/tools/cargo
+ path = cargo
url = https://github.com/rust-lang/cargo
[submodule "reference"]
path = src/doc/reference
- env: IMAGE=dist-armv7-aarch64-linux DEPLOY=1
- env: IMAGE=dist-freebsd DEPLOY=1
- env: IMAGE=dist-i586-gnu-i686-musl DEPLOY=1
+ - env: IMAGE=dist-fuchsia DEPLOY=1
- env: IMAGE=dist-mips-linux DEPLOY=1
- env: IMAGE=dist-mips64-linux DEPLOY=1
- env: IMAGE=dist-powerpc-linux DEPLOY=1
RUST_CHECK_TARGET=check
RUST_CONFIGURE_ARGS=--build=x86_64-apple-darwin
SRC=.
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: &osx_install_sccache >
RUST_CHECK_TARGET=check
RUST_CONFIGURE_ARGS=--build=i686-apple-darwin
SRC=.
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
RUST_CONFIGURE_ARGS="--build=i686-apple-darwin --enable-extended"
SRC=.
DEPLOY=1
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: >
travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-apple-darwin &&
- chmod +x /usr/local/bin/sccache &&
- brew uninstall --ignore-dependencies openssl &&
- brew install openssl --universal --without-test
+ chmod +x /usr/local/bin/sccache
- env: >
RUST_CHECK_TARGET=dist
RUST_CONFIGURE_ARGS="--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios --enable-extended"
SRC=.
DEPLOY=1
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
RUST_CONFIGURE_ARGS="--enable-extended"
SRC=.
DEPLOY_ALT=1
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
echo "#### Build failed; Disk usage after running script:";
df -h;
du . | sort -nr | head -n100
+ - cat obj/tmp/sccache.log
+ - cat /tmp/sccache.log
# Save tagged docker images we created and load them if they're available
before_cache:
- set PATH=%PATH%;%CD%\handle
- handle.exe -accepteula -help
+ # Attempt to debug sccache failures
+ - set RUST_LOG=sccache
+ - set SCCACHE_ERROR_LOG=%CD%/sccache.log
+
test_script:
- appveyor-retry sh -c 'git submodule deinit -f . && git submodule update --init'
- set SRC=.
- set NO_CCACHE=1
- sh src/ci/run.sh
+on_failure:
+ - cat %CD%/sccache.log
+
cache:
- "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
--- /dev/null
+Subproject commit 5f3b9c4c6a7be1f177d6024cb83d150b6479148a
need_cmd file
need_cmd make
-msg "inspecting environment"
-
-CFG_OSTYPE=$(uname -s)
-CFG_CPUTYPE=$(uname -m)
-
-if [ $CFG_OSTYPE = Darwin -a $CFG_CPUTYPE = i386 ]
-then
- # Darwin's `uname -s` lies and always returns i386. We have to use sysctl
- # instead.
- if sysctl hw.optional.x86_64 | grep -q ': 1'
- then
- CFG_CPUTYPE=x86_64
- fi
-fi
-
-# The goal here is to come up with the same triple as LLVM would,
-# at least for the subset of platforms we're willing to target.
-
-case $CFG_OSTYPE in
-
- Linux)
- CFG_OSTYPE=unknown-linux-gnu
- ;;
-
- FreeBSD)
- CFG_OSTYPE=unknown-freebsd
- ;;
-
- DragonFly)
- CFG_OSTYPE=unknown-dragonfly
- ;;
-
- Bitrig)
- CFG_OSTYPE=unknown-bitrig
- ;;
-
- OpenBSD)
- CFG_OSTYPE=unknown-openbsd
- ;;
-
- NetBSD)
- CFG_OSTYPE=unknown-netbsd
- ;;
-
- Darwin)
- CFG_OSTYPE=apple-darwin
- ;;
-
- SunOS)
- CFG_OSTYPE=sun-solaris
- CFG_CPUTYPE=$(isainfo -n)
- ;;
-
- Haiku)
- CFG_OSTYPE=unknown-haiku
- ;;
-
- MINGW*)
- # msys' `uname` does not print gcc configuration, but prints msys
- # configuration. so we cannot believe `uname -m`:
- # msys1 is always i686 and msys2 is always x86_64.
- # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
- # MINGW64 on x86_64.
- CFG_CPUTYPE=i686
- CFG_OSTYPE=pc-windows-gnu
- if [ "$MSYSTEM" = MINGW64 ]
- then
- CFG_CPUTYPE=x86_64
- fi
- ;;
-
- MSYS*)
- CFG_OSTYPE=pc-windows-gnu
- ;;
-
-# Thad's Cygwin identifiers below
-
-# Vista 32 bit
- CYGWIN_NT-6.0)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=i686
- ;;
-
-# Vista 64 bit
- CYGWIN_NT-6.0-WOW64)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=x86_64
- ;;
-
-# Win 7 32 bit
- CYGWIN_NT-6.1)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=i686
- ;;
-
-# Win 7 64 bit
- CYGWIN_NT-6.1-WOW64)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=x86_64
- ;;
-
-# Win 8 # uname -s on 64-bit cygwin does not contain WOW64, so simply use uname -m to detect arch (works in my install)
- CYGWIN_NT-6.3)
- CFG_OSTYPE=pc-windows-gnu
- ;;
-# We do not detect other OS such as XP/2003 using 64 bit using uname.
-# If we want to in the future, we will need to use Cygwin - Chuck's csih helper in /usr/lib/csih/winProductName.exe or alternative.
- *)
- err "unknown OS type: $CFG_OSTYPE"
- ;;
-esac
-
-
-case $CFG_CPUTYPE in
-
- i386 | i486 | i686 | i786 | x86)
- CFG_CPUTYPE=i686
- ;;
-
- xscale | arm)
- CFG_CPUTYPE=arm
- ;;
-
- armv6l)
- CFG_CPUTYPE=arm
- CFG_OSTYPE="${CFG_OSTYPE}eabihf"
- ;;
-
- armv7l)
- CFG_CPUTYPE=armv7
- CFG_OSTYPE="${CFG_OSTYPE}eabihf"
- ;;
-
- aarch64 | arm64)
- CFG_CPUTYPE=aarch64
- ;;
-
- powerpc | ppc)
- CFG_CPUTYPE=powerpc
- ;;
-
- powerpc64 | ppc64)
- CFG_CPUTYPE=powerpc64
- ;;
-
- powerpc64le | ppc64le)
- CFG_CPUTYPE=powerpc64le
- ;;
-
- s390x)
- CFG_CPUTYPE=s390x
- ;;
-
- x86_64 | x86-64 | x64 | amd64)
- CFG_CPUTYPE=x86_64
- ;;
-
- mips | mips64)
- if [ "$CFG_CPUTYPE" = "mips64" ]; then
- CFG_OSTYPE="${CFG_OSTYPE}abi64"
- fi
- ENDIAN=$(printf '\1' | od -dAn)
- if [ "$ENDIAN" -eq 1 ]; then
- CFG_CPUTYPE="${CFG_CPUTYPE}el"
- elif [ "$ENDIAN" -ne 256 ]; then
- err "unknown endianness: $ENDIAN (expecting 1 for little or 256 for big)"
- fi
- ;;
-
- BePC)
- CFG_CPUTYPE=i686
- ;;
-
- *)
- err "unknown CPU type: $CFG_CPUTYPE"
-esac
-
-# Detect 64 bit linux systems with 32 bit userland and force 32 bit compilation
-if [ $CFG_OSTYPE = unknown-linux-gnu -a $CFG_CPUTYPE = x86_64 ]
-then
- # $SHELL does not exist in standard 'sh', so probably only exists
- # if configure is running in an interactive bash shell. /usr/bin/env
- # exists *everywhere*.
- BIN_TO_PROBE="$SHELL"
- if [ ! -r "$BIN_TO_PROBE" ]; then
- if [ -r "/usr/bin/env" ]; then
- BIN_TO_PROBE="/usr/bin/env"
- else
- warn "Cannot check if the userland is i686 or x86_64"
- fi
- fi
- file -L "$BIN_TO_PROBE" | grep -q "x86[_-]64"
- if [ $? != 0 ]; then
- msg "i686 userland on x86_64 Linux kernel"
- CFG_CPUTYPE=i686
- fi
-fi
-
-
-DEFAULT_BUILD="${CFG_CPUTYPE}-${CFG_OSTYPE}"
-
CFG_SRC_DIR="$(abs_path $(dirname $0))/"
CFG_SRC_DIR_RELATIVE="$(dirname $0)/"
CFG_BUILD_DIR="$(pwd)/"
valopt llvm-root "" "set LLVM root"
valopt python "" "set path to python"
valopt jemalloc-root "" "set directory where libjemalloc_pic.a is located"
-valopt build "${DEFAULT_BUILD}" "GNUs ./configure syntax LLVM build triple"
+valopt build "" "GNUs ./configure syntax LLVM build triple"
valopt android-cross-path "" "Android NDK standalone path (deprecated)"
valopt i686-linux-android-ndk "" "i686-linux-android NDK standalone path"
valopt arm-linux-androideabi-ndk "" "arm-linux-androideabi NDK standalone path"
err "Found $python_version, but Python 2.7 is required"
fi
-# If we have no git directory then we are probably a tarball distribution
-# and shouldn't attempt to load submodules
-if [ ! -e ${CFG_SRC_DIR}.git ]
-then
- probe CFG_GIT git
- msg "git: no git directory. disabling submodules"
- CFG_DISABLE_MANAGE_SUBMODULES=1
-else
- probe_need CFG_GIT git
-fi
-
-# Use `md5sum` on GNU platforms, or `md5 -q` on BSD
-probe CFG_MD5 md5
-probe CFG_MD5SUM md5sum
-if [ -n "$CFG_MD5" ]
-then
- CFG_HASH_COMMAND="$CFG_MD5 -q | cut -c 1-8"
-elif [ -n "$CFG_MD5SUM" ]
-then
- CFG_HASH_COMMAND="$CFG_MD5SUM | cut -c 1-8"
-else
- err 'could not find one of: md5 md5sum'
-fi
-putvar CFG_HASH_COMMAND
-
-probe CFG_CLANG clang++
-probe CFG_CCACHE ccache
-probe CFG_GCC gcc
-probe CFG_LD ld
-probe CFG_VALGRIND valgrind
-probe CFG_PERF perf
-probe CFG_ISCC iscc
-probe CFG_ANTLR4 antlr4
-probe CFG_GRUN grun
-probe CFG_FLEX flex
-probe CFG_BISON bison
-probe CFG_GDB gdb
-probe CFG_LLDB lldb
-
-if [ -n "$CFG_ENABLE_NINJA" ]
-then
- probe CFG_NINJA ninja
- if [ -z "$CFG_NINJA" ]
- then
- # On Debian and Fedora, the `ninja` binary is an IRC bot, so the build tool was
- # renamed. Handle this case.
- probe CFG_NINJA ninja-build
- fi
-fi
-
-# For building LLVM
-if [ -z "$CFG_LLVM_ROOT" ]
-then
- probe_need CFG_CMAKE cmake
-fi
-
-# On MacOS X, invoking `javac` pops up a dialog if the JDK is not
-# installed. Since `javac` is only used if `antlr4` is available,
-# probe for it only in this case.
-if [ -n "$CFG_ANTLR4" ]
-then
- CFG_ANTLR4_JAR="\"$(find /usr/ -name antlr-complete.jar 2>/dev/null | head -n 1)\""
- if [ "x" = "x$CFG_ANTLR4_JAR" ]
- then
- CFG_ANTLR4_JAR="\"$(find ~ -name antlr-complete.jar 2>/dev/null | head -n 1)\""
- fi
- putvar CFG_ANTLR4_JAR $CFG_ANTLR4_JAR
- probe CFG_JAVAC javac
-fi
-
# the valgrind rpass tests will fail if you don't have a valgrind, but they're
# only disabled if you opt out.
if [ -z "$CFG_VALGRIND" ]
fi
fi
-if [ -n "$CFG_LLDB" ]
-then
- # Store LLDB's version
- CFG_LLDB_VERSION=$($CFG_LLDB --version 2>/dev/null | head -1)
- putvar CFG_LLDB_VERSION
-
- # If CFG_LLDB_PYTHON_DIR is not already set from the outside and valid, try to read it from
- # LLDB via the -P commandline options.
- if [ -z "$CFG_LLDB_PYTHON_DIR" ] || [ ! -d "$CFG_LLDB_PYTHON_DIR" ]
- then
- CFG_LLDB_PYTHON_DIR=$($CFG_LLDB -P)
-
- # If CFG_LLDB_PYTHON_DIR is not a valid directory, set it to something more readable
- if [ ! -d "$CFG_LLDB_PYTHON_DIR" ]
- then
- CFG_LLDB_PYTHON_DIR="LLDB_PYTHON_DIRECTORY_NOT_FOUND"
- fi
-
- putvar CFG_LLDB_PYTHON_DIR
- fi
-fi
-
-# LLDB tests on OSX require /usr/bin/python, not something like Homebrew's
-# /usr/local/bin/python. We're loading a compiled module for LLDB tests which is
-# only compatible with the system.
-case $CFG_BUILD in
- *-apple-darwin)
- CFG_LLDB_PYTHON=/usr/bin/python
- ;;
- *)
- CFG_LLDB_PYTHON=$CFG_PYTHON
- ;;
-esac
-putvar CFG_LLDB_PYTHON
-
# Do some sanity checks if running on buildbot
# (these env vars are set by rust-buildbot)
if [ -n "$RUST_DIST_SERVER" -a -n "$ALLOW_NONZERO_RLIMIT_CORE" ]; then
fi
fi
-step_msg "looking for target specific programs"
-
-probe CFG_ADB adb
-
BIN_SUF=
if [ "$CFG_OSTYPE" = "pc-windows-gnu" ] || [ "$CFG_OSTYPE" = "pc-windows-msvc" ]
then
CFG_DOCDIR=${CFG_DOCDIR%/}
CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
-CFG_SUPPORTED_TARGET=""
-for target_file in ${CFG_SRC_DIR}mk/cfg/*.mk; do
- CFG_SUPPORTED_TARGET="${CFG_SUPPORTED_TARGET} $(basename "$target_file" .mk)"
-done
# copy build-triples to host-triples so that builds are a subset of hosts
V_TEMP=""
putvar CFG_DOCDIR
putvar CFG_USING_LIBCPP
-# Avoid spurious warnings from clang by feeding it original source on
-# ccache-miss rather than preprocessed input.
-if [ -n "$CFG_ENABLE_CCACHE" ] && [ -n "$CFG_USING_CLANG" ]
-then
- CFG_CCACHE_CPP2=1
- putvar CFG_CCACHE_CPP2
-fi
-
-if [ -n "$CFG_ENABLE_CCACHE" ]
-then
- CFG_CCACHE_BASEDIR=${CFG_SRC_DIR}
- putvar CFG_CCACHE_BASEDIR
-fi
-
-
-putvar CFG_LLVM_SRC_DIR
-
-for t in $CFG_HOST
-do
- CFG_LLVM_BUILD_DIR=$(echo CFG_LLVM_BUILD_DIR_${t} | tr - _)
- CFG_LLVM_INST_DIR=$(echo CFG_LLVM_INST_DIR_${t} | tr - _)
- putvar $CFG_LLVM_BUILD_DIR
- putvar $CFG_LLVM_INST_DIR
-done
-
msg
copy_if_changed ${CFG_SRC_DIR}src/bootstrap/mk/Makefile.in ./Makefile
move_if_changed config.tmp config.mk
"libc 0.0.0",
]
-[[package]]
-name = "advapi32-sys"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "aho-corasick"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "aho-corasick"
version = "0.6.2"
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "bufstream"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "build-manifest"
version = "0.1.0"
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "cargo"
-version = "0.18.0"
-dependencies = [
- "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "bufstream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "cargotest 0.1.0",
- "crates-io 0.7.0",
- "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "flate2 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
- "fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "handlebars 0.25.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "miow 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "num_cpus 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_ignored 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "tar 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "cargotest"
-version = "0.1.0"
-dependencies = [
- "bufstream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "cargo 0.18.0",
- "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "flate2 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "tar 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "cargotest2"
version = "0.1.0"
-[[package]]
-name = "cfg-if"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "clap"
version = "2.20.5"
name = "core"
version = "0.0.0"
-[[package]]
-name = "crates-io"
-version = "0.7.0"
-dependencies = [
- "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "crossbeam"
-version = "0.2.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "curl"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "curl-sys 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "curl-sys"
-version = "0.3.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "docopt"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "dtoa"
version = "0.4.1"
"gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "flate2"
-version = "0.2.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "fmt_macros"
version = "0.0.0"
-[[package]]
-name = "foreign-types"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "fs2"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "gcc"
version = "0.3.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "gdi32-sys"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "getopts"
version = "0.0.0"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "git2"
-version = "0.6.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "git2-curl"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "glob"
-version = "0.2.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "graphviz"
version = "0.0.0"
-[[package]]
-name = "hamcrest"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "handlebars"
version = "0.25.1"
"serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "idna"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "itoa"
version = "0.3.1"
version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "libgit2-sys"
-version = "0.6.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cmake 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "curl-sys 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libssh2-sys 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "libssh2-sys"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cmake 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "libz-sys"
-version = "1.0.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "linkchecker"
version = "0.1.0"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "matches"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "mdbook"
version = "0.0.17"
"toml 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "memchr"
-version = "0.1.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "memchr"
version = "1.0.1"
"libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "miniz-sys"
-version = "0.1.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "miow"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "net2 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "net2"
-version = "0.2.26"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num"
-version = "0.1.36"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-complex 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-iter 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-rational 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-bigint"
-version = "0.1.35"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-complex"
-version = "0.1.35"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-integer"
-version = "0.1.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-iter"
-version = "0.1.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-rational"
-version = "0.1.35"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "num-traits"
version = "0.1.36"
"libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "num_cpus"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "open"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "openssl"
-version = "0.9.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "openssl-probe"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "openssl-sys"
-version = "0.9.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "panic_abort"
version = "0.0.0"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "pkg-config"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "proc_macro"
version = "0.0.0"
"syntax_pos 0.0.0",
]
-[[package]]
-name = "psapi-sys"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "pulldown-cmark"
version = "0.0.8"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "quote"
-version = "0.3.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "rand"
version = "0.0.0"
"core 0.0.0",
]
-[[package]]
-name = "rand"
-version = "0.3.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "regex"
-version = "0.1.80"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "regex"
version = "0.2.1"
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "regex-syntax"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "regex-syntax"
version = "0.4.0"
"syntax_pos 0.0.0",
]
-[[package]]
-name = "semver"
-version = "0.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "semver-parser"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "serde"
version = "0.9.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "serde_codegen_internals"
-version = "0.14.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "syn 0.11.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "serde_derive"
-version = "0.9.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "quote 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_codegen_internals 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.11.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "serde_ignored"
-version = "0.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "serde_json"
version = "0.9.7"
name = "serialize"
version = "0.0.0"
-[[package]]
-name = "shell-escape"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "std"
version = "0.0.0"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "syn"
-version = "0.11.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "quote 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "synom 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "synom"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "syntax"
version = "0.0.0"
"serialize 0.0.0",
]
-[[package]]
-name = "tar"
-version = "0.4.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "tempdir"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "term"
version = "0.0.0"
-[[package]]
-name = "term"
-version = "0.4.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "term_size"
version = "0.2.3"
"term 0.0.0",
]
-[[package]]
-name = "thread-id"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "thread-id"
version = "3.0.0"
"libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "thread_local"
-version = "0.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "thread_local"
version = "0.3.3"
"serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "unicode-bidi"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "unicode-normalization"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "unicode-segmentation"
version = "1.1.0"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "unicode-xid"
-version = "0.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "unreachable"
version = "0.1.1"
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "url"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "user32-sys"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "utf8-ranges"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "utf8-ranges"
version = "1.0.0"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "ws2_32-sys"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[metadata]
-"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a"
-"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
"checksum aho-corasick 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0638fd549427caa90c499814196d1b9e3725eb4d15d7339d6de073a680ed0ca2"
"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
"checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23"
"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
-"checksum bufstream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7b48dbe2ff0e98fa2f03377d204a9637d3c9816cd431bfe05a8abbd0ea11d074"
-"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
"checksum clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7db281b0520e97fbd15cd615dcd8f8bcad0c26f5f7d5effe705f090f39e9a758"
"checksum cmake 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "e1acc68a3f714627af38f9f5d09706a28584ba60dfe2cca68f40bf779f941b25"
-"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
-"checksum curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c90e1240ef340dd4027ade439e5c7c2064dd9dc652682117bd50d1486a3add7b"
-"checksum curl-sys 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)" = "c0d909dc402ae80b6f7b0118c039203436061b9d9a3ca5d2c2546d93e0a61aaa"
-"checksum docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab32ea6e284d87987066f21a9e809a73c14720571ef34516f0890b3d355ccfd8"
"checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "99971fb1b635fe7a0ee3c4d065845bb93cca80a23b5613b5613391ece5de4144"
"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
-"checksum flate2 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "d4e4d0c15ef829cbc1b7cda651746be19cceeb238be7b1049227b14891df9e25"
-"checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d"
-"checksum fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34edaee07555859dc13ca387e6ae05686bb4d0364c95d649b6dab959511f4baf"
"checksum gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)" = "c07c758b972368e703a562686adb39125707cc1ef3399da8c019fc6c2498a75d"
-"checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
-"checksum git2 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "046ae03385257040b2a35e56d9669d950dd911ba2bf48202fbef73ee6aab27b2"
-"checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e"
-"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
-"checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4"
"checksum handlebars 0.25.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b2249f6f0dc5a3bb2b3b1a8f797dfccbc4b053344d773d654ad565e51427d335"
-"checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6abe0ee2e758cd6bc8a2cd56726359007748fbf4128da998b65d0b70f881e19b"
"checksum libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)" = "684f330624d8c3784fb9558ca46c4ce488073a8d22450415c5eb4f4cfb0d11b5"
-"checksum libgit2-sys 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "d951fd5eccae07c74e8c2c1075b05ea1e43be7f8952245af8c2840d1480b1d95"
-"checksum libssh2-sys 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "91e135645c2e198a39552c8c7686bb5b83b1b99f64831c040a6c2798a1195934"
-"checksum libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e5ee912a45d686d393d5ac87fac15ba0ba18daae14e8e7543c63ebf7fb7e970c"
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
-"checksum matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efd7622e3022e1a6eaa602c4cea8912254e5582c9c692e9167714182244801b1"
"checksum mdbook 0.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "dbba458ca886cb082d026afd704eeeeb0531f7e4ffd6c619f72dc309c1c18fe4"
-"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
-"checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726"
-"checksum miow 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3a78d2605eb97302c10cf944b8d96b0a2a890c52957caf92fcd1f24f69049579"
-"checksum net2 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)" = "5edf9cb6be97212423aed9413dd4729d62b370b5e1c571750e882cebbbc1e3e2"
-"checksum num 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "bde7c03b09e7c6a301ee81f6ddf66d7a28ec305699e3d3b056d2fc56470e3120"
-"checksum num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "88b14378471f7c2adc5262f05b4701ef53e8da376453a8d8fee48e51db745e49"
-"checksum num-complex 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "f0c78e054dd19c3fd03419ade63fa661e9c49bb890ce3beb4eee5b7baf93f92f"
-"checksum num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "fb24d9bfb3f222010df27995441ded1e954f8f69cd35021f6bef02ca9552fb92"
-"checksum num-iter 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "287a1c9969a847055e1122ec0ea7a5c5d6f72aad97934e131c83d5c08ab4e45c"
-"checksum num-rational 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "54ff603b8334a72fbb27fe66948aac0abaaa40231b3cecd189e76162f6f38aaf"
"checksum num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a16a42856a256b39c6d3484f097f6713e14feacd9bfb02290917904fae46c81c"
"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
-"checksum num_cpus 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a225d1e2717567599c24f88e49f00856c6e825a12125181ee42c4257e3688d39"
"checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842"
-"checksum openssl 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)" = "f9871ecf7629da3760599e3e547d35940cff3cead49159b49f81cd1250f24f1d"
-"checksum openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "756d49c8424483a3df3b5d735112b4da22109ced9a8294f1f5cdf80fb3810919"
-"checksum openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5dd48381e9e8a6dce9c4c402db143b2e243f5f872354532f7a009c289b3998ca"
"checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
-"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
-"checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478"
"checksum pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1058d7bb927ca067656537eec4e02c2b4b70eaaa129664c5b90c111e20326f41"
"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
-"checksum quote 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "7375cf7ad34a92e8fd18dd9c42f58b9a11def59ab48bec955bf359a788335592"
-"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d"
-"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
"checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01"
-"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
"checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457"
"checksum rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "237546c689f20bb44980270c73c3b9edd0891c1be49cc1274406134a66d3957b"
-"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
-"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1e0ed773960f90a78567fcfbe935284adf50c5d7cf119aa2cf43bb0b4afa69bb"
-"checksum serde_codegen_internals 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4d52006899f910528a10631e5b727973fe668f3228109d1707ccf5bad5490b6e"
-"checksum serde_derive 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)" = "789ee9f3cd78c850948b94121020147f5220b47dafbf230d7098a93a58f726cf"
-"checksum serde_ignored 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4b3f5576874721d14690657e9f0ed286e72a52be2f6fdc0cf2f024182bd8f64"
"checksum serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb96d30e4e6f9fc52e08f51176d078b6f79b981dc3ed4134f7b850be9f446a8"
-"checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8"
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
-"checksum syn 0.11.8 (registry+https://github.com/rust-lang/crates.io-index)" = "37c279fb816210c9bb28b2c292664581e7b87b4561e86b94df462664d8620bb8"
-"checksum synom 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "27e31aa4b09b9f4cb12dff3c30ba503e17b1a624413d764d32dab76e3920e5bc"
-"checksum tar 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "1eb3bf6ec92843ca93f4fcfb5fc6dfe30534815b147885db4b5759b8e2ff7d52"
-"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
-"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
-"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
"checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"
-"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
"checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
"checksum toml 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "08272367dd2e766db3fa38f068067d17aa6a9dfd7259af24b3927db92f1e0c2f"
-"checksum unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3a078ebdd62c0e71a709c3d53d2af693fe09fe93fbff8344aebe289b78f9032"
-"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
-"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
-"checksum url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5ba8a749fb4479b043733416c244fa9d1d3af3d7c23804944651c8a448cb87e"
-"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47"
-"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cac5efe5cb0fa14ec2f84f83c701c562ee63f6dcc680861b21d65c682adfb05f"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
-"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
"tools/build-manifest",
"tools/qemu-test-client",
"tools/qemu-test-server",
- "tools/cargo",
]
# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit
def get_mk(self, key):
for line in iter(self.config_mk.splitlines()):
- if line.startswith(key):
- return line[line.find(':=') + 2:].strip()
+ if line.startswith(key + ' '):
+ var = line[line.find(':=') + 2:].strip()
+ if var != '':
+ return var
return None
def cargo(self):
sys.exit(err)
elif ostype == 'Darwin':
ostype = 'apple-darwin'
+ elif ostype == 'Haiku':
+ ostype = 'unknown-haiku'
elif ostype.startswith('MINGW'):
# msys' `uname` does not print gcc configuration, but prints msys
# configuration. so we cannot believe `uname -m`:
cputype = 'i686'
elif cputype in {'xscale', 'arm'}:
cputype = 'arm'
- elif cputype == 'armv7l':
+ elif cputype in {'armv6l', 'armv7l', 'armv8l'}:
cputype = 'arm'
ostype += 'eabihf'
+ elif cputype == 'armv7l':
+ cputype = 'armv7'
+ ostype += 'eabihf'
elif cputype == 'aarch64':
cputype = 'aarch64'
elif cputype == 'arm64':
raise ValueError('unknown byteorder: ' + sys.byteorder)
# only the n64 ABI is supported, indicate it
ostype += 'abi64'
- elif cputype in {'powerpc', 'ppc', 'ppc64'}:
+ elif cputype in {'powerpc', 'ppc'}:
cputype = 'powerpc'
+ elif cputype in {'powerpc64', 'ppc64'}:
+ cputype = 'powerpc64'
+ elif cputype in {'powerpc64le', 'ppc64le'}:
+ cputype = 'powerpc64le'
elif cputype == 'sparcv9':
pass
elif cputype in {'amd64', 'x86_64', 'x86-64', 'x64'}:
cputype = 'x86_64'
+ elif cputype == 's390x':
+ cputype = 's390x'
+ elif cputype == 'BePC':
+ cputype = 'i686'
else:
err = "unknown cpu type: " + cputype
if self.verbose:
let filename = e.file_name().into_string().unwrap();
if (target.contains("windows") && filename.ends_with(".exe")) ||
(!target.contains("windows") && !filename.contains(".")) ||
- (target.contains("emscripten") && filename.contains(".js")){
+ (target.contains("emscripten") && filename.ends_with(".js")) {
dst.push(e.path());
}
}
// build.clear_if_dirty(&out_dir, &libstd_stamp(build, stage, &host, target));
let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
- let dir = build.src.join("src/tools").join(tool);
+ let mut dir = build.src.join(tool);
+ if !dir.exists() {
+ dir = build.src.join("src/tools").join(tool);
+ }
cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
// We don't want to build tools dynamically as they'll be running across
pub llvm_static_stdcpp: bool,
pub llvm_link_shared: bool,
pub llvm_targets: Option<String>,
+ pub llvm_link_jobs: Option<u32>,
// rust codegen options
pub rust_optimize: bool,
version_check: Option<bool>,
static_libstdcpp: Option<bool>,
targets: Option<String>,
+ link_jobs: Option<u32>,
}
#[derive(RustcDecodable, Default, Clone)]
set(&mut config.llvm_version_check, llvm.version_check);
set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
config.llvm_targets = llvm.targets.clone();
+ config.llvm_link_jobs = llvm.link_jobs;
}
if let Some(ref rust) = toml.rust {
}
match key {
- "CFG_BUILD" => self.build = value.to_string(),
- "CFG_HOST" => {
- self.host = value.split(" ").map(|s| s.to_string())
- .collect();
- }
- "CFG_TARGET" => {
- self.target = value.split(" ").map(|s| s.to_string())
- .collect();
+ "CFG_BUILD" if value.len() > 0 => self.build = value.to_string(),
+ "CFG_HOST" if value.len() > 0 => {
+ self.host.extend(value.split(" ").map(|s| s.to_string()));
+
+ }
+ "CFG_TARGET" if value.len() > 0 => {
+ self.target.extend(value.split(" ").map(|s| s.to_string()));
}
"CFG_MUSL_ROOT" if value.len() > 0 => {
self.musl_root = Some(parse_configure_path(value));
# Rust team and file an issue if you need assistance in porting!
#targets = "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX"
+# Cap the number of parallel linker invocations when compiling LLVM.
+# This can be useful when building LLVM with debug info, which significantly
+# increases the size of binaries and consequently the memory required by
+# each linker process.
+# If absent or 0, linker invocations are treated like any other job and
+# controlled by rustbuild's -j parameter.
+#link-jobs = 0
+
# =============================================================================
# General build configuration options
# =============================================================================
let src_dirs = [
"man",
"src",
+ "cargo",
];
let filter_fn = move |path: &Path| {
println!("Dist cargo stage{} ({})", stage, target);
let compiler = Compiler::new(stage, &build.config.build);
- let src = build.src.join("src/tools/cargo");
+ let src = build.src.join("cargo");
let etc = src.join("src/etc");
- let release_num = &build.crates["cargo"].version;
- let name = format!("cargo-{}", build.package_vers(release_num));
- let version = build.cargo_info.version(build, release_num);
+ let release_num = build.cargo_release_num();
+ let name = format!("cargo-{}", build.package_vers(&release_num));
+ let version = build.cargo_info.version(build, &release_num);
let tmp = tmpdir(build);
let image = tmp.join("cargo-image");
println!("Dist extended stage{} ({})", stage, target);
let dist = distdir(build);
- let cargo_vers = &build.crates["cargo"].version;
+ let cargo_vers = build.cargo_release_num();
let rustc_installer = dist.join(format!("{}-{}.tar.gz",
pkgname(build, "rustc"),
target));
cmd.arg(distdir(build));
cmd.arg(today.trim());
cmd.arg(build.rust_package_vers());
- cmd.arg(build.cargo_info.version(build, &build.crates["cargo"].version));
+ cmd.arg(build.package_vers(&build.cargo_release_num()));
cmd.arg(addr);
t!(fs::create_dir_all(distdir(build)));
use std::fs::{self, File};
use std::io::prelude::*;
+use std::io;
+use std::path::Path;
use std::process::Command;
use {Build, Compiler, Mode};
-use util::cp_r;
+use util::{cp_r, symlink_dir};
use build_helper::up_to_date;
/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
.join(target).join("doc");
let rustdoc = build.rustdoc(&compiler);
- build.clear_if_dirty(&out_dir, &rustdoc);
+ // Here, what we're doing is creating a *symlink* (directory junction on
+ // Windows) to the final output location. This is not done as an
+ // optimization but rather for correctness. We've got three trees of
+ // documentation, one for std, one for test, and one for rustc. It's then
+ // our job to merge them all together.
+ //
+ // Unfortunately rustbuild doesn't know nearly as well how to merge doc
+ // trees as rustdoc does itself, so instead of actually having three
+ // separate trees we just have rustdoc output to the same location across
+ // all of them.
+ //
+ // This way rustdoc generates output directly into the final output
+ // location, and rustdoc will also directly handle merging.
+ let my_out = build.crate_doc_out(target);
+ build.clear_if_dirty(&my_out, &rustdoc);
+ t!(symlink_dir_force(&my_out, &out_dir));
let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
cargo.arg("--manifest-path")
build.run(&mut cargo);
- cp_r(&out_dir, &out)
+ cp_r(&my_out, &out);
}
/// Compile all libtest documentation.
.join(target).join("doc");
let rustdoc = build.rustdoc(&compiler);
- build.clear_if_dirty(&out_dir, &rustdoc);
+ // See docs in std above for why we symlink
+ let my_out = build.crate_doc_out(target);
+ build.clear_if_dirty(&my_out, &rustdoc);
+ t!(symlink_dir_force(&my_out, &out_dir));
let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
cargo.arg("--manifest-path")
.arg(build.src.join("src/libtest/Cargo.toml"));
build.run(&mut cargo);
- cp_r(&out_dir, &out)
+ cp_r(&my_out, &out);
}
/// Generate all compiler documentation.
let out_dir = build.stage_out(&compiler, Mode::Librustc)
.join(target).join("doc");
let rustdoc = build.rustdoc(&compiler);
- if !up_to_date(&rustdoc, &out_dir.join("rustc/index.html")) && out_dir.exists() {
- t!(fs::remove_dir_all(&out_dir));
- }
+
+ // See docs in std above for why we symlink
+ let my_out = build.crate_doc_out(target);
+ build.clear_if_dirty(&my_out, &rustdoc);
+ t!(symlink_dir_force(&my_out, &out_dir));
+
let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
cargo.arg("--manifest-path")
.arg(build.src.join("src/rustc/Cargo.toml"))
.arg("--features").arg(build.rustc_features());
+
+ // Like with libstd above if compiler docs aren't enabled then we're not
+ // documenting internal dependencies, so we have a whitelist.
+ if !build.config.compiler_docs {
+ cargo.arg("--no-deps");
+ for krate in &["proc_macro"] {
+ cargo.arg("-p").arg(krate);
+ }
+ }
+
build.run(&mut cargo);
- cp_r(&out_dir, &out)
+ cp_r(&my_out, &out);
}
/// Generates the HTML rendered error-index by running the
build.run(&mut index);
}
+
+fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
+ if let Ok(m) = fs::symlink_metadata(dst) {
+ if m.file_type().is_dir() {
+ try!(fs::remove_dir_all(dst));
+ } else {
+ // handle directory junctions on Windows by falling back to
+ // `remove_dir`.
+ try!(fs::remove_file(dst).or_else(|_| {
+ fs::remove_dir(dst)
+ }));
+ }
+ }
+
+ symlink_dir(src, dst)
+}
extern crate rustc_serialize;
extern crate toml;
-use std::collections::HashMap;
use std::cmp;
+use std::collections::HashMap;
use std::env;
use std::ffi::OsString;
use std::fs::{self, File};
+use std::io::Read;
use std::path::{Component, PathBuf, Path};
use std::process::Command;
self.out.join(target).join("doc")
}
+ /// Output directory for all crate documentation for a target (temporary)
+ ///
+ /// The artifacts here are then copied into `doc_out` above.
+ fn crate_doc_out(&self, target: &str) -> PathBuf {
+ self.out.join(target).join("crate-docs")
+ }
+
/// Returns true if no custom `llvm-config` is set for the specified target.
///
/// If no custom `llvm-config` was specified then Rust's llvm will be used.
self.rust_info.version(self, channel::CFG_RELEASE_NUM)
}
+ /// Returns the `a.b.c` version that Cargo is at.
+ fn cargo_release_num(&self) -> String {
+ let mut toml = String::new();
+ t!(t!(File::open(self.src.join("cargo/Cargo.toml"))).read_to_string(&mut toml));
+ for line in toml.lines() {
+ let prefix = "version = \"";
+ let suffix = "\"";
+ if line.starts_with(prefix) && line.ends_with(suffix) {
+ return line[prefix.len()..line.len() - suffix.len()].to_string()
+ }
+ }
+
+ panic!("failed to find version in cargo's Cargo.toml")
+ }
+
/// Returns whether unstable features should be enabled for the compiler
/// we're building.
fn unstable_features(&self) -> bool {
cfg.define("LLVM_BUILD_32_BITS", "ON");
}
+ if let Some(num_linkers) = build.config.llvm_link_jobs {
+ if num_linkers > 0 {
+ cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
+ }
+ }
+
// http://llvm.org/docs/HowToCrossCompileLLVM.html
if target != build.config.build {
// FIXME: if the llvm root for the build triple is overridden then we
rules.build("tool-qemu-test-client", "src/tools/qemu-test-client")
.dep(|s| s.name("libstd"))
.run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-client"));
- rules.build("tool-cargo", "src/tools/cargo")
+ rules.build("tool-cargo", "cargo")
.dep(|s| s.name("libstd"))
.dep(|s| s.stage(0).host(s.target).name("openssl"))
.dep(move |s| {
rules.doc(&krate.doc_step, path)
.dep(|s| s.name("librustc-link"))
.host(true)
- .default(default && build.config.compiler_docs)
+ .default(default && build.config.docs)
.run(move |s| doc::rustc(build, s.stage, s.target));
}
use std::env;
use std::ffi::OsString;
use std::fs;
+use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::Instant;
time.subsec_nanos() / 1_000_000);
}
}
+
+/// Creates a symlink at `dest` pointing to the directory `src`, using a
+/// directory junction on Windows and a normal symlink on Unix.
+pub fn symlink_dir(src: &Path, dest: &Path) -> io::Result<()> {
+ let _ = fs::remove_dir(dest);
+ return symlink_dir_inner(src, dest);
+
+ #[cfg(not(windows))]
+ fn symlink_dir_inner(src: &Path, dest: &Path) -> io::Result<()> {
+ use std::os::unix::fs;
+ fs::symlink(src, dest)
+ }
+
+ // Creating a directory junction on windows involves dealing with reparse
+ // points and the DeviceIoControl function, and this code is a skeleton of
+ // what can be found here:
+ //
+ // http://www.flexhex.com/docs/articles/hard-links.phtml
+ //
+ // Copied from std
+ #[cfg(windows)]
+ #[allow(bad_style)]
+ fn symlink_dir_inner(target: &Path, junction: &Path) -> io::Result<()> {
+ use std::ptr;
+ use std::ffi::OsStr;
+ use std::os::windows::ffi::OsStrExt;
+
+ const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024;
+ const GENERIC_WRITE: DWORD = 0x40000000;
+ const OPEN_EXISTING: DWORD = 3;
+ const FILE_FLAG_OPEN_REPARSE_POINT: DWORD = 0x00200000;
+ const FILE_FLAG_BACKUP_SEMANTICS: DWORD = 0x02000000;
+ const FSCTL_SET_REPARSE_POINT: DWORD = 0x900a4;
+ const IO_REPARSE_TAG_MOUNT_POINT: DWORD = 0xa0000003;
+ const FILE_SHARE_DELETE: DWORD = 0x4;
+ const FILE_SHARE_READ: DWORD = 0x1;
+ const FILE_SHARE_WRITE: DWORD = 0x2;
+
+ type BOOL = i32;
+ type DWORD = u32;
+ type HANDLE = *mut u8;
+ type LPCWSTR = *const u16;
+ type LPDWORD = *mut DWORD;
+ type LPOVERLAPPED = *mut u8;
+ type LPSECURITY_ATTRIBUTES = *mut u8;
+ type LPVOID = *mut u8;
+ type WCHAR = u16;
+ type WORD = u16;
+
+ #[repr(C)]
+ struct REPARSE_MOUNTPOINT_DATA_BUFFER {
+ ReparseTag: DWORD,
+ ReparseDataLength: DWORD,
+ Reserved: WORD,
+ ReparseTargetLength: WORD,
+ ReparseTargetMaximumLength: WORD,
+ Reserved1: WORD,
+ ReparseTarget: WCHAR,
+ }
+
+ extern "system" {
+ fn CreateFileW(lpFileName: LPCWSTR,
+ dwDesiredAccess: DWORD,
+ dwShareMode: DWORD,
+ lpSecurityAttributes: LPSECURITY_ATTRIBUTES,
+ dwCreationDisposition: DWORD,
+ dwFlagsAndAttributes: DWORD,
+ hTemplateFile: HANDLE)
+ -> HANDLE;
+ fn DeviceIoControl(hDevice: HANDLE,
+ dwIoControlCode: DWORD,
+ lpInBuffer: LPVOID,
+ nInBufferSize: DWORD,
+ lpOutBuffer: LPVOID,
+ nOutBufferSize: DWORD,
+ lpBytesReturned: LPDWORD,
+ lpOverlapped: LPOVERLAPPED) -> BOOL;
+ }
+
+ fn to_u16s<S: AsRef<OsStr>>(s: S) -> io::Result<Vec<u16>> {
+ Ok(s.as_ref().encode_wide().chain(Some(0)).collect())
+ }
+
+ // We're using low-level APIs to create the junction, and these are more
+ // picky about paths. For example, forward slashes cannot be used as a
+ // path separator, so we should try to canonicalize the path first.
+ let target = try!(fs::canonicalize(target));
+
+ try!(fs::create_dir(junction));
+
+ let path = try!(to_u16s(junction));
+
+ unsafe {
+ let h = CreateFileW(path.as_ptr(),
+ GENERIC_WRITE,
+ FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
+ 0 as *mut _,
+ OPEN_EXISTING,
+ FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
+ ptr::null_mut());
+
+ let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
+ let mut db = data.as_mut_ptr()
+ as *mut REPARSE_MOUNTPOINT_DATA_BUFFER;
+ let buf = &mut (*db).ReparseTarget as *mut _;
+ let mut i = 0;
+ // FIXME: this conversion is very hacky
+ let v = br"\??\";
+ let v = v.iter().map(|x| *x as u16);
+ for c in v.chain(target.as_os_str().encode_wide().skip(4)) {
+ *buf.offset(i) = c;
+ i += 1;
+ }
+ *buf.offset(i) = 0;
+ i += 1;
+ (*db).ReparseTag = IO_REPARSE_TAG_MOUNT_POINT;
+ (*db).ReparseTargetMaximumLength = (i * 2) as WORD;
+ (*db).ReparseTargetLength = ((i - 1) * 2) as WORD;
+ (*db).ReparseDataLength =
+ (*db).ReparseTargetLength as DWORD + 12;
+
+ let mut ret = 0;
+ let res = DeviceIoControl(h as *mut _,
+ FSCTL_SET_REPARSE_POINT,
+ data.as_ptr() as *mut _,
+ (*db).ReparseDataLength + 8,
+ ptr::null_mut(), 0,
+ &mut ret,
+ ptr::null_mut());
+
+ if res == 0 {
+ Err(io::Error::last_os_error())
+ } else {
+ Ok(())
+ }
+ }
+ }
+}
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ ninja-build \
+ file \
+ curl \
+ ca-certificates \
+ python2.7-dev \
+ git \
+ sudo \
+ bzip2 \
+ xz-utils \
+ swig \
+ libedit-dev \
+ libncurses5-dev
+
+RUN curl -L https://cmake.org/files/v3.8/cmake-3.8.0-rc1-Linux-x86_64.tar.gz | \
+ tar xzf - -C /usr/local --strip-components=1
+
+WORKDIR /tmp
+COPY shared.sh build-toolchain.sh /tmp/
+RUN /tmp/build-toolchain.sh
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENV \
+ AR_x86_64_unknown_fuchsia=x86_64-unknown-fuchsia-ar \
+ CC_x86_64_unknown_fuchsia=x86_64-unknown-fuchsia-clang \
+ CXX_x86_64_unknown_fuchsia=x86_64-unknown-fuchsia-clang++ \
+ AR_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-ar \
+ CC_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-clang \
+ CXX_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-clang++
+
+ENV TARGETS=x86_64-unknown-fuchsia
+ENV TARGETS=$TARGETS,aarch64-unknown-fuchsia
+
+ENV RUST_CONFIGURE_ARGS --target=$TARGETS
+ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
--- /dev/null
+#!/bin/bash
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+source shared.sh
+
+# Download sources
+SRCS=(
+ "https://fuchsia.googlesource.com/magenta magenta ac69119"
+ "https://fuchsia.googlesource.com/third_party/llvm llvm 5463083"
+ "https://fuchsia.googlesource.com/third_party/clang llvm/tools/clang 4ff7b4b"
+ "https://fuchsia.googlesource.com/third_party/lld llvm/tools/lld fd465a3"
+ "https://fuchsia.googlesource.com/third_party/lldb llvm/tools/lldb 6bb11f8"
+ "https://fuchsia.googlesource.com/third_party/compiler-rt llvm/runtimes/compiler-rt 52d4ecc"
+ "https://fuchsia.googlesource.com/third_party/libcxx llvm/runtimes/libcxx e891cc8"
+ "https://fuchsia.googlesource.com/third_party/libcxxabi llvm/runtimes/libcxxabi f0f0257"
+ "https://fuchsia.googlesource.com/third_party/libunwind llvm/runtimes/libunwind 50bddc1"
+)
+
+fetch() {
+ mkdir -p $2
+ pushd $2 > /dev/null
+ curl -sL $1/+archive/$3.tar.gz | tar xzf -
+ popd > /dev/null
+}
+
+for i in "${SRCS[@]}"; do
+ fetch $i
+done
+
+# Build toolchain
+cd llvm
+mkdir build
+cd build
+hide_output cmake -GNinja \
+ -DFUCHSIA_SYSROOT=${PWD}/../../magenta/third_party/ulib/musl \
+ -DLLVM_ENABLE_LTO=OFF \
+ -DCLANG_BOOTSTRAP_PASSTHROUGH=LLVM_ENABLE_LTO \
+ -C ../tools/clang/cmake/caches/Fuchsia.cmake \
+ ..
+hide_output ninja stage2-distribution
+hide_output ninja stage2-install-distribution
+cd ../..
+
+# Build sysroot
+rm -rf llvm/runtimes/compiler-rt
+./magenta/scripts/download-toolchain
+
+build_sysroot() {
+ local arch="$1"
+
+ case "${arch}" in
+ x86_64) tgt="magenta-pc-x86-64" ;;
+ aarch64) tgt="magenta-qemu-arm64" ;;
+ esac
+
+ hide_output make -C magenta -j$(getconf _NPROCESSORS_ONLN) $tgt
+ dst=/usr/local/${arch}-unknown-fuchsia
+ mkdir -p $dst
+ cp -r magenta/build-${tgt}/sysroot/include $dst/
+ cp -r magenta/build-${tgt}/sysroot/lib $dst/
+
+ cd llvm
+ mkdir build-runtimes-${arch}
+ cd build-runtimes-${arch}
+ hide_output cmake -GNinja \
+ -DCMAKE_C_COMPILER=clang \
+ -DCMAKE_CXX_COMPILER=clang++ \
+ -DCMAKE_AR=/usr/local/bin/llvm-ar \
+ -DCMAKE_RANLIB=/usr/local/bin/llvm-ranlib \
+ -DCMAKE_INSTALL_PREFIX= \
+ -DLLVM_MAIN_SRC_DIR=${PWD}/.. \
+ -DLLVM_BINARY_DIR=${PWD}/../build \
+ -DLLVM_ENABLE_WERROR=OFF \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_INCLUDE_TESTS=ON \
+ -DCMAKE_SYSTEM_NAME=Fuchsia \
+ -DCMAKE_C_COMPILER_TARGET=${arch}-fuchsia \
+ -DCMAKE_CXX_COMPILER_TARGET=${arch}-fuchsia \
+ -DUNIX=1 \
+ -DLIBCXX_HAS_MUSL_LIBC=ON \
+ -DLIBCXXABI_USE_LLVM_UNWINDER=ON \
+ -DCMAKE_SYSROOT=${dst} \
+ -DCMAKE_C_COMPILER_FORCED=TRUE \
+ -DCMAKE_CXX_COMPILER_FORCED=TRUE \
+ -DLLVM_ENABLE_LIBCXX=ON \
+ -DCMAKE_EXE_LINKER_FLAGS="-nodefaultlibs -lc" \
+ -DCMAKE_SHARED_LINKER_FLAGS="$(clang --target=${arch}-fuchsia -print-libgcc-file-name)" \
+ ../runtimes
+ hide_output env DESTDIR="${dst}" ninja install
+ cd ../..
+}
+
+build_sysroot "x86_64"
+build_sysroot "aarch64"
+
+rm -rf magenta llvm
+
+for arch in x86_64 aarch64; do
+ for tool in clang clang++; do
+ cat >/usr/local/bin/${arch}-unknown-fuchsia-${tool} <<EOF
+#!/bin/sh
+${tool} --target=${arch}-unknown-fuchsia --sysroot=/usr/local/${arch}-unknown-fuchsia "\$@"
+EOF
+ chmod +x /usr/local/bin/${arch}-unknown-fuchsia-${tool}
+ done
+ ln -s /usr/local/bin/llvm-ar /usr/local/bin/${arch}-unknown-fuchsia-ar
+done
--- /dev/null
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+hide_output() {
+ set +x
+ on_err="
+echo ERROR: An error was encountered with the build.
+cat /tmp/build.log
+exit 1
+"
+ trap "$on_err" ERR
+ bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
+ PING_LOOP_PID=$!
+ "$@" &> /tmp/build.log
+ trap - ERR
+ kill $PING_LOOP_PID
+ set -x
+}
objdir=$root_dir/obj
mkdir -p $HOME/.cargo
-mkdir -p $objdir
+mkdir -p $objdir/tmp
args=
if [ "$SCCACHE_BUCKET" != "" ]; then
args="$args --env SCCACHE_BUCKET=$SCCACHE_BUCKET"
args="$args --env AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID"
args="$args --env AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY"
+ args="$args --env SCCACHE_ERROR_LOG=/tmp/sccache/sccache.log"
+ args="$args --env SCCACHE_LOG_LEVEL=debug"
+ args="$args --env RUST_LOG=sccache=debug"
+ args="$args --volume $objdir/tmp:/tmp/sccache"
else
mkdir -p $HOME/.cache/sccache
args="$args --env SCCACHE_DIR=/sccache --volume $HOME/.cache/sccache:/sccache"
--env DEPLOY_ALT=$DEPLOY_ALT \
--env LOCAL_USER_ID=`id -u` \
--volume "$HOME/.cargo:/cargo" \
+ --privileged \
--rm \
rust-ci \
/checkout/src/ci/run.sh
impl<T> Box<T> {
/// Allocates memory on the heap and then places `x` into it.
///
+ /// This doesn't actually allocate if `T` is zero-sized.
+ ///
/// # Examples
///
/// ```
use core::fmt;
use core::iter::{repeat, FromIterator, FusedIterator};
use core::mem;
-use core::ops::{Index, IndexMut};
+use core::ops::{Index, IndexMut, Place, Placer, InPlace};
use core::ptr;
use core::ptr::Shared;
use core::slice;
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_front(&mut self, value: T) {
- if self.is_full() {
- let old_cap = self.cap();
- self.buf.double();
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- debug_assert!(!self.is_full());
- }
+ self.grow_if_necessary();
self.tail = self.wrap_sub(self.tail, 1);
let tail = self.tail;
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_back(&mut self, value: T) {
- if self.is_full() {
- let old_cap = self.cap();
- self.buf.double();
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- debug_assert!(!self.is_full());
- }
+ self.grow_if_necessary();
let head = self.head;
self.head = self.wrap_add(self.head, 1);
#[stable(feature = "deque_extras_15", since = "1.5.0")]
pub fn insert(&mut self, index: usize, value: T) {
assert!(index <= self.len(), "index out of bounds");
- if self.is_full() {
- let old_cap = self.cap();
- self.buf.double();
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- debug_assert!(!self.is_full());
- }
+ self.grow_if_necessary();
// Move the least number of elements in the ring buffer and insert
// the given object
self.truncate(len - del);
}
}
+
+ // This may panic or abort
+ #[inline]
+ fn grow_if_necessary(&mut self) {
+ if self.is_full() {
+ let old_cap = self.cap();
+ self.buf.double();
+ unsafe {
+ self.handle_cap_increase(old_cap);
+ }
+ debug_assert!(!self.is_full());
+ }
+ }
+
+ /// Returns a place for insertion at the back of the `VecDeque`.
+ ///
+ /// Using this method with placement syntax is equivalent to [`push_back`](#method.push_back),
+ /// but may be more efficient.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(collection_placement)]
+ /// #![feature(placement_in_syntax)]
+ ///
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.place_back() <- 3;
+ /// buf.place_back() <- 4;
+ /// assert_eq!(&buf, &[3, 4]);
+ /// ```
+ #[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+ pub fn place_back(&mut self) -> PlaceBack<T> {
+ PlaceBack { vec_deque: self }
+ }
+
+ /// Returns a place for insertion at the front of the `VecDeque`.
+ ///
+ /// Using this method with placement syntax is equivalent to [`push_front`](#method.push_front),
+ /// but may be more efficient.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(collection_placement)]
+ /// #![feature(placement_in_syntax)]
+ ///
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.place_front() <- 3;
+ /// buf.place_front() <- 4;
+ /// assert_eq!(&buf, &[4, 3]);
+ /// ```
+ #[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+ pub fn place_front(&mut self) -> PlaceFront<T> {
+ PlaceFront { vec_deque: self }
+ }
}
impl<T: Clone> VecDeque<T> {
/// Modifies the `VecDeque` in-place so that `len()` is equal to new_len,
- /// either by removing excess elements or by appending copies of a value to the back.
+ /// either by removing excess elements or by appending clones of `value` to the back.
///
/// # Examples
///
}
}
+/// A place for insertion at the back of a `VecDeque`.
+///
+/// See [`VecDeque::place_back`](struct.VecDeque.html#method.place_back) for details.
+#[must_use = "places do nothing unless written to with `<-` syntax"]
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol are subject to change",
+ issue = "30172")]
+#[derive(Debug)]
+pub struct PlaceBack<'a, T: 'a> {
+ vec_deque: &'a mut VecDeque<T>,
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> Placer<T> for PlaceBack<'a, T> {
+ type Place = PlaceBack<'a, T>;
+
+ fn make_place(self) -> Self {
+ self.vec_deque.grow_if_necessary();
+ self
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> Place<T> for PlaceBack<'a, T> {
+ fn pointer(&mut self) -> *mut T {
+ unsafe { self.vec_deque.ptr().offset(self.vec_deque.head as isize) }
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> InPlace<T> for PlaceBack<'a, T> {
+ type Owner = &'a mut T;
+
+ unsafe fn finalize(mut self) -> &'a mut T {
+ let head = self.vec_deque.head;
+ self.vec_deque.head = self.vec_deque.wrap_add(head, 1);
+ &mut *(self.vec_deque.ptr().offset(head as isize))
+ }
+}
+
+/// A place for insertion at the front of a `VecDeque`.
+///
+/// See [`VecDeque::place_front`](struct.VecDeque.html#method.place_front) for details.
+#[must_use = "places do nothing unless written to with `<-` syntax"]
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol are subject to change",
+ issue = "30172")]
+#[derive(Debug)]
+pub struct PlaceFront<'a, T: 'a> {
+ vec_deque: &'a mut VecDeque<T>,
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> Placer<T> for PlaceFront<'a, T> {
+ type Place = PlaceFront<'a, T>;
+
+ fn make_place(self) -> Self {
+ self.vec_deque.grow_if_necessary();
+ self
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> Place<T> for PlaceFront<'a, T> {
+ fn pointer(&mut self) -> *mut T {
+ let tail = self.vec_deque.wrap_sub(self.vec_deque.tail, 1);
+ unsafe { self.vec_deque.ptr().offset(tail as isize) }
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> InPlace<T> for PlaceFront<'a, T> {
+ type Owner = &'a mut T;
+
+ unsafe fn finalize(mut self) -> &'a mut T {
+ self.vec_deque.tail = self.vec_deque.wrap_sub(self.vec_deque.tail, 1);
+ &mut *(self.vec_deque.ptr().offset(self.vec_deque.tail as isize))
+ }
+}
+
#[cfg(test)]
mod tests {
use test;
}
}
}
+
}
extern crate collections;
extern crate test;
extern crate std_unicode;
+extern crate core;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use std::collections::VecDeque;
use std::fmt::Debug;
use std::collections::vec_deque::Drain;
use self::Taggy::*;
use self::Taggypar::*;
assert!(v.iter_mut().is_empty());
assert!(v.into_iter().is_empty());
}
+
+#[test]
+fn test_placement_in() {
+ let mut buf: VecDeque<isize> = VecDeque::new();
+ buf.place_back() <- 1;
+ buf.place_back() <- 2;
+ assert_eq!(buf, [1,2]);
+
+ buf.place_front() <- 3;
+ buf.place_front() <- 4;
+ assert_eq!(buf, [4,3,1,2]);
+
+ {
+ let ptr_head = buf.place_front() <- 5;
+ assert_eq!(*ptr_head, 5);
+ }
+ {
+ let ptr_tail = buf.place_back() <- 6;
+ assert_eq!(*ptr_tail, 6);
+ }
+ assert_eq!(buf, [5,4,3,1,2,6]);
+}
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Display {
/// Formats the value using the given formatter.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::fmt;
+ ///
+ /// struct Position {
+ /// longitude: f32,
+ /// latitude: f32,
+ /// }
+ ///
+ /// impl fmt::Display for Position {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "({}, {})", self.longitude, self.latitude)
+ /// }
+ /// }
+ ///
+ /// assert_eq!("(1.987, 2.983)".to_owned(),
+ /// format!("{}", Position { longitude: 1.987, latitude: 2.983, }));
+ /// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter) -> Result;
}
}
impl<'a> Formatter<'a> {
-
// First up is the collection of functions used to execute a format string
// at runtime. This consumes all of the compile-time statics generated by
// the format! syntax extension.
//! This functionality is intended to be expanded over time as more surface
//! area for macro authors is stabilized.
//!
-//! See [the book](../../book/procedural-macros.html) for more.
+//! See [the book](../book/procedural-macros.html) for more.
#![crate_name = "proc_macro"]
#![stable(feature = "proc_macro_lib", since = "1.15.0")]
use super::dep_node::{DepNode, WorkProductId};
use super::query::DepGraphQuery;
use super::raii;
+use super::safe::DepGraphSafe;
use super::thread::{DepGraphThreadData, DepMessage};
#[derive(Clone)]
op()
}
- pub fn with_task<OP,R>(&self, key: DepNode<DefId>, op: OP) -> R
- where OP: FnOnce() -> R
+ /// Starts a new dep-graph task. Dep-graph tasks are specified
+ /// using a free function (`task`) and **not** a closure -- this
+ /// is intentional because we want to exercise tight control over
+ /// what state they have access to. In particular, we want to
+ /// prevent implicit 'leaks' of tracked state into the task (which
+ /// could then be read without generating correct edges in the
+ /// dep-graph -- see the [README] for more details on the
+ /// dep-graph). To this end, the task function gets exactly two
+ /// pieces of state: the context `cx` and an argument `arg`. Both
+ /// of these bits of state must be of some type that implements
+ /// `DepGraphSafe` and hence does not leak.
+ ///
+ /// The choice of two arguments is not fundamental. One argument
+ /// would work just as well, since multiple values can be
+ /// collected using tuples. However, using two arguments works out
+ /// to be quite convenient, since it is common to need a context
+ /// (`cx`) and some argument (e.g., a `DefId` identifying what
+ /// item to process).
+ ///
+ /// For cases where you need some other number of arguments:
+ ///
+ /// - If you only need one argument, just use `()` for the `arg`
+ /// parameter.
+ /// - If you need 3+ arguments, use a tuple for the
+ /// `arg` parameter.
+ ///
+ /// [README]: README.md
+ pub fn with_task<C, A, R>(&self, key: DepNode<DefId>, cx: C, arg: A, task: fn(C, A) -> R) -> R
+ where C: DepGraphSafe, A: DepGraphSafe
{
let _task = self.in_task(key);
- op()
+ task(cx, arg)
}
pub fn read(&self, v: DepNode<DefId>) {
mod graph;
mod query;
mod raii;
+mod safe;
mod shadow;
mod thread;
mod visit;
pub use self::graph::DepGraph;
pub use self::graph::WorkProduct;
pub use self::query::DepGraphQuery;
+pub use self::safe::AssertDepGraphSafe;
+pub use self::safe::DepGraphSafe;
pub use self::visit::visit_all_bodies_in_krate;
pub use self::visit::visit_all_item_likes_in_krate;
pub use self::raii::DepTask;
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use hir::BodyId;
+use hir::def_id::DefId;
+use syntax::ast::NodeId;
+use ty::TyCtxt;
+
+/// The `DepGraphSafe` trait is used to specify what kinds of values
+/// are safe to "leak" into a task. The idea is that this should
+/// only be implemented for things like the tcx as well as various id
+/// types, which will create reads in the dep-graph whenever the trait
+/// loads anything that might depend on the input program.
+pub trait DepGraphSafe {
+}
+
+/// A `BodyId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for BodyId {
+}
+
+/// A `NodeId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for NodeId {
+}
+
+/// A `DefId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for DefId {
+}
+
+/// The type context itself can be used to access all kinds of tracked
+/// state, but those accesses should always generate read events.
+impl<'a, 'gcx, 'tcx> DepGraphSafe for TyCtxt<'a, 'gcx, 'tcx> {
+}
+
+/// Tuples make it easy to build up state.
+impl<A, B> DepGraphSafe for (A, B)
+ where A: DepGraphSafe, B: DepGraphSafe
+{
+}
+
+/// No data here! :)
+impl DepGraphSafe for () {
+}
+
+/// A convenient override that lets you pass arbitrary state into a
+/// task. Every use should be accompanied by a comment explaining why
+/// it makes sense (or how it could be refactored away in the future).
+pub struct AssertDepGraphSafe<T>(pub T);
+
+impl<T> DepGraphSafe for AssertDepGraphSafe<T> {
+}
trait_items: BTreeMap<hir::TraitItemId, hir::TraitItem>,
impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem>,
bodies: BTreeMap<hir::BodyId, hir::Body>,
+ exported_macros: Vec<hir::MacroDef>,
trait_impls: BTreeMap<DefId, Vec<NodeId>>,
trait_default_impl: BTreeMap<DefId, NodeId>,
bodies: BTreeMap::new(),
trait_impls: BTreeMap::new(),
trait_default_impl: BTreeMap::new(),
+ exported_macros: Vec::new(),
loop_scopes: Vec::new(),
is_in_loop_condition: false,
type_def_lifetime_params: DefIdMap(),
impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
fn visit_item(&mut self, item: &'lcx Item) {
- let hir_item = self.lctx.lower_item(item);
- self.lctx.items.insert(item.id, hir_item);
- visit::walk_item(self, item);
+ if let Some(hir_item) = self.lctx.lower_item(item) {
+ self.lctx.items.insert(item.id, hir_item);
+ visit::walk_item(self, item);
+ }
}
fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
let module = self.lower_mod(&c.module);
let attrs = self.lower_attrs(&c.attrs);
- let exported_macros = c.exported_macros.iter().map(|m| self.lower_macro_def(m)).collect();
let body_ids = body_ids(&self.bodies);
hir::Crate {
module: module,
attrs: attrs,
span: c.span,
- exported_macros: exported_macros,
+ exported_macros: hir::HirVec::from(self.exported_macros),
items: self.items,
trait_items: self.trait_items,
impl_items: self.impl_items,
bounds,
items)
}
- ItemKind::Mac(_) => panic!("Shouldn't still be around"),
+ ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
}
}
}
}
- fn lower_macro_def(&mut self, m: &MacroDef) -> hir::MacroDef {
- hir::MacroDef {
- name: m.ident.name,
- attrs: self.lower_attrs(&m.attrs),
- id: m.id,
- span: m.span,
- body: m.body.clone().into(),
- }
- }
-
fn lower_item_id(&mut self, i: &Item) -> SmallVector<hir::ItemId> {
- if let ItemKind::Use(ref view_path) = i.node {
- if let ViewPathList(_, ref imports) = view_path.node {
- return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
- .map(|id| hir::ItemId { id: id }).collect();
+ match i.node {
+ ItemKind::Use(ref view_path) => {
+ if let ViewPathList(_, ref imports) = view_path.node {
+ return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
+ .map(|id| hir::ItemId { id: id }).collect();
+ }
}
+ ItemKind::MacroDef(..) => return SmallVector::new(),
+ _ => {}
}
SmallVector::one(hir::ItemId { id: i.id })
}
- pub fn lower_item(&mut self, i: &Item) -> hir::Item {
+ pub fn lower_item(&mut self, i: &Item) -> Option<hir::Item> {
let mut name = i.ident.name;
let attrs = self.lower_attrs(&i.attrs);
let mut vis = self.lower_visibility(&i.vis);
+ if let ItemKind::MacroDef(ref tts) = i.node {
+ if i.attrs.iter().any(|attr| attr.name() == "macro_export") {
+ self.exported_macros.push(hir::MacroDef {
+ name: name, attrs: attrs, id: i.id, span: i.span, body: tts.clone().into(),
+ });
+ }
+ return None;
+ }
+
let node = self.with_parent_def(i.id, |this| {
this.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node)
});
- hir::Item {
+ Some(hir::Item {
id: i.id,
name: name,
attrs: attrs,
node: node,
vis: vis,
span: i.span,
- }
+ })
}
fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem {
ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_str()),
ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
DefPathData::ValueNs(i.ident.name.as_str()),
- ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder
+ ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.name.as_str()),
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false),
ItemKind::Use(ref view_path) => {
match view_path.node {
self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
}
- fn visit_macro_def(&mut self, macro_def: &'a MacroDef) {
- self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
- }
-
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt.node {
StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false),
The main case which fails today that I would like to support is:
-```text
+```rust
fn foo<T>(x: T, y: T) { ... }
fn bar() {
`X`, and thus inherits its UB/LB of `@mut int`. This leaves no
flexibility for `T` to later adjust to accommodate `@int`.
+Note: `@T` and `@mut T` correspond to `Rc<T>` and `Rc<RefCell<T>>`,
+respectively, in current Rust.
+
### What to do when not all bounds are present
In the prior discussion we assumed that A.ub was not top and B.lb was
"execute" by testing the value they are applied to and creating any
relevant bindings). So, for example:
- fn foo(x: isize, y: isize) { // -+
- // +------------+ // |
- // | +-----+ // |
- // | +-+ +-+ +-+ // |
- // | | | | | | | // |
- // v v v v v v v // |
- let z = x + y; // |
- ... // |
- } // -+
-
- fn bar() { ... }
+```rust
+fn foo(x: isize, y: isize) { // -+
+// +------------+ // |
+// | +-----+ // |
+// | +-+ +-+ +-+ // |
+// | | | | | | | // |
+// v v v v v v v // |
+ let z = x + y; // |
+ ... // |
+} // -+
+
+fn bar() { ... }
+```
In this example, there is a region for the fn body block as a whole,
and then a subregion for the declaration of the local variable.
particular when combined with `&mut` functions. For example, a call
like this one
- self.foo(self.bar())
+```rust
+self.foo(self.bar())
+```
where both `foo` and `bar` are `&mut self` functions will always yield
an error.
Here is a more involved example (which is safe) so we can see what's
going on:
- struct Foo { f: usize, g: usize }
- ...
- fn add(p: &mut usize, v: usize) {
- *p += v;
- }
- ...
- fn inc(p: &mut usize) -> usize {
- *p += 1; *p
- }
- fn weird() {
- let mut x: Box<Foo> = box Foo { ... };
- 'a: add(&mut (*x).f,
- 'b: inc(&mut (*x).f)) // (..)
- }
+```rust
+struct Foo { f: usize, g: usize }
+// ...
+fn add(p: &mut usize, v: usize) {
+ *p += v;
+}
+// ...
+fn inc(p: &mut usize) -> usize {
+ *p += 1; *p
+}
+fn weird() {
+ let mut x: Box<Foo> = box Foo { /* ... */ };
+ 'a: add(&mut (*x).f,
+ 'b: inc(&mut (*x).f)) // (..)
+}
+```
The important part is the line marked `(..)` which contains a call to
`add()`. The first argument is a mutable borrow of the field `f`. The
involved with `'a` in detail. We'll break apart all the steps involved
in a call expression:
- 'a: {
- 'a_arg1: let a_temp1: ... = add;
- 'a_arg2: let a_temp2: &'a mut usize = &'a mut (*x).f;
- 'a_arg3: let a_temp3: usize = {
- let b_temp1: ... = inc;
- let b_temp2: &'b = &'b mut (*x).f;
- 'b_call: b_temp1(b_temp2)
- };
- 'a_call: a_temp1(a_temp2, a_temp3) // (**)
- }
+```rust
+'a: {
+ 'a_arg1: let a_temp1: ... = add;
+ 'a_arg2: let a_temp2: &'a mut usize = &'a mut (*x).f;
+ 'a_arg3: let a_temp3: usize = {
+ let b_temp1: ... = inc;
+ let b_temp2: &'b = &'b mut (*x).f;
+ 'b_call: b_temp1(b_temp2)
+ };
+ 'a_call: a_temp1(a_temp2, a_temp3) // (**)
+}
+```
Here we see that the lifetime `'a` includes a number of substatements.
In particular, there is this lifetime I've called `'a_call` that
argument, it can still be *invalidated* by that evaluation. Consider
this similar but unsound example:
- struct Foo { f: usize, g: usize }
- ...
- fn add(p: &mut usize, v: usize) {
- *p += v;
- }
- ...
- fn consume(x: Box<Foo>) -> usize {
- x.f + x.g
- }
- fn weird() {
- let mut x: Box<Foo> = box Foo { ... };
- 'a: add(&mut (*x).f, consume(x)) // (..)
- }
+```rust
+struct Foo { f: usize, g: usize }
+// ...
+fn add(p: &mut usize, v: usize) {
+ *p += v;
+}
+// ...
+fn consume(x: Box<Foo>) -> usize {
+ x.f + x.g
+}
+fn weird() {
+    let mut x: Box<Foo> = box Foo { /* ... */ };
+ 'a: add(&mut (*x).f, consume(x)) // (..)
+}
+```
In this case, the second argument to `add` actually consumes `x`, thus
invalidating the first argument.
};
if output_template.is_empty() {
- bug!("empty string provided as RUST_REGION_GRAPH");
+ panic!("empty string provided as RUST_REGION_GRAPH");
}
if output_template.contains('%') {
self.tables = old_tables;
}
+ fn visit_body(&mut self, body: &'tcx hir::Body) {
+ run_lints!(self, check_body, late_passes, body);
+ hir_visit::walk_body(self, body);
+ run_lints!(self, check_body_post, late_passes, body);
+ }
+
fn visit_item(&mut self, it: &'tcx hir::Item) {
self.with_lint_attrs(&it.attrs, |cx| {
run_lints!(cx, check_item, late_passes, it);
// FIXME: eliminate the duplication with `Visitor`. But this also
// contains a few lint-specific methods with no equivalent in `Visitor`.
pub trait LateLintPass<'a, 'tcx>: LintPass {
+ fn check_body(&mut self, _: &LateContext, _: &'tcx hir::Body) { }
+ fn check_body_post(&mut self, _: &LateContext, _: &'tcx hir::Body) { }
fn check_name(&mut self, _: &LateContext, _: Span, _: ast::Name) { }
fn check_crate(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Crate) { }
fn check_crate_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Crate) { }
}
pub enum LoadedMacro {
- MacroRules(ast::MacroDef),
+ MacroDef(ast::Item),
ProcMacro(Rc<SyntaxExtension>),
}
use hir::def_id::DefId;
use ty::subst::Substs;
use ty::{self, AdtDef, ClosureSubsts, Region, Ty};
+use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use util::ppaux;
use rustc_back::slice;
use hir::InlineAsm;
}
/// Lowered representation of a single function.
-// Do not implement clone for Mir, which can be accidently done and kind of expensive.
-#[derive(RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Mir<'tcx> {
/// List of basic blocks. References to basic block use a newtyped index type `BasicBlock`
/// that indexes into this vector.
}
}
}
+
+
+/*
+ * TypeFoldable implementations for MIR types
+ */
+
+impl<'tcx> TypeFoldable<'tcx> for Mir<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ Mir {
+ basic_blocks: self.basic_blocks.fold_with(folder),
+ visibility_scopes: self.visibility_scopes.clone(),
+ promoted: self.promoted.fold_with(folder),
+ return_ty: self.return_ty.fold_with(folder),
+ local_decls: self.local_decls.fold_with(folder),
+ arg_count: self.arg_count,
+ upvar_decls: self.upvar_decls.clone(),
+ spread_arg: self.spread_arg,
+ span: self.span,
+ cache: cache::Cache::new()
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.basic_blocks.visit_with(visitor) ||
+ self.promoted.visit_with(visitor) ||
+ self.return_ty.visit_with(visitor) ||
+ self.local_decls.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for LocalDecl<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ LocalDecl {
+ ty: self.ty.fold_with(folder),
+ ..self.clone()
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.ty.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for BasicBlockData<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ BasicBlockData {
+ statements: self.statements.fold_with(folder),
+ terminator: self.terminator.fold_with(folder),
+ is_cleanup: self.is_cleanup
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.statements.visit_with(visitor) || self.terminator.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Statement<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ use mir::StatementKind::*;
+
+ let kind = match self.kind {
+ Assign(ref lval, ref rval) => Assign(lval.fold_with(folder), rval.fold_with(folder)),
+ SetDiscriminant { ref lvalue, variant_index } => SetDiscriminant {
+ lvalue: lvalue.fold_with(folder),
+ variant_index: variant_index
+ },
+ StorageLive(ref lval) => StorageLive(lval.fold_with(folder)),
+ StorageDead(ref lval) => StorageDead(lval.fold_with(folder)),
+ InlineAsm { ref asm, ref outputs, ref inputs } => InlineAsm {
+ asm: asm.clone(),
+ outputs: outputs.fold_with(folder),
+ inputs: inputs.fold_with(folder)
+ },
+ Nop => Nop,
+ };
+ Statement {
+ source_info: self.source_info,
+ kind: kind
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ use mir::StatementKind::*;
+
+ match self.kind {
+ Assign(ref lval, ref rval) => { lval.visit_with(visitor) || rval.visit_with(visitor) }
+ SetDiscriminant { ref lvalue, .. } |
+ StorageLive(ref lvalue) |
+ StorageDead(ref lvalue) => lvalue.visit_with(visitor),
+ InlineAsm { ref outputs, ref inputs, .. } =>
+ outputs.visit_with(visitor) || inputs.visit_with(visitor),
+ Nop => false,
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ use mir::TerminatorKind::*;
+
+ let kind = match self.kind {
+ Goto { target } => Goto { target: target },
+ SwitchInt { ref discr, switch_ty, ref values, ref targets } => SwitchInt {
+ discr: discr.fold_with(folder),
+ switch_ty: switch_ty.fold_with(folder),
+ values: values.clone(),
+ targets: targets.clone()
+ },
+ Drop { ref location, target, unwind } => Drop {
+ location: location.fold_with(folder),
+ target: target,
+ unwind: unwind
+ },
+ DropAndReplace { ref location, ref value, target, unwind } => DropAndReplace {
+ location: location.fold_with(folder),
+ value: value.fold_with(folder),
+ target: target,
+ unwind: unwind
+ },
+ Call { ref func, ref args, ref destination, cleanup } => {
+ let dest = destination.as_ref().map(|&(ref loc, dest)| {
+ (loc.fold_with(folder), dest)
+ });
+
+ Call {
+ func: func.fold_with(folder),
+ args: args.fold_with(folder),
+ destination: dest,
+ cleanup: cleanup
+ }
+ },
+ Assert { ref cond, expected, ref msg, target, cleanup } => {
+ let msg = if let AssertMessage::BoundsCheck { ref len, ref index } = *msg {
+ AssertMessage::BoundsCheck {
+ len: len.fold_with(folder),
+ index: index.fold_with(folder),
+ }
+ } else {
+ msg.clone()
+ };
+ Assert {
+ cond: cond.fold_with(folder),
+ expected: expected,
+ msg: msg,
+ target: target,
+ cleanup: cleanup
+ }
+ },
+ Resume => Resume,
+ Return => Return,
+ Unreachable => Unreachable,
+ };
+ Terminator {
+ source_info: self.source_info,
+ kind: kind
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ use mir::TerminatorKind::*;
+
+ match self.kind {
+ SwitchInt { ref discr, switch_ty, .. } =>
+ discr.visit_with(visitor) || switch_ty.visit_with(visitor),
+ Drop { ref location, ..} => location.visit_with(visitor),
+ DropAndReplace { ref location, ref value, ..} =>
+ location.visit_with(visitor) || value.visit_with(visitor),
+ Call { ref func, ref args, ref destination, .. } => {
+ let dest = if let Some((ref loc, _)) = *destination {
+ loc.visit_with(visitor)
+ } else { false };
+ dest || func.visit_with(visitor) || args.visit_with(visitor)
+ },
+ Assert { ref cond, ref msg, .. } => {
+ if cond.visit_with(visitor) {
+ if let AssertMessage::BoundsCheck { ref len, ref index } = *msg {
+ len.visit_with(visitor) || index.visit_with(visitor)
+ } else {
+ false
+ }
+ } else {
+ false
+ }
+ },
+ Goto { .. } |
+ Resume |
+ Return |
+ Unreachable => false
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Lvalue<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ match self {
+ &Lvalue::Projection(ref p) => Lvalue::Projection(p.fold_with(folder)),
+ _ => self.clone()
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ if let &Lvalue::Projection(ref p) = self {
+ p.visit_with(visitor)
+ } else {
+ false
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ use mir::Rvalue::*;
+ match *self {
+ Use(ref op) => Use(op.fold_with(folder)),
+ Repeat(ref op, len) => Repeat(op.fold_with(folder), len),
+ Ref(region, bk, ref lval) => Ref(region.fold_with(folder), bk, lval.fold_with(folder)),
+ Len(ref lval) => Len(lval.fold_with(folder)),
+ Cast(kind, ref op, ty) => Cast(kind, op.fold_with(folder), ty.fold_with(folder)),
+ BinaryOp(op, ref rhs, ref lhs) =>
+ BinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
+ CheckedBinaryOp(op, ref rhs, ref lhs) =>
+ CheckedBinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
+ UnaryOp(op, ref val) => UnaryOp(op, val.fold_with(folder)),
+ Discriminant(ref lval) => Discriminant(lval.fold_with(folder)),
+ Box(ty) => Box(ty.fold_with(folder)),
+ Aggregate(ref kind, ref fields) => {
+ let kind = match *kind {
+ AggregateKind::Array(ty) => AggregateKind::Array(ty.fold_with(folder)),
+ AggregateKind::Tuple => AggregateKind::Tuple,
+ AggregateKind::Adt(def, v, substs, n) =>
+ AggregateKind::Adt(def, v, substs.fold_with(folder), n),
+ AggregateKind::Closure(id, substs) =>
+ AggregateKind::Closure(id, substs.fold_with(folder))
+ };
+ Aggregate(kind, fields.fold_with(folder))
+ }
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ use mir::Rvalue::*;
+ match *self {
+ Use(ref op) => op.visit_with(visitor),
+ Repeat(ref op, _) => op.visit_with(visitor),
+ Ref(region, _, ref lval) => region.visit_with(visitor) || lval.visit_with(visitor),
+ Len(ref lval) => lval.visit_with(visitor),
+ Cast(_, ref op, ty) => op.visit_with(visitor) || ty.visit_with(visitor),
+ BinaryOp(_, ref rhs, ref lhs) |
+ CheckedBinaryOp(_, ref rhs, ref lhs) =>
+ rhs.visit_with(visitor) || lhs.visit_with(visitor),
+ UnaryOp(_, ref val) => val.visit_with(visitor),
+ Discriminant(ref lval) => lval.visit_with(visitor),
+ Box(ty) => ty.visit_with(visitor),
+ Aggregate(ref kind, ref fields) => {
+ (match *kind {
+ AggregateKind::Array(ty) => ty.visit_with(visitor),
+ AggregateKind::Tuple => false,
+ AggregateKind::Adt(_, _, substs, _) => substs.visit_with(visitor),
+ AggregateKind::Closure(_, substs) => substs.visit_with(visitor)
+ }) || fields.visit_with(visitor)
+ }
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Operand<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ match *self {
+ Operand::Consume(ref lval) => Operand::Consume(lval.fold_with(folder)),
+ Operand::Constant(ref c) => Operand::Constant(c.fold_with(folder)),
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ match *self {
+ Operand::Consume(ref lval) => lval.visit_with(visitor),
+ Operand::Constant(ref c) => c.visit_with(visitor)
+ }
+ }
+}
+
+impl<'tcx, B, V> TypeFoldable<'tcx> for Projection<'tcx, B, V>
+ where B: TypeFoldable<'tcx>, V: TypeFoldable<'tcx>
+{
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ use mir::ProjectionElem::*;
+
+ let base = self.base.fold_with(folder);
+ let elem = match self.elem {
+ Deref => Deref,
+ Field(f, ty) => Field(f, ty.fold_with(folder)),
+ Index(ref v) => Index(v.fold_with(folder)),
+ ref elem => elem.clone()
+ };
+
+ Projection {
+ base: base,
+ elem: elem
+ }
+ }
+
+ fn super_visit_with<Vs: TypeVisitor<'tcx>>(&self, visitor: &mut Vs) -> bool {
+ use mir::ProjectionElem::*;
+
+ self.base.visit_with(visitor) ||
+ match self.elem {
+ Field(_, ty) => ty.visit_with(visitor),
+ Index(ref v) => v.visit_with(visitor),
+ _ => false
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Constant<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ Constant {
+ span: self.span.clone(),
+ ty: self.ty.fold_with(folder),
+ literal: self.literal.fold_with(folder)
+ }
+ }
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.ty.visit_with(visitor) || self.literal.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Literal<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ match *self {
+ Literal::Item { def_id, substs } => Literal::Item {
+ def_id: def_id,
+ substs: substs.fold_with(folder)
+ },
+ _ => self.clone()
+ }
+ }
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ match *self {
+ Literal::Item { substs, .. } => substs.visit_with(visitor),
+ _ => false
+ }
+ }
+}
ObjectSafetyViolation,
};
+use errors::DiagnosticBuilder;
use fmt_macros::{Parser, Piece, Position};
+use hir::{intravisit, Local, Pat};
+use hir::intravisit::{Visitor, NestedVisitorMap};
+use hir::map::NodeExpr;
use hir::def_id::DefId;
use infer::{self, InferCtxt};
use infer::type_variable::TypeVariableOrigin;
use rustc::lint::builtin::EXTRA_REQUIREMENT_IN_IMPL;
+use std::fmt;
+use syntax::ast;
use ty::{self, AdtKind, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable};
use ty::error::ExpectedFound;
use ty::fast_reject;
use ty::subst::Subst;
use util::nodemap::{FxHashMap, FxHashSet};
-use std::fmt;
-use syntax::ast;
-use hir::{intravisit, Local, Pat};
-use hir::intravisit::{Visitor, NestedVisitorMap};
use syntax_pos::{DUMMY_SP, Span};
-use errors::DiagnosticBuilder;
+
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct TraitErrorKey<'tcx> {
err.span_label(cause.span, &format!("cannot infer type for `{}`", name));
- let expr = self.tcx.hir.expect_expr(cause.body_id);
-
let mut local_visitor = FindLocalByTypeVisitor {
infcx: &self,
target_ty: &ty,
found_pattern: None,
};
- local_visitor.visit_expr(expr);
+ // #40294: cause.body_id can also be a fn declaration.
+ // Currently, if it's anything other than NodeExpr, we just ignore it
+ match self.tcx.hir.find(cause.body_id) {
+ Some(NodeExpr(expr)) => local_visitor.visit_expr(expr),
+ _ => ()
+ }
if let Some(pattern) = local_visitor.found_pattern {
let pattern_span = pattern.span;
let new_trait = tcx.mk_dynamic(
ty::Binder(tcx.mk_existential_predicates(iter)), r_b);
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, new_trait, target)
+ self.infcx.eq_types(false, &obligation.cause, new_trait, target)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
// [T; n] -> [T].
(&ty::TyArray(a, _), &ty::TySlice(b)) => {
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, a, b)
+ self.infcx.eq_types(false, &obligation.cause, a, b)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
}
});
let new_struct = tcx.mk_adt(def, tcx.mk_substs(params));
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, new_struct, target)
+ self.infcx.eq_types(false, &obligation.cause, new_struct, target)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
queries::mir::get(self, DUMMY_SP, did).borrow()
}
+ /// Given the DefId of an item, returns its MIR, borrowed immutably.
+    /// Returns `None` if there is no MIR for the `DefId`.
+ pub fn maybe_item_mir(self, did: DefId) -> Option<Ref<'gcx, Mir<'gcx>>> {
+ if did.is_local() && !self.maps.mir.borrow().contains_key(&did) {
+ return None;
+ }
+
+ if !did.is_local() && !self.sess.cstore.is_item_mir_available(did) {
+ return None;
+ }
+
+ Some(self.item_mir(did))
+ }
+
/// If `type_needs_drop` returns true, then `ty` is definitely
/// non-copy and *might* have a destructor attached; if it returns
/// false, then `ty` definitely has no destructor (i.e. no drop glue).
use ty::{self, Lift, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use rustc_data_structures::accumulate_vec::AccumulateVec;
+use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use std::rc::Rc;
use syntax::abi;
self.expected.visit_with(visitor) || self.found.visit_with(visitor)
}
}
+
+impl<'tcx, T: TypeFoldable<'tcx>, I: Idx> TypeFoldable<'tcx> for IndexVec<I, T> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ self.iter().map(|x| x.fold_with(folder)).collect()
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.iter().any(|t| t.visit_with(visitor))
+ }
+}
}
tcx.layout_depth.set(depth+1);
- let layout = Layout::compute_uncached(self, infcx)?;
+ let layout = Layout::compute_uncached(self, infcx);
+ tcx.layout_depth.set(depth);
+ let layout = layout?;
if can_cache {
tcx.layout_cache.borrow_mut().insert(self, layout);
}
- tcx.layout_depth.set(depth);
Ok(layout)
}
pub type LoanDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, LoanDataFlowOperator>;
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- tcx.dep_graph.with_task(DepNode::BorrowCheckKrate, || {
+ tcx.dep_graph.with_task(DepNode::BorrowCheckKrate, tcx, (), check_crate_task);
+
+ fn check_crate_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
tcx.visit_all_bodies_in_krate(|body_owner_def_id, body_id| {
- tcx.dep_graph.with_task(DepNode::BorrowCheck(body_owner_def_id), || {
- borrowck_fn(tcx, body_id);
- });
+ tcx.dep_graph.with_task(DepNode::BorrowCheck(body_owner_def_id),
+ tcx,
+ body_id,
+ borrowck_fn);
});
- });
+ }
}
/// Collection of conclusions determined via borrow checker analyses.
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::collections::range::RangeArgument;
use std::fmt::Debug;
use std::iter::{self, FromIterator};
use std::slice;
self.raw.iter_mut().enumerate().map(IntoIdx { _marker: PhantomData })
}
+ #[inline]
+ pub fn drain<'a, R: RangeArgument<usize>>(
+ &'a mut self, range: R) -> impl Iterator<Item=T> + 'a {
+ self.raw.drain(range)
+ }
+
+ #[inline]
+ pub fn drain_enumerated<'a, R: RangeArgument<usize>>(
+ &'a mut self, range: R) -> impl Iterator<Item=(I, T)> + 'a {
+ self.raw.drain(range).enumerate().map(IntoIdx { _marker: PhantomData })
+ }
+
#[inline]
pub fn last(&self) -> Option<I> {
self.len().checked_sub(1).map(I::new)
pub fn truncate(&mut self, a: usize) {
self.raw.truncate(a)
}
+
+ #[inline]
+ pub fn get(&self, index: I) -> Option<&T> {
+ self.raw.get(index.index())
+ }
+
+ #[inline]
+ pub fn get_mut(&mut self, index: I) -> Option<&mut T> {
+ self.raw.get_mut(index.index())
+ }
}
impl<I: Idx, T> Index<I> for IndexVec<I, T> {
#![feature(associated_consts)]
#![feature(unsize)]
#![feature(i128_type)]
+#![feature(conservative_impl_trait)]
#![cfg_attr(unix, feature(libc))]
#![cfg_attr(test, feature(test))]
use serialize::json;
use std::env;
-use std::mem;
use std::ffi::{OsString, OsStr};
use std::fs;
use std::io::{self, Write};
};
write_out_deps(sess, &outputs, &crate_name);
+ if sess.opts.output_types.contains_key(&OutputType::DepInfo) &&
+ sess.opts.output_types.keys().count() == 1 {
+ return Ok(())
+ }
let arena = DroplessArena::new();
let arenas = GlobalArenas::new();
let whitelisted_legacy_custom_derives = registry.take_whitelisted_custom_derives();
let Registry { syntax_exts, early_lint_passes, late_lint_passes, lint_groups,
- llvm_passes, attributes, mir_passes, .. } = registry;
+ llvm_passes, attributes, .. } = registry;
sess.track_errors(|| {
let mut ls = sess.lint_store.borrow_mut();
}
*sess.plugin_llvm_passes.borrow_mut() = llvm_passes;
- sess.mir_passes.borrow_mut().extend(mir_passes);
*sess.plugin_attributes.borrow_mut() = attributes.clone();
})?;
krate
});
- krate.exported_macros = mem::replace(&mut resolver.exported_macros, Vec::new());
-
krate = time(time_passes, "maybe building test harness", || {
syntax::test::modify_for_testing(&sess.parse_sess,
&mut resolver,
passes.push_pass(box mir::transform::simplify::SimplifyCfg::new("elaborate-drops"));
// No lifetime analysis based on borrowing can be done from here on out.
+ passes.push_pass(box mir::transform::inline::Inline);
passes.push_pass(box mir::transform::instcombine::InstCombine::new());
passes.push_pass(box mir::transform::deaggregator::Deaggregator);
passes.push_pass(box mir::transform::copy_prop::CopyPropagation);
const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
"sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
"ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
- "sse4a\0", "rdrnd\0", "rdseed\0"];
+ "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
/// Add `target_feature = "..."` cfgs for a variety of platform
/// specific features (SSE, NEON etc.).
let mut annotations_position = vec![];
let mut line_len = 0;
let mut p = 0;
- let mut ann_iter = annotations.iter().peekable();
- while let Some(annotation) = ann_iter.next() {
- let peek = ann_iter.peek();
- if let Some(next) = peek {
- if overlaps(next, annotation) && !annotation.is_line() && !next.is_line()
+ for (i, annotation) in annotations.iter().enumerate() {
+ for (j, next) in annotations.iter().enumerate() {
+ if overlaps(next, annotation, 0) // This label overlaps with another one and both
+ && !annotation.is_line() // take space (they have text and are not
+ && !next.is_line() // multiline lines).
&& annotation.has_label()
+ && j > i
+ && p == 0 // We're currently on the first line, move the label one line down
{
// This annotation needs a new line in the output.
p += 1;
+ break;
}
}
annotations_position.push((p, annotation));
- if let Some(next) = peek {
- let l = if let Some(ref label) = next.label {
- label.len() + 2
- } else {
- 0
- };
- if (overlaps(next, annotation) // Do not allow two labels to be in the same line
- || next.end_col + l > annotation.start_col) // if they overlap including
- // padding, to avoid situations like:
- //
- // fn foo(x: u32) {
- // -------^------
- // | |
- // fn_spanx_span
- //
- && !annotation.is_line() // Do not add a new line if this annotation or the
- && !next.is_line() // next are vertical line placeholders.
- && annotation.has_label() // Both labels must have some text, otherwise
- && next.has_label() // they are not overlapping.
- {
- p += 1;
+ for (j, next) in annotations.iter().enumerate() {
+ if j > i {
+ let l = if let Some(ref label) = next.label {
+ label.len() + 2
+ } else {
+ 0
+ };
+ if overlaps(next, annotation, l) // Do not allow two labels to be in the same
+ // line if they overlap including padding, to
+ // avoid situations like:
+ //
+ // fn foo(x: u32) {
+ // -------^------
+ // | |
+ // fn_spanx_span
+ //
+ && !annotation.is_line() // Do not add a new line if this annotation
+ && !next.is_line() // or the next are vertical line placeholders.
+ && annotation.has_label() // Both labels must have some text, otherwise
+ && next.has_label() // they are not overlapping.
+ {
+ p += 1;
+ break;
+ }
}
}
if line_len < p {
(b_start..b_end + extra).contains(a_start) ||
(a_start..a_end + extra).contains(b_start)
}
-fn overlaps(a1: &Annotation, a2: &Annotation) -> bool {
- num_overlap(a1.start_col, a1.end_col, a2.start_col, a2.end_col, false)
+fn overlaps(a1: &Annotation, a2: &Annotation, padding: usize) -> bool {
+ num_overlap(a1.start_col, a1.end_col + padding, a2.start_col, a2.end_col, false)
}
fn emit_to_destination(rendered_buffer: &Vec<Vec<StyledString>>,
hi_opt: Option<&Loc>) {
let (lo, hi_opt) = (lo.col.to_usize(), hi_opt.map(|hi| hi.col.to_usize()));
if let Some(line) = line_opt {
- if line.len() > lo {
+ if let Some(lo) = line.char_indices().map(|(i, _)| i).nth(lo) {
+ let hi_opt = hi_opt.and_then(|hi| line.char_indices().map(|(i, _)| i).nth(hi));
buf.push_str(match hi_opt {
Some(hi) => &line[lo..hi],
None => &line[lo..],
clean_work_products.insert(wp.clone());
}
- tcx.dep_graph.with_task(n, || ()); // create the node with no inputs
+ tcx.dep_graph.with_task(n, (), (), create_node);
+
+ fn create_node((): (), (): ()) {
+ // just create the node with no inputs
+ }
}
}
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedUnsafe {
fn check_expr(&mut self, cx: &LateContext, e: &hir::Expr) {
+ /// Return the NodeId for an enclosing scope that is also `unsafe`
+ fn is_enclosed(cx: &LateContext, id: ast::NodeId) -> Option<(String, ast::NodeId)> {
+ let parent_id = cx.tcx.hir.get_parent_node(id);
+ if parent_id != id {
+ if cx.tcx.used_unsafe.borrow().contains(&parent_id) {
+ Some(("block".to_string(), parent_id))
+ } else if let Some(hir::map::NodeItem(&hir::Item {
+ node: hir::ItemFn(_, hir::Unsafety::Unsafe, _, _, _, _),
+ ..
+ })) = cx.tcx.hir.find(parent_id) {
+ Some(("fn".to_string(), parent_id))
+ } else {
+ is_enclosed(cx, parent_id)
+ }
+ } else {
+ None
+ }
+ }
if let hir::ExprBlock(ref blk) = e.node {
// Don't warn about generated blocks, that'll just pollute the output.
if blk.rules == hir::UnsafeBlock(hir::UserProvided) &&
!cx.tcx.used_unsafe.borrow().contains(&blk.id) {
- cx.span_lint(UNUSED_UNSAFE, blk.span, "unnecessary `unsafe` block");
+
+ let mut db = cx.struct_span_lint(UNUSED_UNSAFE, blk.span,
+ "unnecessary `unsafe` block");
+
+ db.span_label(blk.span, &"unnecessary `unsafe` block");
+ if let Some((kind, id)) = is_enclosed(cx, blk.id) {
+ db.span_note(cx.tcx.hir.span(id),
+ &format!("because it's nested under this `unsafe` {}", kind));
+ }
+ db.emit();
}
}
}
sess.imported_macro_spans.borrow_mut()
.insert(local_span, (name.to_string(), data.get_span(id.index, sess)));
- LoadedMacro::MacroRules(ast::MacroDef {
+ LoadedMacro::MacroDef(ast::Item {
ident: ast::Ident::with_empty_ctxt(name),
id: ast::DUMMY_NODE_ID,
span: local_span,
attrs: attrs,
- body: body.into(),
+ node: ast::ItemKind::MacroDef(body.into()),
+ vis: ast::Visibility::Inherited,
})
}
(https://github.com/rust-lang/rust/issues/39283)");
}
- if temp_lifetime.is_some() {
+ if !expr_ty.is_never() && temp_lifetime.is_some() {
this.cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageLive(temp.clone())
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! MIR-based callgraph.
+//!
+//! This only considers direct calls
+
+use rustc::hir::def_id::DefId;
+use rustc_data_structures::graph;
+
+use rustc::mir::*;
+use rustc::mir::visit::*;
+
+use rustc::ty;
+
+use rustc::util::nodemap::DefIdMap;
+
+pub struct CallGraph {
+ node_map: DefIdMap<graph::NodeIndex>,
+ graph: graph::Graph<DefId, ()>
+}
+
+impl CallGraph {
+ // FIXME: allow for construction of a callgraph that inspects
+ // cross-crate MIRs if available.
+ pub fn build<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>) -> CallGraph {
+ let def_ids = tcx.maps.mir.borrow().keys();
+
+ let mut callgraph = CallGraph {
+ node_map: DefIdMap(),
+ graph: graph::Graph::new()
+ };
+
+ for def_id in def_ids {
+ if !def_id.is_local() { continue; }
+
+ let idx = callgraph.add_node(def_id);
+
+ let mut call_visitor = CallVisitor {
+ caller: idx,
+ graph: &mut callgraph
+ };
+
+ let mir = tcx.item_mir(def_id);
+ call_visitor.visit_mir(&mir);
+ }
+
+ callgraph
+ }
+
+ // Iterate over the strongly-connected components of the graph
+ pub fn scc_iter(&self) -> SCCIterator {
+ SCCIterator::new(&self.graph)
+ }
+
+ // Get the def_id for the given graph node
+ pub fn def_id(&self, node: graph::NodeIndex) -> DefId {
+ *self.graph.node_data(node)
+ }
+
+ fn add_node(&mut self, id: DefId) -> graph::NodeIndex {
+ let graph = &mut self.graph;
+ *self.node_map.entry(id).or_insert_with(|| {
+ graph.add_node(id)
+ })
+ }
+}
+
+struct CallVisitor<'a> {
+ caller: graph::NodeIndex,
+ graph: &'a mut CallGraph
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for CallVisitor<'a> {
+ fn visit_terminator_kind(&mut self, _block: BasicBlock,
+ kind: &TerminatorKind<'tcx>, _loc: Location) {
+ if let TerminatorKind::Call {
+ func: Operand::Constant(ref f)
+ , .. } = *kind {
+ if let ty::TyFnDef(def_id, _, _) = f.ty.sty {
+ let callee = self.graph.add_node(def_id);
+ self.graph.graph.add_edge(self.caller, callee, ());
+ }
+ }
+ }
+}
+
+struct StackElement<'g> {
+ node: graph::NodeIndex,
+ lowlink: usize,
+ children: graph::AdjacentTargets<'g, DefId, ()>
+}
+
+/**
+ * Iterator over strongly-connected-components using Tarjan's algorithm[1]
+ *
+ * [1]: https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
+ */
+pub struct SCCIterator<'g> {
+ graph: &'g graph::Graph<DefId, ()>,
+ index: usize,
+ node_indices: Vec<Option<usize>>,
+ scc_stack: Vec<graph::NodeIndex>,
+ current_scc: Vec<graph::NodeIndex>,
+ visit_stack: Vec<StackElement<'g>>,
+}
+
+impl<'g> SCCIterator<'g> {
+ pub fn new(graph: &'g graph::Graph<DefId, ()>) -> SCCIterator<'g> {
+ if graph.len_nodes() == 0 {
+ return SCCIterator {
+ graph: graph,
+ index: 0,
+ node_indices: Vec::new(),
+ scc_stack: Vec::new(),
+ current_scc: Vec::new(),
+ visit_stack: Vec::new()
+ };
+ }
+
+ let first = graph::NodeIndex(0);
+
+ SCCIterator::with_entry(graph, first)
+ }
+
+ pub fn with_entry(graph: &'g graph::Graph<DefId, ()>,
+ entry: graph::NodeIndex) -> SCCIterator<'g> {
+ let mut iter = SCCIterator {
+ graph: graph,
+ index: 0,
+ node_indices: Vec::with_capacity(graph.len_nodes()),
+ scc_stack: Vec::new(),
+ current_scc: Vec::new(),
+ visit_stack: Vec::new()
+ };
+
+ iter.visit_one(entry);
+
+ iter
+ }
+
+ fn get_next(&mut self) {
+ self.current_scc.clear();
+
+ while !self.visit_stack.is_empty() {
+ self.visit_children();
+
+ let node = self.visit_stack.pop().unwrap();
+
+ if let Some(last) = self.visit_stack.last_mut() {
+ if last.lowlink > node.lowlink {
+ last.lowlink = node.lowlink;
+ }
+ }
+
+ debug!("TarjanSCC: Popped node {:?} : lowlink = {:?}; index = {:?}",
+ node.node, node.lowlink, self.node_index(node.node).unwrap());
+
+ if node.lowlink != self.node_index(node.node).unwrap() {
+ continue;
+ }
+
+ loop {
+ let n = self.scc_stack.pop().unwrap();
+ self.current_scc.push(n);
+ self.set_node_index(n, !0);
+ if n == node.node { return; }
+ }
+ }
+ }
+
+ fn visit_one(&mut self, node: graph::NodeIndex) {
+ self.index += 1;
+ let idx = self.index;
+ self.set_node_index(node, idx);
+ self.scc_stack.push(node);
+ self.visit_stack.push(StackElement {
+ node: node,
+ lowlink: self.index,
+ children: self.graph.successor_nodes(node)
+ });
+ debug!("TarjanSCC: Node {:?} : index = {:?}", node, idx);
+ }
+
+ fn visit_children(&mut self) {
+ while let Some(child) = self.visit_stack.last_mut().unwrap().children.next() {
+ if let Some(child_num) = self.node_index(child) {
+ let cur = self.visit_stack.last_mut().unwrap();
+ if cur.lowlink > child_num {
+ cur.lowlink = child_num;
+ }
+ } else {
+ self.visit_one(child);
+ }
+ }
+ }
+
+ fn node_index(&self, node: graph::NodeIndex) -> Option<usize> {
+ self.node_indices.get(node.node_id()).and_then(|&idx| idx)
+ }
+
+ fn set_node_index(&mut self, node: graph::NodeIndex, idx: usize) {
+ let i = node.node_id();
+ if i >= self.node_indices.len() {
+ self.node_indices.resize(i + 1, None);
+ }
+ self.node_indices[i] = Some(idx);
+ }
+}
+
+impl<'g> Iterator for SCCIterator<'g> {
+ type Item = Vec<graph::NodeIndex>;
+
+ fn next(&mut self) -> Option<Vec<graph::NodeIndex>> {
+ self.get_next();
+
+ if self.current_scc.is_empty() {
+            // Try a new root for the next SCC: if the node_indices
+            // map doesn't contain all nodes, use the smallest one
+            // with no entry; otherwise find the first empty node.
+ //
+ // FIXME: This should probably use a set of precomputed
+ // roots instead
+ if self.node_indices.len() < self.graph.len_nodes() {
+ let idx = graph::NodeIndex(self.node_indices.len());
+ self.visit_one(idx);
+ } else {
+ for idx in 0..self.node_indices.len() {
+ if self.node_indices[idx].is_none() {
+ let idx = graph::NodeIndex(idx);
+ self.visit_one(idx);
+ break;
+ }
+ }
+ }
+ self.get_next();
+ }
+
+ if self.current_scc.is_empty() {
+ None
+ } else {
+ Some(self.current_scc.clone())
+ }
+ }
+}
pub mod diagnostics;
pub mod build;
+pub mod callgraph;
pub mod def_use;
pub mod graphviz;
mod hair;
pub fn provide(providers: &mut Providers) {
mir_map::provide(providers);
transform::qualify_consts::provide(providers);
-}
+}
\ No newline at end of file
use std::mem;
+/// Forces MIR construction for every body owner in the crate by
+/// requesting `item_mir` for each, under the `MirKrate` dep-graph task.
pub fn build_mir_for_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    tcx.dep_graph.with_task(DepNode::MirKrate, || {
+    // The task body is a named fn rather than a closure so `with_task`
+    // can take it with its context (`tcx`) passed explicitly.
+    tcx.dep_graph.with_task(DepNode::MirKrate, tcx, (), build_mir_for_crate_task);
+
+    fn build_mir_for_crate_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
        tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
            tcx.item_mir(body_owner_def_id);
        });
-    });
+    }
}
pub fn provide(providers: &mut Providers) {
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Inlining pass for MIR functions
+
+use rustc::hir::def_id::DefId;
+
+use rustc_data_structures::bitvec::BitVector;
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use rustc_data_structures::graph;
+
+use rustc::dep_graph::DepNode;
+use rustc::mir::*;
+use rustc::mir::transform::{MirMapPass, MirPassHook, MirSource, Pass};
+use rustc::mir::visit::*;
+use rustc::traits;
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::subst::{Subst,Substs};
+use rustc::util::nodemap::{DefIdSet};
+
+use super::simplify::{remove_dead_blocks, CfgSimplifier};
+
+use syntax::{attr};
+use syntax::abi::Abi;
+
+use callgraph;
+
+const DEFAULT_THRESHOLD: usize = 50;
+const HINT_THRESHOLD: usize = 100;
+
+const INSTR_COST: usize = 5;
+const CALL_PENALTY: usize = 25;
+
+const UNKNOWN_SIZE_COST: usize = 10;
+
+/// MIR pass that inlines function calls, driven by the strongly
+/// connected components of the crate's call graph.
+pub struct Inline;
+
+impl<'tcx> MirMapPass<'tcx> for Inline {
+    /// Runs inlining over all local MIR bodies. Active only when
+    /// `-Z mir-opt-level` is 2 or higher. Three phases: "before" hooks
+    /// on every body, inlining per call-graph SCC, "after" hooks.
+    fn run_pass<'a>(
+        &mut self,
+        tcx: TyCtxt<'a, 'tcx, 'tcx>,
+        hooks: &mut [Box<for<'s> MirPassHook<'s>>]) {
+
+        if tcx.sess.opts.debugging_opts.mir_opt_level < 2 { return; }
+
+        // Call-graph construction reads many MIRs wholesale; ignore
+        // those reads here and re-establish precise per-def dep-graph
+        // tasks inside the loops below.
+        let _ignore = tcx.dep_graph.in_ignore();
+
+        let callgraph = callgraph::CallGraph::build(tcx);
+
+        let mut inliner = Inliner {
+            tcx: tcx,
+        };
+
+        let def_ids = tcx.maps.mir.borrow().keys();
+        // Phase 1: run the "before" hooks on every local body.
+        for &def_id in &def_ids {
+            if !def_id.is_local() { continue; }
+
+            let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+            // Skip bodies whose MIR is no longer in the map.
+            let mut mir = if let Some(mir) = tcx.maps.mir.borrow().get(&def_id) {
+                mir.borrow_mut()
+            } else {
+                continue;
+            };
+
+            tcx.dep_graph.write(DepNode::Mir(def_id));
+
+            let id = tcx.hir.as_local_node_id(def_id).unwrap();
+            let src = MirSource::from_node(tcx, id);
+
+            for hook in &mut *hooks {
+                hook.on_mir_pass(tcx, src, &mut mir, self, false);
+            }
+        }
+
+        // Phase 2: inline within each SCC of the call graph.
+        for scc in callgraph.scc_iter() {
+            inliner.inline_scc(&callgraph, &scc);
+        }
+
+        // Phase 3: run the "after" hooks on every local body.
+        for def_id in def_ids {
+            if !def_id.is_local() { continue; }
+
+            let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+            let mut mir = tcx.maps.mir.borrow()[&def_id].borrow_mut();
+            tcx.dep_graph.write(DepNode::Mir(def_id));
+
+            let id = tcx.hir.as_local_node_id(def_id).unwrap();
+            let src = MirSource::from_node(tcx, id);
+
+            for hook in &mut *hooks {
+                hook.on_mir_pass(tcx, src, &mut mir, self, true);
+            }
+        }
+    }
+}
+
+impl<'tcx> Pass for Inline { }
+
+/// Holds the shared context (`tcx`) for the inlining machinery.
+struct Inliner<'a, 'tcx: 'a> {
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+}
+
+/// One inlining candidate: `caller` invokes `callee` (instantiated
+/// with `substs`) from the terminator of block `bb`; `location` is the
+/// call's source info, reused for code synthesized while inlining it.
+#[derive(Copy, Clone)]
+struct CallSite<'tcx> {
+    caller: DefId,
+    callee: DefId,
+    substs: &'tcx Substs<'tcx>,
+    bb: BasicBlock,
+    location: SourceInfo,
+}
+
+impl<'a, 'tcx> Inliner<'a, 'tcx> {
+    /// Attempts inlining at every call site whose caller belongs to
+    /// the given SCC, sweeping the call-site list to a fixed point
+    /// (inlining can expose new call sites). Returns true if any MIR
+    /// was changed.
+    fn inline_scc(&mut self, callgraph: &callgraph::CallGraph, scc: &[graph::NodeIndex]) -> bool {
+        let mut callsites = Vec::new();
+        let mut in_scc = DefIdSet();
+
+        // Callers we actually inlined into; these get a CFG cleanup at
+        // the end.
+        let mut inlined_into = DefIdSet();
+
+        // Collect the initial candidate call sites from every local
+        // function in the SCC.
+        for &node in scc {
+            let def_id = callgraph.def_id(node);
+
+            // Don't inspect functions from other crates
+            let id = if let Some(id) = self.tcx.hir.as_local_node_id(def_id) {
+                id
+            } else {
+                continue;
+            };
+            let src = MirSource::from_node(self.tcx, id);
+            if let MirSource::Fn(_) = src {
+                if let Some(mir) = self.tcx.maybe_item_mir(def_id) {
+                    for (bb, bb_data) in mir.basic_blocks().iter_enumerated() {
+                        // Don't inline calls that are in cleanup blocks.
+                        if bb_data.is_cleanup { continue; }
+
+                        // Only consider direct calls to functions
+                        let terminator = bb_data.terminator();
+                        if let TerminatorKind::Call {
+                            func: Operand::Constant(ref f), .. } = terminator.kind {
+                            if let ty::TyFnDef(callee_def_id, substs, _) = f.ty.sty {
+                                callsites.push(CallSite {
+                                    caller: def_id,
+                                    callee: callee_def_id,
+                                    substs: substs,
+                                    bb: bb,
+                                    location: terminator.source_info
+                                });
+                            }
+                        }
+                    }
+
+                    in_scc.insert(def_id);
+                }
+            }
+        }
+
+        // Move callsites that are in the SCC to the end so
+        // they're inlined after calls to outside the SCC
+        let mut first_call_in_scc = callsites.len();
+
+        let mut i = 0;
+        while i < first_call_in_scc {
+            let f = callsites[i].caller;
+            if in_scc.contains(&f) {
+                first_call_in_scc -= 1;
+                callsites.swap(i, first_call_in_scc);
+            } else {
+                i += 1;
+            }
+        }
+
+        let mut local_change;
+        let mut changed = false;
+
+        // Fixed-point loop: repeat full sweeps over the call-site list
+        // until a sweep performs no inlining at all.
+        loop {
+            local_change = false;
+            let mut csi = 0;
+            while csi < callsites.len() {
+                let callsite = callsites[csi];
+                csi += 1;
+
+                let _task = self.tcx.dep_graph.in_task(DepNode::Mir(callsite.caller));
+                self.tcx.dep_graph.write(DepNode::Mir(callsite.caller));
+
+                let callee_mir = {
+                    if let Some(callee_mir) = self.tcx.maybe_item_mir(callsite.callee) {
+                        if !self.should_inline(callsite, &callee_mir) {
+                            continue;
+                        }
+
+                        // Instantiate the call's type arguments into a
+                        // fresh copy of the callee's MIR.
+                        callee_mir.subst(self.tcx, callsite.substs)
+                    } else {
+                        continue;
+                    }
+
+                };
+
+                let mut caller_mir = {
+                    let map = self.tcx.maps.mir.borrow();
+                    let mir = map.get(&callsite.caller).unwrap();
+                    mir.borrow_mut()
+                };
+
+                // Blocks appended from here on are the inlined body.
+                let start = caller_mir.basic_blocks().len();
+
+                if !self.inline_call(callsite, &mut caller_mir, callee_mir) {
+                    continue;
+                }
+
+                inlined_into.insert(callsite.caller);
+
+                // Add callsites from inlined function
+                for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
+                    // Only consider direct calls to functions
+                    let terminator = bb_data.terminator();
+                    if let TerminatorKind::Call {
+                        func: Operand::Constant(ref f), .. } = terminator.kind {
+                        if let ty::TyFnDef(callee_def_id, substs, _) = f.ty.sty {
+                            // Don't inline the same function multiple times.
+                            if callsite.callee != callee_def_id {
+                                callsites.push(CallSite {
+                                    caller: callsite.caller,
+                                    callee: callee_def_id,
+                                    substs: substs,
+                                    bb: bb,
+                                    location: terminator.source_info
+                                });
+                            }
+                        }
+                    }
+                }
+
+                // Remove the consumed call site. `csi` was already
+                // advanced, so step back first. For a single-node SCC
+                // the outside-SCC-first ordering is moot, so the O(1)
+                // swap_remove is safe; otherwise preserve order.
+                csi -= 1;
+                if scc.len() == 1 {
+                    callsites.swap_remove(csi);
+                } else {
+                    callsites.remove(csi);
+                }
+
+                local_change = true;
+                changed = true;
+            }
+
+            if !local_change {
+                break;
+            }
+        }
+
+        // Simplify functions we inlined into.
+        for def_id in inlined_into {
+            let _task = self.tcx.dep_graph.in_task(DepNode::Mir(def_id));
+            self.tcx.dep_graph.write(DepNode::Mir(def_id));
+
+            let mut caller_mir = {
+                let map = self.tcx.maps.mir.borrow();
+                let mir = map.get(&def_id).unwrap();
+                mir.borrow_mut()
+            };
+
+            debug!("Running simplify cfg on {:?}", def_id);
+            CfgSimplifier::new(&mut caller_mir).simplify();
+            remove_dead_blocks(&mut caller_mir);
+        }
+        changed
+    }
+
+    /// Cost-model heuristic: decides whether to inline `callsite`
+    /// given the callee's MIR. Estimates a cost by walking the CFG
+    /// (as it would look after inlining) plus a per-local size charge,
+    /// and compares it against a threshold derived from `#[inline]`
+    /// attributes. `#[inline(always)]` bypasses the threshold entirely.
+    fn should_inline(&self, callsite: CallSite<'tcx>,
+                     callee_mir: &'a Mir<'tcx>) -> bool {
+
+        let tcx = self.tcx;
+
+        // Don't inline closures that have captures
+        // FIXME: Handle closures better
+        if callee_mir.upvar_decls.len() > 0 {
+            return false;
+        }
+
+
+        let attrs = tcx.get_attrs(callsite.callee);
+        let hint = attr::find_inline_attr(None, &attrs[..]);
+
+        let hinted = match hint {
+            // Just treat inline(always) as a hint for now,
+            // there are cases that prevent inlining that we
+            // need to check for first.
+            attr::InlineAttr::Always => true,
+            attr::InlineAttr::Never => return false,
+            attr::InlineAttr::Hint => true,
+            attr::InlineAttr::None => false,
+        };
+
+        // Only inline local functions if they would be eligible for cross-crate
+        // inlining. This is to ensure that the final crate doesn't have MIR that
+        // reference unexported symbols
+        if callsite.callee.is_local() {
+            // Non-generic, non-#[inline] local fns aren't exported.
+            if callsite.substs.types().count() == 0 && !hinted {
+                return false;
+            }
+        }
+
+        let mut threshold = if hinted {
+            HINT_THRESHOLD
+        } else {
+            DEFAULT_THRESHOLD
+        };
+
+        // Significantly lower the threshold for inlining cold functions
+        if attr::contains_name(&attrs[..], "cold") {
+            threshold /= 5;
+        }
+
+        // Give a bonus functions with a small number of blocks,
+        // We normally have two or three blocks for even
+        // very small functions.
+        if callee_mir.basic_blocks().len() <= 3 {
+            threshold += threshold / 4;
+        }
+
+        // FIXME: Give a bonus to functions with only a single caller
+
+        let id = tcx.hir.as_local_node_id(callsite.caller).expect("Caller not local");
+        let param_env = ty::ParameterEnvironment::for_item(tcx, id);
+
+        let mut first_block = true;
+        let mut cost = 0;
+
+        // Traverse the MIR manually so we can account for the effects of
+        // inlining on the CFG.
+        let mut work_list = vec![START_BLOCK];
+        let mut visited = BitVector::new(callee_mir.basic_blocks().len());
+        while let Some(bb) = work_list.pop() {
+            if !visited.insert(bb.index()) { continue; }
+            let blk = &callee_mir.basic_blocks()[bb];
+
+            for stmt in &blk.statements {
+                // Don't count StorageLive/StorageDead in the inlining cost.
+                match stmt.kind {
+                    StatementKind::StorageLive(_) |
+                    StatementKind::StorageDead(_) |
+                    StatementKind::Nop => {}
+                    _ => cost += INSTR_COST
+                }
+            }
+            let term = blk.terminator();
+            let mut is_drop = false;
+            match term.kind {
+                TerminatorKind::Drop { ref location, target, unwind } |
+                TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
+                    is_drop = true;
+                    work_list.push(target);
+                    // If the location doesn't actually need dropping, treat it like
+                    // a regular goto.
+                    let ty = location.ty(&callee_mir, tcx).subst(tcx, callsite.substs);
+                    let ty = ty.to_ty(tcx);
+                    if tcx.type_needs_drop_given_env(ty, &param_env) {
+                        cost += CALL_PENALTY;
+                        // Only real drops reach their unwind edge.
+                        if let Some(unwind) = unwind {
+                            work_list.push(unwind);
+                        }
+                    } else {
+                        cost += INSTR_COST;
+                    }
+                }
+
+                TerminatorKind::Unreachable |
+                TerminatorKind::Call { destination: None, .. } if first_block => {
+                    // If the function always diverges, don't inline
+                    // unless the cost is zero
+                    threshold = 0;
+                }
+
+                TerminatorKind::Call {func: Operand::Constant(ref f), .. } => {
+                    if let ty::TyFnDef(.., f) = f.ty.sty {
+                        // Don't give intrinsics the extra penalty for calls
+                        if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
+                            cost += INSTR_COST;
+                        } else {
+                            cost += CALL_PENALTY;
+                        }
+                    }
+                }
+                TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
+                _ => cost += INSTR_COST
+            }
+
+            // Drops pushed their successors above; everything else
+            // enqueues all successors here.
+            if !is_drop {
+                for &succ in &term.successors()[..] {
+                    work_list.push(succ);
+                }
+            }
+
+            first_block = false;
+        }
+
+        // Count up the cost of local variables and temps, if we know the size
+        // use that, otherwise we use a moderately-large dummy cost.
+
+        let ptr_size = tcx.data_layout.pointer_size.bytes();
+
+        for v in callee_mir.vars_and_temps_iter() {
+            let v = &callee_mir.local_decls[v];
+            let ty = v.ty.subst(tcx, callsite.substs);
+            // Cost of the var is the size in machine-words, if we know
+            // it.
+            if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
+                cost += (size / ptr_size) as usize;
+            } else {
+                cost += UNKNOWN_SIZE_COST;
+            }
+        }
+
+        debug!("Inline cost for {:?} is {}", callsite.callee, cost);
+
+        if let attr::InlineAttr::Always = hint {
+            true
+        } else {
+            cost <= threshold
+        }
+    }
+
+
+    /// Splices `callee_mir` into `caller_mir` at `callsite`. Only
+    /// converging `Call` terminators (with a destination) are handled;
+    /// for anything else the original terminator is restored and false
+    /// is returned.
+    fn inline_call(&self, callsite: CallSite<'tcx>,
+                   caller_mir: &mut Mir<'tcx>, mut callee_mir: Mir<'tcx>) -> bool {
+
+        // Don't inline a function into itself
+        if callsite.caller == callsite.callee { return false; }
+
+        let _task = self.tcx.dep_graph.in_task(DepNode::Mir(callsite.caller));
+
+
+        let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
+        match terminator.kind {
+            // FIXME: Handle inlining of diverging calls
+            TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
+
+                debug!("Inlined {:?} into {:?}", callsite.callee, callsite.caller);
+
+                let is_box_free = Some(callsite.callee) == self.tcx.lang_items.box_free_fn();
+
+                // Maps from callee-local indices to the caller's.
+                let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
+                let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
+                let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());
+
+                // Re-home the callee's visibility scopes into the caller.
+                for mut scope in callee_mir.visibility_scopes.iter().cloned() {
+                    if scope.parent_scope.is_none() {
+                        scope.parent_scope = Some(callsite.location.scope);
+                        scope.span = callee_mir.span;
+                    }
+
+                    // NOTE(review): this unconditionally overwrites the
+                    // `callee_mir.span` assigned just above for root
+                    // scopes, making that write dead — confirm which
+                    // span is intended.
+                    scope.span = callsite.location.span;
+
+                    let idx = caller_mir.visibility_scopes.push(scope);
+                    scope_map.push(idx);
+                }
+
+                // Copy the callee's vars/temps into the caller,
+                // retargeting their scopes and spans to the call site.
+                for loc in callee_mir.vars_and_temps_iter() {
+                    let mut local = callee_mir.local_decls[loc].clone();
+
+                    if let Some(ref mut source_info) = local.source_info {
+                        source_info.scope = scope_map[source_info.scope];
+
+                        source_info.span = callsite.location.span;
+                    }
+
+                    let idx = caller_mir.local_decls.push(local);
+                    local_map.push(idx);
+                }
+
+                for p in callee_mir.promoted.iter().cloned() {
+                    let idx = caller_mir.promoted.push(p);
+                    promoted_map.push(idx);
+                }
+
+                // If the call is something like `a[*i] = f(i)`, where
+                // `i : &mut usize`, then just duplicating the `a[*i]`
+                // Lvalue could result in two different locations if `f`
+                // writes to `i`. To prevent this we need to create a temporary
+                // borrow of the lvalue and pass the destination as `*temp` instead.
+                fn dest_needs_borrow(lval: &Lvalue) -> bool {
+                    match *lval {
+                        Lvalue::Projection(ref p) => {
+                            match p.elem {
+                                ProjectionElem::Deref |
+                                ProjectionElem::Index(_) => true,
+                                _ => dest_needs_borrow(&p.base)
+                            }
+                        }
+                        // Static variables need a borrow because the callee
+                        // might modify the same static.
+                        Lvalue::Static(_) => true,
+                        _ => false
+                    }
+                }
+
+                let dest = if dest_needs_borrow(&destination.0) {
+                    debug!("Creating temp for return destination");
+                    let dest = Rvalue::Ref(
+                        self.tcx.mk_region(ty::ReErased),
+                        BorrowKind::Mut,
+                        destination.0);
+
+                    let ty = dest.ty(caller_mir, self.tcx);
+
+                    let temp = LocalDecl::new_temp(ty);
+
+                    let tmp = caller_mir.local_decls.push(temp);
+                    let tmp = Lvalue::Local(tmp);
+
+                    let stmt = Statement {
+                        source_info: callsite.location,
+                        kind: StatementKind::Assign(tmp.clone(), dest)
+                    };
+                    caller_mir[callsite.bb]
+                        .statements.push(stmt);
+                    tmp.deref()
+                } else {
+                    destination.0
+                };
+
+                let return_block = destination.1;
+
+                let args : Vec<_> = if is_box_free {
+                    assert!(args.len() == 1);
+                    // box_free takes a Box, but is defined with a *mut T, inlining
+                    // needs to generate the cast.
+                    // FIXME: we should probably just generate correct MIR in the first place...
+
+                    let arg = if let Operand::Consume(ref lval) = args[0] {
+                        lval.clone()
+                    } else {
+                        bug!("Constant arg to \"box_free\"");
+                    };
+
+                    let ptr_ty = args[0].ty(caller_mir, self.tcx);
+                    vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
+                } else {
+                    // Copy the arguments if needed.
+                    self.make_call_args(args, &callsite, caller_mir)
+                };
+
+                let bb_len = caller_mir.basic_blocks().len();
+                let mut integrator = Integrator {
+                    block_idx: bb_len,
+                    args: &args,
+                    local_map: local_map,
+                    scope_map: scope_map,
+                    promoted_map: promoted_map,
+                    _callsite: callsite,
+                    destination: dest,
+                    return_block: return_block,
+                    cleanup_block: cleanup,
+                    in_cleanup_block: false
+                };
+
+
+                // Rewrite each callee block and append it to the caller.
+                for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
+                    integrator.visit_basic_block_data(bb, &mut block);
+                    caller_mir.basic_blocks_mut().push(block);
+                }
+
+                // Replace the call with a jump to the inlined entry block.
+                let terminator = Terminator {
+                    source_info: callsite.location,
+                    kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
+                };
+
+                caller_mir[callsite.bb].terminator = Some(terminator);
+
+                true
+            }
+            kind => {
+                // Not an inlinable call: put the terminator back untouched.
+                caller_mir[callsite.bb].terminator = Some(Terminator {
+                    source_info: terminator.source_info,
+                    kind: kind
+                });
+                false
+            }
+        }
+    }
+
+    /// Builds the `Box -> *mut T` conversion needed when inlining
+    /// `box_free`: emits `&mut *arg` into a temp, then casts that
+    /// reference to a raw `*mut` pointer in a second temp, appending
+    /// both statements to the call block. Returns the casted operand.
+    fn cast_box_free_arg(&self, arg: Lvalue<'tcx>, ptr_ty: Ty<'tcx>,
+                         callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Operand<'tcx> {
+        let arg = Rvalue::Ref(
+            self.tcx.mk_region(ty::ReErased),
+            BorrowKind::Mut,
+            arg.deref());
+
+        let ty = arg.ty(caller_mir, self.tcx);
+        let ref_tmp = LocalDecl::new_temp(ty);
+        let ref_tmp = caller_mir.local_decls.push(ref_tmp);
+        let ref_tmp = Lvalue::Local(ref_tmp);
+
+        let ref_stmt = Statement {
+            source_info: callsite.location,
+            kind: StatementKind::Assign(ref_tmp.clone(), arg)
+        };
+
+        caller_mir[callsite.bb]
+            .statements.push(ref_stmt);
+
+        // Recover the pointee type from the original argument type.
+        let pointee_ty = match ptr_ty.sty {
+            ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
+            _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
+            _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
+        };
+        let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);
+
+        let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Consume(ref_tmp), ptr_ty);
+
+        let cast_tmp = LocalDecl::new_temp(ptr_ty);
+        let cast_tmp = caller_mir.local_decls.push(cast_tmp);
+        let cast_tmp = Lvalue::Local(cast_tmp);
+
+        let cast_stmt = Statement {
+            source_info: callsite.location,
+            kind: StatementKind::Assign(cast_tmp.clone(), raw_ptr)
+        };
+
+        caller_mir[callsite.bb]
+            .statements.push(cast_stmt);
+
+        Operand::Consume(cast_tmp)
+    }
+
+    /// Prepares call arguments for inlining: each argument is spilled
+    /// into a fresh caller temp (assigned in the call block) unless it
+    /// already consumes a caller temp, which is reused as-is.
+    fn make_call_args(&self, args: Vec<Operand<'tcx>>,
+                      callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Vec<Operand<'tcx>> {
+        let tcx = self.tcx;
+        // FIXME: Analysis of the usage of the arguments to avoid
+        // unnecessary temporaries.
+        args.into_iter().map(|a| {
+            if let Operand::Consume(Lvalue::Local(local)) = a {
+                if caller_mir.local_kind(local) == LocalKind::Temp {
+                    // Reuse the operand if it's a temporary already
+                    return a;
+                }
+            }
+
+            debug!("Creating temp for argument");
+            // Otherwise, create a temporary for the arg
+            let arg = Rvalue::Use(a);
+
+            let ty = arg.ty(caller_mir, tcx);
+
+            let arg_tmp = LocalDecl::new_temp(ty);
+            let arg_tmp = caller_mir.local_decls.push(arg_tmp);
+            let arg_tmp = Lvalue::Local(arg_tmp);
+
+            let stmt = Statement {
+                source_info: callsite.location,
+                kind: StatementKind::Assign(arg_tmp.clone(), arg)
+            };
+            caller_mir[callsite.bb].statements.push(stmt);
+            Operand::Consume(arg_tmp)
+        }).collect()
+    }
+}
+
+/// Computes the layout size of `ty` in bytes under `param_env`,
+/// returning `None` when layout cannot be determined (e.g. the type
+/// is not fully monomorphic).
+fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParameterEnvironment<'tcx>,
+                          ty: Ty<'tcx>) -> Option<u64> {
+    tcx.infer_ctxt(param_env, traits::Reveal::All).enter(|infcx| {
+        ty.layout(&infcx).ok().map(|layout| {
+            layout.size(&tcx.data_layout).bytes()
+        })
+    })
+}
+
+/**
+ * Integrator.
+ *
+ * Integrates blocks from the callee function into the calling function.
+ * Updates block indices, references to locals and other control flow
+ * stuff.
+ */
+struct Integrator<'a, 'tcx: 'a> {
+    // Index of the first caller block holding inlined code; added to
+    // every callee block index.
+    block_idx: usize,
+    // The prepared call arguments; substituted for callee arg locals.
+    args: &'a [Operand<'tcx>],
+    // Callee var/temp locals -> caller locals.
+    local_map: IndexVec<Local, Local>,
+    // Callee visibility scopes -> caller scopes.
+    scope_map: IndexVec<VisibilityScope, VisibilityScope>,
+    // Callee promoted constants -> caller promoted indices.
+    promoted_map: IndexVec<Promoted, Promoted>,
+    _callsite: CallSite<'tcx>,
+    // Lvalue substituted for the callee's return pointer.
+    destination: Lvalue<'tcx>,
+    // Block `Return` terminators are redirected to.
+    return_block: BasicBlock,
+    // The original call's cleanup target, if any; used as the unwind
+    // edge for inlined drops/calls/asserts outside cleanup blocks.
+    cleanup_block: Option<BasicBlock>,
+    // True while visiting a cleanup block (set per block).
+    in_cleanup_block: bool,
+}
+
+impl<'a, 'tcx> Integrator<'a, 'tcx> {
+    /// Shifts a callee block index into the caller's block space.
+    fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
+        let new = BasicBlock::new(tgt.index() + self.block_idx);
+        debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
+        new
+    }
+
+    /// Maps a callee var/temp local to its caller local. Returns
+    /// `None` for the return pointer and argument locals (the first
+    /// `args.len() + 1` indices), which are substituted elsewhere.
+    fn update_local(&self, local: Local) -> Option<Local> {
+        let idx = local.index();
+        if idx < (self.args.len() + 1) {
+            return None;
+        }
+        let idx = idx - (self.args.len() + 1);
+        let local = Local::new(idx);
+        self.local_map.get(local).cloned()
+    }
+
+    /// If `arg` is a callee argument local (indices 1..=args.len()),
+    /// returns its zero-based position in `self.args`.
+    fn arg_index(&self, arg: Local) -> Option<usize> {
+        let idx = arg.index();
+        if idx > 0 && idx <= self.args.len() {
+            Some(idx - 1)
+        } else {
+            None
+        }
+    }
+}
+
+impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
+    // Rewrites locals: mapped vars/temps get their caller index; the
+    // return pointer becomes `destination`; argument locals become the
+    // lvalue of the corresponding argument operand. Non-local lvalues
+    // recurse via super_lvalue.
+    fn visit_lvalue(&mut self,
+                    lvalue: &mut Lvalue<'tcx>,
+                    _ctxt: LvalueContext<'tcx>,
+                    _location: Location) {
+        if let Lvalue::Local(ref mut local) = *lvalue {
+            if let Some(l) = self.update_local(*local) {
+                // Temp or Var; update the local reference
+                *local = l;
+                return;
+            }
+        }
+        if let Lvalue::Local(local) = *lvalue {
+            if local == RETURN_POINTER {
+                // Return pointer; update the lvalue itself
+                *lvalue = self.destination.clone();
+            } else if local.index() < (self.args.len() + 1) {
+                // Argument, once again update the lvalue itself
+                let idx = local.index() - 1;
+                if let Operand::Consume(ref lval) = self.args[idx] {
+                    *lvalue = lval.clone();
+                } else {
+                    bug!("Arg operand `{:?}` is not an Lvalue use.", idx)
+                }
+            }
+        } else {
+            self.super_lvalue(lvalue, _ctxt, _location)
+        }
+    }
+
+    // Whole-operand substitution for argument locals: unlike
+    // visit_lvalue this can replace with any operand, including
+    // constants.
+    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
+        if let Operand::Consume(Lvalue::Local(arg)) = *operand {
+            if let Some(idx) = self.arg_index(arg) {
+                let new_arg = self.args[idx].clone();
+                *operand = new_arg;
+                return;
+            }
+        }
+        self.super_operand(operand, location);
+    }
+
+    // Tracks whether we are inside a cleanup block so terminators can
+    // decide whether to attach the caller's cleanup edge.
+    fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
+        self.in_cleanup_block = data.is_cleanup;
+        self.super_basic_block_data(block, data);
+        self.in_cleanup_block = false;
+    }
+
+    // Shifts every block target into the caller's block space, wires
+    // up unwind edges, and turns Return/Resume into gotos.
+    fn visit_terminator_kind(&mut self, block: BasicBlock,
+                             kind: &mut TerminatorKind<'tcx>, loc: Location) {
+        self.super_terminator_kind(block, kind, loc);
+
+        match *kind {
+            TerminatorKind::Goto { ref mut target} => {
+                *target = self.update_target(*target);
+            }
+            TerminatorKind::SwitchInt { ref mut targets, .. } => {
+                for tgt in targets {
+                    *tgt = self.update_target(*tgt);
+                }
+            }
+            TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
+            TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
+                *target = self.update_target(*target);
+                if let Some(tgt) = *unwind {
+                    *unwind = Some(self.update_target(tgt));
+                } else if !self.in_cleanup_block {
+                    // Unless this drop is in a cleanup block, add an unwind edge to
+                    // the original call's cleanup block
+                    *unwind = self.cleanup_block;
+                }
+            }
+            TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
+                if let Some((_, ref mut tgt)) = *destination {
+                    *tgt = self.update_target(*tgt);
+                }
+                if let Some(tgt) = *cleanup {
+                    *cleanup = Some(self.update_target(tgt));
+                } else if !self.in_cleanup_block {
+                    // Unless this call is in a cleanup block, add an unwind edge to
+                    // the original call's cleanup block
+                    *cleanup = self.cleanup_block;
+                }
+            }
+            TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
+                *target = self.update_target(*target);
+                if let Some(tgt) = *cleanup {
+                    *cleanup = Some(self.update_target(tgt));
+                } else if !self.in_cleanup_block {
+                    // Unless this assert is in a cleanup block, add an unwind edge to
+                    // the original call's cleanup block
+                    *cleanup = self.cleanup_block;
+                }
+            }
+            TerminatorKind::Return => {
+                // The callee's return continues at the caller's
+                // post-call block.
+                *kind = TerminatorKind::Goto { target: self.return_block };
+            }
+            TerminatorKind::Resume => {
+                // With a caller cleanup block, resuming means jumping
+                // into the caller's unwind path instead.
+                if let Some(tgt) = self.cleanup_block {
+                    *kind = TerminatorKind::Goto { target: tgt }
+                }
+            }
+            TerminatorKind::Unreachable => { }
+        }
+    }
+
+    fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
+        *scope = self.scope_map[*scope];
+    }
+
+    // Remaps promoted-constant indices; unmapped indices are left
+    // untouched (no super call for the Promoted case).
+    fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
+        if let Literal::Promoted { ref mut index } = *literal {
+            if let Some(p) = self.promoted_map.get(*index).cloned() {
+                *index = p;
+            }
+        } else {
+            self.super_literal(literal, loc);
+        }
+    }
+}
pub mod deaggregator;
pub mod instcombine;
pub mod copy_prop;
+pub mod inline;
}
impl<'a, 'tcx: 'a> CfgSimplifier<'a, 'tcx> {
- fn new(mir: &'a mut Mir<'tcx>) -> Self {
+ pub fn new(mir: &'a mut Mir<'tcx>) -> Self {
let mut pred_count = IndexVec::from_elem(0u32, mir.basic_blocks());
// we can't use mir.predecessors() here because that counts
}
}
- fn simplify(mut self) {
+ pub fn simplify(mut self) {
loop {
let mut changed = false;
if !changed { break }
}
+
+ self.strip_nops()
}
// Collapse a goto chain starting from `start`
terminator.kind = TerminatorKind::Goto { target: first_succ };
true
}
+
+    /// Removes all `Nop` statements from every basic block in place.
+    fn strip_nops(&mut self) {
+        for blk in self.basic_blocks.iter_mut() {
+            blk.statements.retain(|stmt| if let StatementKind::Nop = stmt.kind {
+                false
+            } else {
+                true
+            })
+        }
+    }
}
-fn remove_dead_blocks(mir: &mut Mir) {
+pub fn remove_dead_blocks(mir: &mut Mir) {
let mut seen = BitVector::new(mir.basic_blocks().len());
for (bb, _) in traversal::preorder(mir) {
seen.insert(bb.index());
fn visit_attribute(&mut self, attr: &'v ast::Attribute) {
self.record("Attribute", Id::None, attr);
}
-
- fn visit_macro_def(&mut self, macro_def: &'v ast::MacroDef) {
- self.record("MacroDef", Id::None, macro_def);
- ast_visit::walk_macro_def(self, macro_def)
- }
}
use rustc::lint::{EarlyLintPassObject, LateLintPassObject, LintId, Lint};
use rustc::session::Session;
-use rustc::mir::transform::MirMapPass;
-
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
use syntax::ext::base::MacroExpanderFn;
use syntax::symbol::Symbol;
#[doc(hidden)]
pub late_lint_passes: Vec<LateLintPassObject>,
- #[doc(hidden)]
- pub mir_passes: Vec<Box<for<'pcx> MirMapPass<'pcx>>>,
-
#[doc(hidden)]
pub lint_groups: HashMap<&'static str, Vec<LintId>>,
lint_groups: HashMap::new(),
llvm_passes: vec![],
attributes: vec![],
- mir_passes: Vec::new(),
whitelisted_custom_derives: Vec::new(),
}
}
self.lint_groups.insert(name, to.into_iter().map(|x| LintId::of(x)).collect());
}
- /// Register a MIR pass
- pub fn register_mir_pass(&mut self, pass: Box<for<'pcx> MirMapPass<'pcx>>) {
- self.mir_passes.push(pass);
- }
-
/// Register an LLVM pass.
///
/// Registration with LLVM itself is handled through static C++ objects with
use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple};
use syntax::ext::base::SyntaxExtension;
use syntax::ext::base::Determinacy::Undetermined;
-use syntax::ext::expand::mark_tts;
use syntax::ext::hygiene::Mark;
use syntax::ext::tt::macro_rules;
use syntax::parse::token;
self.define(parent, ident, TypeNS, (module, vis, sp, expansion));
self.current_module = module;
}
- ItemKind::Mac(_) => panic!("unexpanded macro in resolve!"),
+ ItemKind::MacroDef(..) | ItemKind::Mac(_) => unreachable!(),
}
}
})
}
+    /// Returns the module in which the macro for `expansion` was
+    /// defined: the recorded local scope for local definitions, or the
+    /// extern crate root of the defining crate otherwise.
+    pub fn macro_def_scope(&mut self, expansion: Mark) -> Module<'a> {
+        let def_id = self.macro_defs[&expansion];
+        if let Some(id) = self.definitions.as_local_node_id(def_id) {
+            self.local_macro_def_scopes[&id]
+        } else {
+            let module_def_id = ty::DefIdTree::parent(&*self, def_id).unwrap();
+            self.get_extern_crate_root(module_def_id.krate)
+        }
+    }
+
pub fn get_macro(&mut self, def: Def) -> Rc<SyntaxExtension> {
let def_id = match def {
Def::Macro(def_id, ..) => def_id,
return ext.clone();
}
- let mut macro_rules = match self.session.cstore.load_macro(def_id, &self.session) {
- LoadedMacro::MacroRules(macro_rules) => macro_rules,
+ let macro_def = match self.session.cstore.load_macro(def_id, &self.session) {
+ LoadedMacro::MacroDef(macro_def) => macro_def,
LoadedMacro::ProcMacro(ext) => return ext,
};
- let mark = Mark::fresh();
- let invocation = self.arenas.alloc_invocation_data(InvocationData {
- module: Cell::new(self.get_extern_crate_root(def_id.krate)),
- def_index: CRATE_DEF_INDEX,
- const_expr: false,
- legacy_scope: Cell::new(LegacyScope::Empty),
- expansion: Cell::new(LegacyScope::Empty),
- });
- self.invocations.insert(mark, invocation);
- macro_rules.body = mark_tts(macro_rules.stream(), mark).into();
- let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, ¯o_rules));
+ let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, ¯o_def));
self.macro_map.insert(def_id, ext.clone());
ext
}
fn visit_item(&mut self, item: &'a Item) {
let macro_use = match item.node {
- ItemKind::Mac(ref mac) => {
- if mac.node.path.segments.is_empty() {
- self.legacy_scope = LegacyScope::Expansion(self.visit_invoc(item.id));
- } else {
- self.resolver.define_macro(item, &mut self.legacy_scope);
- }
+ ItemKind::MacroDef(..) => {
+ self.resolver.define_macro(item, &mut self.legacy_scope);
+ return
+ }
+ ItemKind::Mac(..) => {
+ self.legacy_scope = LegacyScope::Expansion(self.visit_invoc(item.id));
return
}
ItemKind::Mod(..) => self.resolver.contains_macro_use(&item.attrs),
// We passed through a module.
ModuleRibKind(Module<'a>),
- // We passed through a `macro_rules!` statement with the given expansion
- MacroDefinition(Mark),
+ // We passed through a `macro_rules!` statement
+ MacroDefinition(DefId),
// All bindings in this rib are type parameters that can't be used
// from the default of a type parameter because they're not declared
}
}
-    fn get_macro(&self, resolver: &mut Resolver<'a>) -> Rc<SyntaxExtension> {
+    /// Returns this binding's `Def`, following import chains and
+    /// picking the first candidate (`b1`) of an ambiguity.
+    fn def_ignoring_ambiguity(&self) -> Def {
        match self.kind {
-            NameBindingKind::Import { binding, .. } => binding.get_macro(resolver),
-            NameBindingKind::Ambiguity { b1, .. } => b1.get_macro(resolver),
-            _ => resolver.get_macro(self.def()),
+            NameBindingKind::Import { binding, .. } => binding.def_ignoring_ambiguity(),
+            NameBindingKind::Ambiguity { b1, .. } => b1.def_ignoring_ambiguity(),
+            _ => self.def(),
        }
    }
+    /// Resolves this binding to its macro's syntax extension.
+    fn get_macro(&self, resolver: &mut Resolver<'a>) -> Rc<SyntaxExtension> {
+        resolver.get_macro(self.def_ignoring_ambiguity())
+    }
+
// We sometimes need to treat variants as `pub` for backwards compatibility
fn pseudo_vis(&self) -> ty::Visibility {
if self.is_variant() { ty::Visibility::Public } else { self.vis }
pub definitions: Definitions,
- // Maps the node id of a statement to the expansions of the `macro_rules!`s
- // immediately above the statement (if appropriate).
- macros_at_scope: FxHashMap<NodeId, Vec<Mark>>,
-
graph_root: Module<'a>,
prelude: Option<Module<'a>>,
dummy_binding: &'a NameBinding<'a>,
use_extern_macros: bool, // true if `#![feature(use_extern_macros)]`
- pub exported_macros: Vec<ast::MacroDef>,
crate_loader: &'a mut CrateLoader,
macro_names: FxHashSet<Name>,
builtin_macros: FxHashMap<Name, &'a NameBinding<'a>>,
lexical_macro_resolutions: Vec<(Name, &'a Cell<LegacyScope<'a>>)>,
macro_map: FxHashMap<DefId, Rc<SyntaxExtension>>,
+ macro_defs: FxHashMap<Mark, DefId>,
+ local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>,
macro_exports: Vec<Export>,
pub whitelisted_legacy_custom_derives: Vec<Name>,
pub found_unresolved_macro: bool,
let features = session.features.borrow();
+ let mut macro_defs = FxHashMap();
+ macro_defs.insert(Mark::root(), root_def_id);
+
Resolver {
session: session,
definitions: definitions,
- macros_at_scope: FxHashMap(),
// The outermost module has def ID 0; this is not reflected in the
// AST.
// `#![feature(proc_macro)]` implies `#[feature(extern_macros)]`
use_extern_macros: features.use_extern_macros || features.proc_macro,
- exported_macros: Vec::new(),
crate_loader: crate_loader,
macro_names: FxHashSet(),
builtin_macros: FxHashMap(),
macro_map: FxHashMap(),
macro_exports: Vec::new(),
invocations: invocations,
+ macro_defs: macro_defs,
+ local_macro_def_scopes: FxHashMap(),
name_already_seen: FxHashMap(),
whitelisted_legacy_custom_derives: Vec::new(),
proc_macro_enabled: features.proc_macro,
}
}
- if let MacroDefinition(mac) = self.ribs[ns][i].kind {
+ if let MacroDefinition(def) = self.ribs[ns][i].kind {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
- let (source_ctxt, source_macro) = ident.ctxt.source();
- if source_macro == mac {
- ident.ctxt = source_ctxt;
+ let ctxt_data = ident.ctxt.data();
+ if def == self.macro_defs[&ctxt_data.outer_mark] {
+ ident.ctxt = ctxt_data.prev_ctxt;
}
}
}
None
}
- fn resolve_crate_var(&mut self, mut crate_var_ctxt: SyntaxContext) -> Module<'a> {
- while crate_var_ctxt.source().0 != SyntaxContext::empty() {
- crate_var_ctxt = crate_var_ctxt.source().0;
+ fn resolve_crate_var(&mut self, crate_var_ctxt: SyntaxContext) -> Module<'a> {
+ let mut ctxt_data = crate_var_ctxt.data();
+ while ctxt_data.prev_ctxt != SyntaxContext::empty() {
+ ctxt_data = ctxt_data.prev_ctxt.data();
}
- let module = self.invocations[&crate_var_ctxt.source().1].module.get();
+ let module = self.macro_def_scope(ctxt_data.outer_mark);
if module.is_local() { self.graph_root } else { module }
}
NormalRibKind => {
// Continue
}
- MacroDefinition(mac) => {
+ MacroDefinition(def) => {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
- let (source_ctxt, source_macro) = ident.ctxt.source();
- if source_macro == mac {
- ident.ctxt = source_ctxt;
+ let ctxt_data = ident.ctxt.data();
+ if def == self.macro_defs[&ctxt_data.outer_mark] {
+ ident.ctxt = ctxt_data.prev_ctxt;
}
}
_ => {
}
}
- ItemKind::ExternCrate(_) => {
+ ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) => {
// do nothing, these are just around to be encoded
}
// Descend into the block.
for stmt in &block.stmts {
- if let Some(marks) = self.macros_at_scope.remove(&stmt.id) {
- num_macro_definition_ribs += marks.len() as u32;
- for mark in marks {
- self.ribs[ValueNS].push(Rib::new(MacroDefinition(mark)));
- self.label_ribs.push(Rib::new(MacroDefinition(mark)));
+ if let ast::StmtKind::Item(ref item) = stmt.node {
+ if let ast::ItemKind::MacroDef(..) = item.node {
+ num_macro_definition_ribs += 1;
+ let def = self.definitions.local_def_id(item.id);
+ self.ribs[ValueNS].push(Rib::new(MacroDefinition(def)));
+ self.label_ribs.push(Rib::new(MacroDefinition(def)));
}
}
use rustc::hir::def::{Def, Export};
use rustc::hir::map::{self, DefCollector};
use rustc::ty;
-use std::cell::Cell;
-use std::rc::Rc;
use syntax::ast::{self, Name, Ident};
-use syntax::attr;
+use syntax::attr::{self, HasAttrs};
use syntax::errors::DiagnosticBuilder;
-use syntax::ext::base::{self, Determinacy, MultiModifier, MultiDecorator};
-use syntax::ext::base::{Resolver as SyntaxResolver, SyntaxExtension};
-use syntax::ext::base::MacroKind;
-use syntax::ext::expand::{Expansion, mark_tts};
+use syntax::ext::base::{self, Annotatable, Determinacy, MultiModifier, MultiDecorator};
+use syntax::ext::base::{MacroKind, SyntaxExtension, Resolver as SyntaxResolver};
+use syntax::ext::expand::{Expansion, ExpansionKind, Invocation, InvocationKind, find_attr_invoc};
use syntax::ext::hygiene::Mark;
+use syntax::ext::placeholders::placeholder;
use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{self, emit_feature_err, GateIssue};
use syntax::fold::{self, Folder};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
use syntax::util::lev_distance::find_best_match_for_name;
-use syntax::visit::Visitor;
use syntax_pos::{Span, DUMMY_SP};
+use std::cell::Cell;
+use std::mem;
+use std::rc::Rc;
+
#[derive(Clone)]
pub struct InvocationData<'a> {
pub module: Cell<Module<'a>>,
pub struct LegacyBinding<'a> {
pub parent: Cell<LegacyScope<'a>>,
pub name: ast::Name,
- ext: Rc<SyntaxExtension>,
+ def_id: DefId,
pub span: Span,
}
invocation.expansion.set(visitor.legacy_scope);
}
- fn add_ext(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
+ fn add_builtin(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
let def_id = DefId {
krate: BUILTIN_MACROS_CRATE,
index: DefIndex::new(self.macro_map.len()),
self.builtin_macros.insert(ident.name, binding);
}
- fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>) {
- self.macros_at_scope.insert(id, macros);
- }
-
fn resolve_imports(&mut self) {
ImportResolver { resolver: self }.resolve_imports()
}
None
}
- fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind,
- force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
+ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
+ let def = match invoc.kind {
+ InvocationKind::Attr { attr: None, .. } => return Ok(None),
+ _ => match self.resolve_invoc_to_def(invoc, scope, force) {
+ Ok(def) => def,
+ Err(determinacy) => return Err(determinacy),
+ },
+ };
+ self.macro_defs.insert(invoc.expansion_data.mark, def.def_id());
+ Ok(Some(self.get_macro(def)))
+ }
+
+ fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Rc<SyntaxExtension>, Determinacy> {
+ self.resolve_macro_to_def(scope, path, kind, force).map(|def| self.get_macro(def))
+ }
+}
+
+impl<'a> Resolver<'a> {
+ fn resolve_invoc_to_def(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Def, Determinacy> {
+ let (attr, traits, item) = match invoc.kind {
+ InvocationKind::Attr { ref mut attr, ref traits, ref mut item } => (attr, traits, item),
+ InvocationKind::Bang { ref mac, .. } => {
+ return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force);
+ }
+ InvocationKind::Derive { name, span, .. } => {
+ let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
+ return self.resolve_macro_to_def(scope, &path, MacroKind::Derive, force);
+ }
+ };
+
+ let (attr_name, path) = {
+ let attr = attr.as_ref().unwrap();
+ (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name())))
+ };
+
+ let mut determined = true;
+ match self.resolve_macro_to_def(scope, &path, MacroKind::Attr, force) {
+ Ok(def) => return Ok(def),
+ Err(Determinacy::Undetermined) => determined = false,
+ Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
+ Err(Determinacy::Determined) => {}
+ }
+
+ for &(name, span) in traits {
+ let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
+ match self.resolve_macro(scope, &path, MacroKind::Derive, force) {
+ Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
+ if inert_attrs.contains(&attr_name) {
+ // FIXME(jseyfried) Avoid `mem::replace` here.
+ let dummy_item = placeholder(ExpansionKind::Items, ast::DUMMY_NODE_ID)
+ .make_items().pop().unwrap();
+ let dummy_item = Annotatable::Item(dummy_item);
+ *item = mem::replace(item, dummy_item).map_attrs(|mut attrs| {
+ let inert_attr = attr.take().unwrap();
+ attr::mark_known(&inert_attr);
+ if self.proc_macro_enabled {
+ *attr = find_attr_invoc(&mut attrs);
+ }
+ attrs.push(inert_attr);
+ attrs
+ });
+ }
+ return Err(Determinacy::Undetermined);
+ },
+ Err(Determinacy::Undetermined) => determined = false,
+ Err(Determinacy::Determined) => {}
+ }
+ }
+
+ Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined })
+ }
+
+ fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Def, Determinacy> {
let ast::Path { ref segments, span } = *path;
if segments.iter().any(|segment| segment.parameters.is_some()) {
let kind =
return Err(Determinacy::Determined);
}
- let ext = match self.resolve_path(&path, Some(MacroNS), None) {
+ let def = match self.resolve_path(&path, Some(MacroNS), None) {
PathResult::NonModule(path_res) => match path_res.base_def() {
Def::Err => Err(Determinacy::Determined),
- def @ _ => Ok(self.get_macro(def)),
+ def @ _ => Ok(def),
},
PathResult::Module(..) => unreachable!(),
PathResult::Indeterminate if !force => return Err(Determinacy::Undetermined),
};
self.current_module.macro_resolutions.borrow_mut()
.push((path.into_boxed_slice(), span));
- return ext;
+ return def;
}
let name = path[0].name;
let result = match self.resolve_legacy_scope(&invocation.legacy_scope, name, false) {
- Some(MacroBinding::Legacy(binding)) => Ok(binding.ext.clone()),
- Some(MacroBinding::Modern(binding)) => Ok(binding.get_macro(self)),
+ Some(MacroBinding::Legacy(binding)) => Ok(Def::Macro(binding.def_id, MacroKind::Bang)),
+ Some(MacroBinding::Modern(binding)) => Ok(binding.def_ignoring_ambiguity()),
None => match self.resolve_lexical_macro_path_segment(path[0], MacroNS, None) {
- Ok(binding) => Ok(binding.get_macro(self)),
+ Ok(binding) => Ok(binding.def_ignoring_ambiguity()),
Err(Determinacy::Undetermined) if !force =>
return Err(Determinacy::Undetermined),
Err(_) => {
result
}
-}
-impl<'a> Resolver<'a> {
// Resolve the initial segment of a non-global macro path (e.g. `foo` in `foo::bar!();`)
pub fn resolve_lexical_macro_path_segment(&mut self,
ident: Ident,
}
pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<'a>) {
- let tts = match item.node {
- ast::ItemKind::Mac(ref mac) => mac.node.stream(),
- _ => unreachable!(),
- };
-
- if item.ident.name == "macro_rules" {
+ self.local_macro_def_scopes.insert(item.id, self.current_module);
+ let ident = item.ident;
+ if ident.name == "macro_rules" {
self.session.span_err(item.span, "user-defined macros may not be named `macro_rules`");
}
- let mark = Mark::from_placeholder_id(item.id);
- let invocation = self.invocations[&mark];
- invocation.module.set(self.current_module);
-
- let mut def = ast::MacroDef {
- ident: item.ident,
- attrs: item.attrs.clone(),
- id: ast::DUMMY_NODE_ID,
- span: item.span,
- body: mark_tts(tts, mark).into(),
- };
-
+ let def_id = self.definitions.local_def_id(item.id);
+ let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, item));
+ self.macro_map.insert(def_id, ext);
*legacy_scope = LegacyScope::Binding(self.arenas.alloc_legacy_binding(LegacyBinding {
- parent: Cell::new(*legacy_scope),
- name: def.ident.name,
- ext: Rc::new(macro_rules::compile(&self.session.parse_sess, &def)),
- span: def.span,
+ parent: Cell::new(*legacy_scope), name: ident.name, def_id: def_id, span: item.span,
}));
- self.macro_names.insert(def.ident.name);
+ self.macro_names.insert(ident.name);
- if attr::contains_name(&def.attrs, "macro_export") {
- def.id = self.next_node_id();
- DefCollector::new(&mut self.definitions).with_parent(CRATE_DEF_INDEX, |collector| {
- collector.visit_macro_def(&def)
- });
- self.macro_exports.push(Export {
- name: def.ident.name,
- def: Def::Macro(self.definitions.local_def_id(def.id), MacroKind::Bang),
- });
- self.exported_macros.push(def);
+ if attr::contains_name(&item.attrs, "macro_export") {
+ let def = Def::Macro(def_id, MacroKind::Bang);
+ self.macro_exports.push(Export { name: ident.name, def: def });
}
}
use rustc::hir;
use rustc::hir::def_id::{CrateNum, DefId};
-use syntax::ast::{self, NodeId};
+use syntax::ast::{self, Attribute, NodeId};
use syntax_pos::Span;
pub struct CrateData {
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data for extern crates.
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data about a function call.
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data for modules.
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data for a reference to a module.
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data for a typedef.
pub parent: Option<DefId>,
pub docs: String,
pub sig: Option<Signature>,
+ pub attributes: Vec<Attribute>,
}
/// Data for a reference to a type or trait.
pub visibility: Visibility,
pub docs: String,
pub sig: Option<Signature>,
+ pub attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
scope: scope
}.lower(self.tcx));
}
+ // With macros 2.0, we can legitimately get a ref to a macro, but
+ // we don't handle it properly for now (FIXME).
+ Def::Macro(..) => {}
Def::Local(..) |
Def::Upvar(..) |
Def::SelfTy(..) |
Def::AssociatedTy(..) |
Def::AssociatedConst(..) |
Def::PrimTy(_) |
- Def::Macro(..) |
Def::Err => {
span_bug!(span,
"process_def_kind for unexpected item: {:?}",
visibility: Visibility::Inherited,
docs: String::new(),
sig: None,
+ attributes: vec![],
}.lower(self.tcx));
}
}
visibility: vis,
docs: docs_for_attrs(attrs),
sig: method_data.sig,
+ attributes: attrs.to_vec(),
}.lower(self.tcx));
}
parent: None,
docs: String::new(),
sig: None,
+ attributes: vec![],
}.lower(self.tcx));
}
}
visibility: vis,
docs: docs_for_attrs(attrs),
sig: None,
+ attributes: attrs.to_vec(),
}.lower(self.tcx));
}
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: self.save_ctxt.sig_base(item),
+ attributes: item.attrs.clone(),
}.lower(self.tcx));
}
parent: Some(make_def_id(item.id, &self.tcx.hir)),
docs: docs_for_attrs(&variant.node.attrs),
sig: sig,
+ attributes: variant.node.attrs.clone(),
}.lower(self.tcx));
}
}
parent: Some(make_def_id(item.id, &self.tcx.hir)),
docs: docs_for_attrs(&variant.node.attrs),
sig: sig,
+ attributes: variant.node.attrs.clone(),
}.lower(self.tcx));
}
}
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: self.save_ctxt.sig_base(item),
+ attributes: item.attrs.clone(),
}.lower(self.tcx));
}
visibility: Visibility::Inherited,
docs: String::new(),
sig: None,
+ attributes: vec![],
}.lower(self.tcx));
}
}
parent: None,
docs: docs_for_attrs(&item.attrs),
sig: Some(self.save_ctxt.sig_base(item)),
+ attributes: item.attrs.clone(),
}.lower(self.tcx));
}
visibility: Visibility::Inherited,
docs: String::new(),
sig: None,
+ attributes: vec![],
}.lower(self.tcx));
}
}
use rustc::hir::def_id::{CrateNum, DefId, DefIndex};
use rustc::hir::map::Map;
use rustc::ty::TyCtxt;
-use syntax::ast::NodeId;
+use syntax::ast::{self, NodeId};
use syntax::codemap::CodeMap;
+use syntax::print::pprust;
+use syntax::symbol::Symbol;
use syntax_pos::Span;
use data::{self, Visibility, SigElement};
}
}
+/// Represent an arbitrary attribute on a code element
+#[derive(Clone, Debug, RustcEncodable)]
+pub struct Attribute {
+ value: String,
+ span: SpanData,
+}
+
+impl Lower for Vec<ast::Attribute> {
+ type Target = Vec<Attribute>;
+
+ fn lower(self, tcx: TyCtxt) -> Vec<Attribute> {
+ let doc = Symbol::intern("doc");
+ self.into_iter()
+ // Only retain real attributes. Doc comments are lowered separately.
+ .filter(|attr| attr.name() != doc)
+ .map(|mut attr| {
+ // Remove the surrounding '#[..]' or '#![..]' of the pretty printed
+ // attribute. First normalize all inner attribute (#![..]) to outer
+ // ones (#[..]), then remove the two leading and the one trailing character.
+ attr.style = ast::AttrStyle::Outer;
+ let value = pprust::attribute_to_string(&attr);
+ // This str slicing works correctly, because the leading and trailing characters
+ // are in the ASCII range and thus exactly one byte each.
+ let value = value[2..value.len()-1].to_string();
+
+ Attribute {
+ value: value,
+ span: SpanData::from_span(attr.span, tcx.sess.codemap()),
+ }
+ }).collect()
+ }
+}
+
#[derive(Debug, RustcEncodable)]
pub struct CratePreludeData {
pub crate_name: String,
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::EnumData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::FunctionData {
parent: self.parent,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::MethodData {
parent: self.parent,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::ModData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::StructData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::StructVariantData {
parent: self.parent,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::TraitData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::TupleVariantData {
parent: self.parent,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Option<Signature>,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::TypeDefData {
parent: self.parent,
docs: self.docs,
sig: self.sig.map(|s| s.lower(tcx)),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub visibility: Visibility,
pub docs: String,
pub sig: Option<Signature>,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::VariableData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.map(|s| s.lower(tcx)),
+ attributes: self.attributes.lower(tcx),
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
};
if def.span.file_name != def.value {
// If the module is an out-of-line defintion, then we'll make the
decl_id: Option<Id>,
docs: String,
sig: Option<JsonSignature>,
+ attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: data.decl_id.map(|id| From::from(id)),
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: None,
+ attributes: vec![],
}
}
}
decl_id: None,
docs: String::new(),
sig: data.sig.map(|s| From::from(s)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: None,
+ attributes: data.attributes,
}
}
}
parent: None,
docs: docs_for_attrs(&item.attrs),
sig: self.sig_base(item),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Static(ref typ, mt, ref expr) => {
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: Some(self.sig_base(item)),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Const(ref typ, ref expr) => {
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: Some(self.sig_base(item)),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Mod(ref m) => {
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: self.sig_base(item),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Enum(ref def, _) => {
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: self.sig_base(item),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Impl(.., ref trait_ref, ref typ, _) => {
visibility: From::from(&field.vis),
docs: docs_for_attrs(&field.attrs),
sig: Some(sig),
+ attributes: field.attrs.clone(),
})
} else {
None
name: ast::Name, span: Span) -> Option<FunctionData> {
// The qualname for a method is the trait name or name of the struct in an impl in
// which the method is declared in, followed by the method's name.
- let (qualname, parent_scope, decl_id, vis, docs) =
+ let (qualname, parent_scope, decl_id, vis, docs, attributes) =
match self.tcx.impl_of_method(self.tcx.hir.local_def_id(id)) {
Some(impl_id) => match self.tcx.hir.get_if_local(impl_id) {
Some(Node::NodeItem(item)) => {
(result, trait_id, decl_id,
From::from(&item.vis),
- docs_for_attrs(&item.attrs))
+ docs_for_attrs(&item.attrs),
+ item.attrs.to_vec())
}
_ => {
span_bug!(span,
(format!("::{}", self.tcx.item_path_str(def_id)),
Some(def_id), None,
From::from(&item.vis),
- docs_for_attrs(&item.attrs))
+ docs_for_attrs(&item.attrs),
+ item.attrs.to_vec())
}
r => {
span_bug!(span,
}
}
None => {
- span_bug!(span, "Could not find container for method {}", id);
+ debug!("Could not find container for method {} at {:?}", id, span);
+ // This is not necessarily a bug, if there was a compilation error, the tables
+ // we need might not exist.
+ return None;
}
},
};
parent: parent_scope,
docs: docs,
sig: sig,
+ attributes: attributes,
})
}
sess.abort_if_errors();
// Invoke the system linker
+ //
+ // Note that there's a terribly awful hack that really shouldn't be present
+ // in any compiler. Here an environment variable is supported to
+ // automatically retry the linker invocation if the linker looks like it
+ // segfaulted.
+ //
+ // Gee that seems odd, normally segfaults are things we want to know about!
+ // Unfortunately though in rust-lang/rust#38878 we're experiencing the
+ // linker segfaulting on Travis quite a bit which is causing quite a bit of
+ // pain to land PRs when they spuriously fail due to a segfault.
+ //
+ // The issue #38878 has some more debugging information on it as well, but
+ // this unfortunately looks like it's just a race condition in OSX's linker
+ // with some thread pool working in the background. It seems that no one
+ // currently knows a fix for this so in the meantime we're left with this...
info!("{:?}", &cmd);
- let prog = time(sess.time_passes(), "running linker", || cmd.output());
+ let retry_on_segfault = env::var("RUSTC_RETRY_LINKER_ON_SEGFAULT").is_ok();
+ let mut prog;
+ let mut i = 0;
+ loop {
+ i += 1;
+ prog = time(sess.time_passes(), "running linker", || cmd.output());
+ if !retry_on_segfault || i > 3 {
+ break
+ }
+ let output = match prog {
+ Ok(ref output) => output,
+ Err(_) => break,
+ };
+ if output.status.success() {
+ break
+ }
+ let mut out = output.stderr.clone();
+ out.extend(&output.stdout);
+ let out = String::from_utf8_lossy(&out);
+ let msg = "clang: error: unable to execute command: \
+ Segmentation fault: 11";
+ if !out.contains(msg) {
+ break
+ }
+
+ sess.struct_warn("looks like the linker segfaulted when we tried to \
+ call it, automatically retrying again")
+ .note(&format!("{:?}", cmd))
+ .note(&out)
+ .emit();
+ }
+
match prog {
Ok(prog) => {
fn escape_string(s: &[u8]) -> String {
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::adjustment::CustomCoerceUnsized;
-use rustc::dep_graph::{DepNode, WorkProduct};
+use rustc::dep_graph::{AssertDepGraphSafe, DepNode, WorkProduct};
use rustc::hir::map as hir_map;
use rustc::util::common::time;
use session::config::{self, NoDebugInfo};
// Instantiate translation items without filling out definitions yet...
for ccx in crate_context_list.iter_need_trans() {
- let cgu = ccx.codegen_unit();
- let trans_items = cgu.items_in_deterministic_order(tcx, &symbol_map);
-
- tcx.dep_graph.with_task(cgu.work_product_dep_node(), || {
+ let dep_node = ccx.codegen_unit().work_product_dep_node();
+ tcx.dep_graph.with_task(dep_node,
+ ccx,
+ AssertDepGraphSafe(symbol_map.clone()),
+ trans_decl_task);
+
+ fn trans_decl_task<'a, 'tcx>(ccx: CrateContext<'a, 'tcx>,
+ symbol_map: AssertDepGraphSafe<Rc<SymbolMap<'tcx>>>) {
+ // FIXME(#40304): Instead of this, the symbol-map should be an
+ // on-demand thing that we compute.
+ let AssertDepGraphSafe(symbol_map) = symbol_map;
+ let cgu = ccx.codegen_unit();
+ let trans_items = cgu.items_in_deterministic_order(ccx.tcx(), &symbol_map);
for (trans_item, linkage) in trans_items {
trans_item.predefine(&ccx, linkage);
}
- });
+ }
}
// ... and now that we have everything pre-defined, fill out those definitions.
for ccx in crate_context_list.iter_need_trans() {
- let cgu = ccx.codegen_unit();
- let trans_items = cgu.items_in_deterministic_order(tcx, &symbol_map);
- tcx.dep_graph.with_task(cgu.work_product_dep_node(), || {
+ let dep_node = ccx.codegen_unit().work_product_dep_node();
+ tcx.dep_graph.with_task(dep_node,
+ ccx,
+ AssertDepGraphSafe(symbol_map.clone()),
+ trans_def_task);
+
+ fn trans_def_task<'a, 'tcx>(ccx: CrateContext<'a, 'tcx>,
+ symbol_map: AssertDepGraphSafe<Rc<SymbolMap<'tcx>>>) {
+ // FIXME(#40304): Instead of this, the symbol-map should be an
+ // on-demand thing that we compute.
+ let AssertDepGraphSafe(symbol_map) = symbol_map;
+ let cgu = ccx.codegen_unit();
+ let trans_items = cgu.items_in_deterministic_order(ccx.tcx(), &symbol_map);
for (trans_item, _) in trans_items {
trans_item.define(&ccx);
}
if ccx.sess().opts.debuginfo != NoDebugInfo {
debuginfo::finalize(&ccx);
}
- });
+ }
}
symbol_names_test::report_symbol_names(&shared_ccx);
use llvm;
use llvm::{ContextRef, ModuleRef, ValueRef};
-use rustc::dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig, WorkProduct};
+use rustc::dep_graph::{DepGraph, DepGraphSafe, DepNode, DepTrackingMap,
+ DepTrackingMapConfig, WorkProduct};
use middle::cstore::LinkMeta;
use rustc::hir;
use rustc::hir::def::ExportMap;
index: usize,
}
+impl<'a, 'tcx> DepGraphSafe for CrateContext<'a, 'tcx> {
+}
+
pub struct CrateContextIterator<'a, 'tcx: 'a> {
shared: &'a SharedCrateContext<'a, 'tcx>,
local_ccxs: &'a [LocalCrateContext<'tcx>],
use super::FnCtxt;
+use rustc::infer::InferOk;
use rustc::traits;
use rustc::ty::{self, Ty, TraitRef};
use rustc::ty::{ToPredicate, TypeFoldable};
pub fn finalize<'b, I>(self, pref: LvaluePreference, exprs: I)
where I: IntoIterator<Item = &'b hir::Expr>
+ {
+ let fcx = self.fcx;
+ fcx.register_infer_ok_obligations(self.finalize_as_infer_ok(pref, exprs));
+ }
+
+ pub fn finalize_as_infer_ok<'b, I>(self, pref: LvaluePreference, exprs: I)
+ -> InferOk<'tcx, ()>
+ where I: IntoIterator<Item = &'b hir::Expr>
{
let methods: Vec<_> = self.steps
.iter()
}
}
- for obligation in self.obligations {
- self.fcx.register_predicate(obligation);
+ InferOk {
+ value: (),
+ obligations: self.obligations
}
}
}
use rustc::hir;
use rustc::hir::def_id::DefId;
-use rustc::infer::{Coercion, InferOk, TypeTrace};
+use rustc::infer::{Coercion, InferResult, InferOk, TypeTrace};
+use rustc::infer::type_variable::TypeVariableOrigin;
use rustc::traits::{self, ObligationCause, ObligationCauseCode};
use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
use rustc::ty::{self, LvaluePreference, TypeAndMut,
use rustc::ty::subst::Subst;
use syntax::abi;
use syntax::feature_gate;
-use util::common::indent;
-use std::cell::RefCell;
use std::collections::VecDeque;
use std::ops::Deref;
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
cause: ObligationCause<'tcx>,
use_lub: bool,
- unsizing_obligations: RefCell<Vec<traits::PredicateObligation<'tcx>>>,
}
impl<'a, 'gcx, 'tcx> Deref for Coerce<'a, 'gcx, 'tcx> {
}
}
-type CoerceResult<'tcx> = RelateResult<'tcx, (Ty<'tcx>, Adjust<'tcx>)>;
+type CoerceResult<'tcx> = InferResult<'tcx, Adjustment<'tcx>>;
fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability,
to_mutbl: hir::Mutability)
}
}
+fn identity<'tcx>() -> Adjust<'tcx> {
+ Adjust::DerefRef {
+ autoderefs: 0,
+ autoref: None,
+ unsize: false,
+ }
+}
+
+fn success<'tcx>(kind: Adjust<'tcx>,
+ target: Ty<'tcx>,
+ obligations: traits::PredicateObligations<'tcx>)
+ -> CoerceResult<'tcx> {
+ Ok(InferOk {
+ value: Adjustment {
+ kind,
+ target
+ },
+ obligations
+ })
+}
+
impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
fn new(fcx: &'f FnCtxt<'f, 'gcx, 'tcx>, cause: ObligationCause<'tcx>) -> Self {
Coerce {
fcx: fcx,
cause: cause,
use_lub: false,
- unsizing_obligations: RefCell::new(vec![]),
}
}
- fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+ fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
self.commit_if_ok(|_| {
let trace = TypeTrace::types(&self.cause, false, a, b);
if self.use_lub {
self.lub(false, trace, &a, &b)
- .map(|ok| self.register_infer_ok_obligations(ok))
} else {
self.sub(false, trace, &a, &b)
- .map(|InferOk { value, obligations }| {
- self.fcx.register_predicates(obligations);
- value
- })
}
})
}
- /// Unify two types (using sub or lub) and produce a noop coercion.
- fn unify_and_identity(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
- self.unify(&a, &b).and_then(|ty| self.identity(ty))
- }
-
- /// Synthesize an identity adjustment.
- fn identity(&self, ty: Ty<'tcx>) -> CoerceResult<'tcx> {
- Ok((ty, Adjust::DerefRef {
- autoderefs: 0,
- autoref: None,
- unsize: false,
- }))
+ /// Unify two types (using sub or lub) and produce a specific coercion.
+ fn unify_and(&self, a: Ty<'tcx>, b: Ty<'tcx>, kind: Adjust<'tcx>)
+ -> CoerceResult<'tcx> {
+ self.unify(&a, &b).and_then(|InferOk { value: ty, obligations }| {
+ success(kind, ty, obligations)
+ })
}
fn coerce<'a, E, I>(&self, exprs: &E, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx>
// Just ignore error types.
if a.references_error() || b.references_error() {
- return self.identity(b);
+ return success(identity(), b, vec![]);
}
if a.is_never() {
- return Ok((b, Adjust::NeverToAny));
+ return success(Adjust::NeverToAny, b, vec![]);
}
// Consider coercing the subtype to a DST
}
_ => {
// Otherwise, just use unification rules.
- self.unify_and_identity(a, b)
+ self.unify_and(a, b, identity())
}
}
}
coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?;
(r_a, mt_a)
}
- _ => return self.unify_and_identity(a, b),
+ _ => return self.unify_and(a, b, identity()),
};
let span = self.cause.span;
let mut first_error = None;
let mut r_borrow_var = None;
let mut autoderef = self.autoderef(span, a);
- let mut success = None;
+ let mut found = None;
for (referent_ty, autoderefs) in autoderef.by_ref() {
if autoderefs == 0 {
mutbl: mt_b.mutbl, // [1] above
});
match self.unify(derefd_ty_a, b) {
- Ok(ty) => {
- success = Some((ty, autoderefs));
+ Ok(ok) => {
+ found = Some((ok, autoderefs));
break;
}
Err(err) => {
// (e.g., in example above, the failure from relating `Vec<T>`
// to the target type), since that should be the least
// confusing.
- let (ty, autoderefs) = match success {
+ let (InferOk { value: ty, mut obligations }, autoderefs) = match found {
Some(d) => d,
None => {
let err = first_error.expect("coerce_borrowed_pointer had no error");
}
};
- // This commits the obligations to the fulfillcx. After this succeeds,
- // this snapshot can't be rolled back.
- autoderef.finalize(LvaluePreference::from_mutbl(mt_b.mutbl), exprs());
-
- // Now apply the autoref. We have to extract the region out of
- // the final ref type we got.
if ty == a && mt_a.mutbl == hir::MutImmutable && autoderefs == 1 {
// As a special case, if we would produce `&'a *x`, that's
// a total no-op. We end up with the type `&'a T` just as
// `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
// which is a borrow.
assert_eq!(mt_b.mutbl, hir::MutImmutable); // can only coerce &T -> &U
- return self.identity(ty);
+ return success(identity(), ty, obligations);
}
+
+ // Now apply the autoref. We have to extract the region out of
+ // the final ref type we got.
let r_borrow = match ty.sty {
ty::TyRef(r_borrow, _) => r_borrow,
_ => span_bug!(span, "expected a ref type, got {:?}", ty),
ty,
autoderefs,
autoref);
- Ok((ty, Adjust::DerefRef {
+
+ let pref = LvaluePreference::from_mutbl(mt_b.mutbl);
+ obligations.extend(autoderef.finalize_as_infer_ok(pref, exprs()).obligations);
+
+ success(Adjust::DerefRef {
autoderefs: autoderefs,
autoref: autoref,
unsize: false,
- }))
+ }, ty, obligations)
}
}
_ => (source, None),
};
- let source = source.adjust_for_autoref(self.tcx, reborrow);
+ let coerce_source = source.adjust_for_autoref(self.tcx, reborrow);
+
+ let adjust = Adjust::DerefRef {
+ autoderefs: if reborrow.is_some() { 1 } else { 0 },
+ autoref: reborrow,
+ unsize: true,
+ };
+
+ // Setup either a subtyping or a LUB relationship between
+ // the `CoerceUnsized` target type and the expected type.
+ // We only have the latter, so we use an inference variable
+ // for the former and let type inference do the rest.
+ let origin = TypeVariableOrigin::MiscVariable(self.cause.span);
+ let coerce_target = self.next_ty_var(origin);
+ let mut coercion = self.unify_and(coerce_target, target, adjust)?;
let mut selcx = traits::SelectionContext::new(self);
// Use a FIFO queue for this custom fulfillment procedure.
let mut queue = VecDeque::new();
- let mut leftover_predicates = vec![];
// Create an obligation for `Source: CoerceUnsized<Target>`.
let cause = ObligationCause::misc(self.cause.span, self.body_id);
queue.push_back(self.tcx
- .predicate_for_trait_def(cause, coerce_unsized_did, 0, source, &[target]));
+ .predicate_for_trait_def(cause, coerce_unsized_did, 0,
+ coerce_source, &[coerce_target]));
// Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid
// emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where
let trait_ref = match obligation.predicate {
ty::Predicate::Trait(ref tr) if traits.contains(&tr.def_id()) => tr.clone(),
_ => {
- leftover_predicates.push(obligation);
+ coercion.obligations.push(obligation);
continue;
}
};
}
}
- *self.unsizing_obligations.borrow_mut() = leftover_predicates;
-
- let adjustment = Adjust::DerefRef {
- autoderefs: if reborrow.is_some() { 1 } else { 0 },
- autoref: reborrow,
- unsize: true,
- };
- debug!("Success, coerced with {:?}", adjustment);
- Ok((target, adjustment))
+ Ok(coercion)
}
fn coerce_from_safe_fn(&self,
a: Ty<'tcx>,
fn_ty_a: ty::PolyFnSig<'tcx>,
- b: Ty<'tcx>)
+ b: Ty<'tcx>,
+ to_unsafe: Adjust<'tcx>,
+ normal: Adjust<'tcx>)
-> CoerceResult<'tcx> {
if let ty::TyFnPtr(fn_ty_b) = b.sty {
match (fn_ty_a.unsafety(), fn_ty_b.unsafety()) {
(hir::Unsafety::Normal, hir::Unsafety::Unsafe) => {
let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a);
- return self.unify_and_identity(unsafe_a, b)
- .map(|(ty, _)| (ty, Adjust::UnsafeFnPointer));
+ return self.unify_and(unsafe_a, b, to_unsafe);
}
_ => {}
}
}
- self.unify_and_identity(a, b)
+ self.unify_and(a, b, normal)
}
fn coerce_from_fn_pointer(&self,
let b = self.shallow_resolve(b);
debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b);
- self.coerce_from_safe_fn(a, fn_ty_a, b)
+ self.coerce_from_safe_fn(a, fn_ty_a, b,
+ Adjust::UnsafeFnPointer, identity())
}
fn coerce_from_fn_item(&self,
match b.sty {
ty::TyFnPtr(_) => {
let a_fn_pointer = self.tcx.mk_fn_ptr(fn_ty_a);
- self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b)
- .map(|(ty, _)| (ty, Adjust::ReifyFnPointer))
+ self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b,
+ Adjust::ReifyFnPointer, Adjust::ReifyFnPointer)
}
- _ => self.unify_and_identity(a, b),
+ _ => self.unify_and(a, b, identity()),
}
}
self.cause.span,
feature_gate::GateIssue::Language,
feature_gate::CLOSURE_TO_FN_COERCION);
- return self.unify_and_identity(a, b);
+ return self.unify_and(a, b, identity());
}
// We coerce the closure, which has fn type
// `extern "rust-call" fn((arg0,arg1,...)) -> _`
let pointer_ty = self.tcx.mk_fn_ptr(converted_sig);
debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})",
a, b, pointer_ty);
- self.unify_and_identity(pointer_ty, b)
- .map(|(ty, _)| (ty, Adjust::ClosureFnPointer))
+ self.unify_and(pointer_ty, b, Adjust::ClosureFnPointer)
}
- _ => self.unify_and_identity(a, b),
+ _ => self.unify_and(a, b, identity()),
}
}
ty::TyRef(_, mt) => (true, mt),
ty::TyRawPtr(mt) => (false, mt),
_ => {
- return self.unify_and_identity(a, b);
+ return self.unify_and(a, b, identity());
}
};
mutbl: mutbl_b,
ty: mt_a.ty,
});
- let (ty, noop) = self.unify_and_identity(a_unsafe, b)?;
coerce_mutbls(mt_a.mutbl, mutbl_b)?;
-
// Although references and unsafe ptrs have the same
// representation, we still register an Adjust::DerefRef so that
// regionck knows that the region for `a` must be valid here.
- Ok((ty,
- if is_ref {
- Adjust::DerefRef {
- autoderefs: 1,
- autoref: Some(AutoBorrow::RawPtr(mutbl_b)),
- unsize: false,
- }
- } else if mt_a.mutbl != mutbl_b {
- Adjust::MutToConstPointer
- } else {
- noop
- }))
- }
-}
-
-fn apply<'a, 'b, 'gcx, 'tcx, E, I>(coerce: &mut Coerce<'a, 'gcx, 'tcx>,
- exprs: &E,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> RelateResult<'tcx, Adjustment<'tcx>>
- where E: Fn() -> I,
- I: IntoIterator<Item = &'b hir::Expr>
-{
-
- let (ty, adjust) = indent(|| coerce.coerce(exprs, a, b))?;
-
- let fcx = coerce.fcx;
- if let Adjust::DerefRef { unsize: true, .. } = adjust {
- let mut obligations = coerce.unsizing_obligations.borrow_mut();
- for obligation in obligations.drain(..) {
- fcx.register_predicate(obligation);
- }
+ self.unify_and(a_unsafe, b, if is_ref {
+ Adjust::DerefRef {
+ autoderefs: 1,
+ autoref: Some(AutoBorrow::RawPtr(mutbl_b)),
+ unsize: false,
+ }
+ } else if mt_a.mutbl != mutbl_b {
+ Adjust::MutToConstPointer
+ } else {
+ identity()
+ })
}
-
- Ok(Adjustment {
- kind: adjust,
- target: ty
- })
}
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target);
let cause = self.cause(expr.span, ObligationCauseCode::ExprAssignable);
- let mut coerce = Coerce::new(self, cause);
+ let coerce = Coerce::new(self, cause);
self.commit_if_ok(|_| {
- let adjustment = apply(&mut coerce, &|| Some(expr), source, target)?;
+ let ok = coerce.coerce(&|| Some(expr), source, target)?;
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
debug!("Success, coerced with {:?}", adjustment);
match self.tables.borrow().adjustments.get(&expr.id) {
// but only if the new expression has no coercion already applied to it.
let mut first_error = None;
if !self.tables.borrow().adjustments.contains_key(&new.id) {
- let result = self.commit_if_ok(|_| apply(&mut coerce, &|| Some(new), new_ty, prev_ty));
+ let result = self.commit_if_ok(|_| coerce.coerce(&|| Some(new), new_ty, prev_ty));
match result {
- Ok(adjustment) => {
+ Ok(ok) => {
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
self.write_adjustment(new.id, adjustment);
}
}
}
- match self.commit_if_ok(|_| apply(&mut coerce, &exprs, prev_ty, new_ty)) {
+ match self.commit_if_ok(|_| coerce.coerce(&exprs, prev_ty, new_ty)) {
Err(_) => {
// Avoid giving strange errors on failed attempts.
if let Some(e) = first_error {
})
}
}
- Ok(adjustment) => {
+ Ok(ok) => {
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
let mut tables = self.tables.borrow_mut();
for expr in exprs() {
}
pub fn check_item_bodies<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CompileResult {
- tcx.sess.track_errors(|| {
- tcx.dep_graph.with_task(DepNode::TypeckBodiesKrate, || {
- tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
- tcx.item_tables(body_owner_def_id);
- });
+ return tcx.sess.track_errors(|| {
+ tcx.dep_graph.with_task(DepNode::TypeckBodiesKrate, tcx, (), check_item_bodies_task);
+ });
+
+ fn check_item_bodies_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
+ tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
+ tcx.item_tables(body_owner_def_id);
});
- })
+ }
}
pub fn provide(providers: &mut Providers) {
/// 4. This is added by the code in `visit_expr` when we write to `item_types`.
/// 5. This is added by the code in `convert_item` when we write to `item_types`;
/// note that this write occurs inside the `CollectItemSig` task.
- /// 6. Added by explicit `read` below
- fn with_collect_item_sig<OP>(&self, id: ast::NodeId, op: OP)
- where OP: FnOnce()
- {
+ /// 6. Added by reads from within `op`.
+ fn with_collect_item_sig(&self, id: ast::NodeId, op: fn(TyCtxt<'a, 'tcx, 'tcx>, ast::NodeId)) {
let def_id = self.tcx.hir.local_def_id(id);
- self.tcx.dep_graph.with_task(DepNode::CollectItemSig(def_id), || {
- self.tcx.hir.read(id);
- op();
- });
+ self.tcx.dep_graph.with_task(DepNode::CollectItemSig(def_id), self.tcx, id, op);
}
}
}
fn visit_item(&mut self, item: &'tcx hir::Item) {
- self.with_collect_item_sig(item.id, || convert_item(self.tcx, item));
+ self.with_collect_item_sig(item.id, convert_item);
intravisit::walk_item(self, item);
}
}
fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
- self.with_collect_item_sig(trait_item.id, || {
- convert_trait_item(self.tcx, trait_item)
- });
+ self.with_collect_item_sig(trait_item.id, convert_trait_item);
intravisit::walk_trait_item(self, trait_item);
}
fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
- self.with_collect_item_sig(impl_item.id, || {
- convert_impl_item(self.tcx, impl_item)
- });
+ self.with_collect_item_sig(impl_item.id, convert_impl_item);
intravisit::walk_impl_item(self, impl_item);
}
}
}
}
-fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, it: &hir::Item) {
+fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_id: ast::NodeId) {
+ let it = tcx.hir.expect_item(item_id);
debug!("convert: item {} with id {}", it.name, it.id);
- let def_id = tcx.hir.local_def_id(it.id);
+ let def_id = tcx.hir.local_def_id(item_id);
match it.node {
// These don't define types.
hir::ItemExternCrate(_) | hir::ItemUse(..) | hir::ItemMod(_) => {
}
}
-fn convert_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_item: &hir::TraitItem) {
+fn convert_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_item_id: ast::NodeId) {
+ let trait_item = tcx.hir.expect_trait_item(trait_item_id);
let def_id = tcx.hir.local_def_id(trait_item.id);
tcx.item_generics(def_id);
tcx.item_predicates(def_id);
}
-fn convert_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_item: &hir::ImplItem) {
- let def_id = tcx.hir.local_def_id(impl_item.id);
+fn convert_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_item_id: ast::NodeId) {
+ let def_id = tcx.hir.local_def_id(impl_item_id);
tcx.item_generics(def_id);
tcx.item_type(def_id);
tcx.item_predicates(def_id);
/// Used when rendering a `ResolvedPath` structure. This invokes the `path`
/// rendering function with the necessary arguments for linking to a local path.
fn resolved_path(w: &mut fmt::Formatter, did: DefId, path: &clean::Path,
- print_all: bool, use_absolute: bool) -> fmt::Result {
+ print_all: bool, use_absolute: bool, is_not_debug: bool) -> fmt::Result {
let last = path.segments.last().unwrap();
let rel_root = match &*path.segments[0].name {
"self" => Some("./".to_string()),
} else {
root.push_str(&seg.name);
root.push_str("/");
- write!(w, "<a class=\"mod\"
- href=\"{}index.html\">{}</a>::",
- root,
- seg.name)?;
+ if is_not_debug {
+ write!(w, "<a class=\"mod\"
+ href=\"{}index.html\">{}</a>::",
+ root,
+ seg.name)?;
+ } else {
+ write!(w, "{}::", seg.name)?;
+ }
}
}
}
}
}
if w.alternate() {
- write!(w, "{:#}{:#}", HRef::new(did, &last.name), last.params)?;
+ if is_not_debug {
+ write!(w, "{:#}{:#}", HRef::new(did, &last.name), last.params)?;
+ } else {
+ write!(w, "{:?}{:?}", HRef::new(did, &last.name), last.params)?;
+ }
} else {
- let path = if use_absolute {
- match href(did) {
- Some((_, _, fqp)) => format!("{}::{}",
- fqp[..fqp.len()-1].join("::"),
- HRef::new(did, fqp.last().unwrap())),
- None => format!("{}", HRef::new(did, &last.name)),
- }
+ if is_not_debug {
+ let path = if use_absolute {
+ match href(did) {
+ Some((_, _, fqp)) => format!("{}::{}",
+ fqp[..fqp.len()-1].join("::"),
+ HRef::new(did, fqp.last().unwrap())),
+ None => format!("{}", HRef::new(did, &last.name)),
+ }
+ } else {
+ format!("{}", HRef::new(did, &last.name))
+ };
+ write!(w, "{}{}", path, last.params)?;
} else {
- format!("{}", HRef::new(did, &last.name))
- };
- write!(w, "{}{}", path, last.params)?;
+ let path = if use_absolute {
+ match href(did) {
+ Some((_, _, fqp)) => format!("{:?}::{:?}",
+ fqp[..fqp.len()-1].join("::"),
+ HRef::new(did, fqp.last().unwrap())),
+ None => format!("{:?}", HRef::new(did, &last.name)),
+ }
+ } else {
+ format!("{:?}", HRef::new(did, &last.name))
+ };
+ write!(w, "{}{:?}", path, last.params)?;
+ }
}
Ok(())
}
}
}
+impl<'a> fmt::Debug for HRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.text)
+ }
+}
+
fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool,
is_not_debug: bool) -> fmt::Result {
match *t {
}
clean::ResolvedPath{ did, ref typarams, ref path, is_generic } => {
// Paths like T::Output and Self::Output should be rendered with all segments
- resolved_path(f, did, path, is_generic, use_absolute)?;
+ resolved_path(f, did, path, is_generic, use_absolute, is_not_debug)?;
tybounds(f, typarams)
}
clean::Infer => write!(f, "_"),
write!(f, "{}::", self_type)?;
}
let path = clean::Path::singleton(name.clone());
- resolved_path(f, did, &path, true, use_absolute)?;
+ resolved_path(f, did, &path, true, use_absolute, is_not_debug)?;
// FIXME: `typarams` are not rendered, and this seems bad?
drop(typarams);
impl fmt::Display for clean::ImportSource {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.did {
- Some(did) => resolved_path(f, did, &self.path, true, false),
+ Some(did) => resolved_path(f, did, &self.path, true, false, true),
_ => {
for (i, seg) in self.path.segments.iter().enumerate() {
if i > 0 {
.content .method .where,
.content .fn .where,
.content .where.fmt-newline {
- display: block;
+ display: block;
}
/* Bit of whitespace to indent it */
.content .method .where::before,
.content .fn .where::before,
.content .where.fmt-newline::before {
- content: ' ';
+ content: ' ';
}
.content .methods > div { margin-left: 40px; }
}
#help > div {
flex: 0 0 auto;
- background: #e9e9e9;
box-shadow: 0 0 6px rgba(0,0,0,.2);
width: 550px;
height: 330px;
- border: 1px solid #bfbfbf;
+ border: 1px solid;
}
#help dt {
float: left;
border-radius: 4px;
- border: 1px solid #bfbfbf;
- background: #fff;
+ border: 1px solid;
width: 23px;
text-align: center;
clear: left;
.since {
font-weight: normal;
font-size: initial;
- color: grey;
position: absolute;
right: 0;
top: 0;
padding-right: 0px;
}
-.line-numbers :target { background-color: transparent; }
-
-/* Code highlighting */
-pre.rust .kw { color: #8959A8; }
-pre.rust .kw-2, pre.rust .prelude-ty { color: #4271AE; }
-pre.rust .number, pre.rust .string { color: #718C00; }
-pre.rust .self, pre.rust .bool-val, pre.rust .prelude-val,
-pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; }
-pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; }
-pre.rust .lifetime { color: #B76514; }
pre.rust .question-mark {
- color: #ff9011;
font-weight: bold;
}
pre.rust { position: relative; }
a.test-arrow {
- background-color: rgba(78, 139, 202, 0.2);
display: inline-block;
position: absolute;
padding: 5px 10px 5px 10px;
right: 5px;
}
a.test-arrow:hover{
- background-color: #4e8bca;
text-decoration: none;
}
text-align: center;
}
-.toggle-label {
- color: #999;
-}
-
.ghost {
display: none;
}
}
:target > code {
- background: #FDFFD3;
- opacity: 1;
+ opacity: 1;
}
/* Media Queries */
nav.sub, .content .out-of-band, .collapse-toggle {
display: none;
}
-}
+}
\ No newline at end of file
/* General structure and fonts */
body {
- background-color: white;
- color: black;
+ background-color: white;
+ color: black;
}
h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
- color: black;
+ color: black;
}
h1.fqn {
- border-bottom-color: #D5D5D5;
+ border-bottom-color: #D5D5D5;
}
h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
- border-bottom-color: #DDDDDD;
+ border-bottom-color: #DDDDDD;
}
.in-band {
- background-color: white;
+ background-color: white;
}
.docblock code, .docblock-short code {
- background-color: #F5F5F5;
+ background-color: #F5F5F5;
}
pre {
- background-color: #F5F5F5;
+ background-color: #F5F5F5;
+}
+
+.sidebar {
+ background-color: #F1F1F1;
+}
+
+.sidebar .current {
+ background-color: #fff;
}
.sidebar {
}
.sidebar .location {
- border-color: #000;
- background-color: #fff;
- color: #333;
+ border-color: #000;
+ background-color: #fff;
+ color: #333;
}
.block a:hover {
- background: #F5F5F5;
+ background: #F5F5F5;
}
.line-numbers span { color: #c67e2d; }
.line-numbers .line-highlighted {
- background-color: #f6fdb0 !important;
+ background-color: #f6fdb0 !important;
}
:target { background: #FDFFD3; }
.content .highlighted {
- color: #000 !important;
- background-color: #ccc;
+ color: #000 !important;
+ background-color: #ccc;
}
.content .highlighted a, .content .highlighted span { color: #000 !important; }
.content .highlighted.trait { background-color: #fece7e; }
.content .highlighted.type { background-color: #c6afb3; }
.docblock h1, .docblock h2, .docblock h3, .docblock h4, .docblock h5 {
- border-bottom-color: #DDD;
+ border-bottom-color: #DDD;
}
.docblock table {
- border-color: #ddd;
+ border-color: #ddd;
}
.docblock table td {
- border-top-color: #ddd;
- border-bottom-color: #ddd;
+ border-top-color: #ddd;
+ border-bottom-color: #ddd;
}
.docblock table th {
- border-top-color: #ddd;
- border-bottom-color: #ddd;
+ border-top-color: #ddd;
+ border-bottom-color: #ddd;
}
.content span.primitive, .content a.primitive, .block a.current.primitive { color: #39a7bf; }
pre.rust .doccomment { color: #4D4D4C; }
nav {
- border-bottom-color: #e0e0e0;
+ border-bottom-color: #e0e0e0;
}
nav.main .current {
- border-top-color: #000;
- border-bottom-color: #000;
+ border-top-color: #000;
+ border-bottom-color: #000;
}
nav.main .separator {
- border: 1px solid #000;
+ border: 1px solid #000;
}
a {
- color: #000;
+ color: #000;
}
.docblock a, .docblock-short a, .stability a {
- color: #3873AD;
+ color: #3873AD;
}
a.test-arrow {
- color: #f5f5f5;
+ color: #f5f5f5;
}
.content span.trait, .content a.trait, .block a.current.trait { color: #7c5af3; }
.search-input {
- color: #555;
- box-shadow: 0 0 0 1px #e0e0e0, 0 0 0 2px transparent;
- background-color: white;
+ color: #555;
+ box-shadow: 0 0 0 1px #e0e0e0, 0 0 0 2px transparent;
+ background-color: white;
}
.stab.unstable { background: #FFF5D6; border-color: #FFC600; }
.stab.deprecated { background: #F3DFFF; border-color: #7F0087; }
+
+#help > div {
+ background: #e9e9e9;
+ border-color: #bfbfbf;
+}
+
+#help dt {
+ border-color: #bfbfbf;
+ background: #fff;
+}
+
+.since {
+ color: grey;
+}
+
+.line-numbers :target { background-color: transparent; }
+
+/* Code highlighting */
+pre.rust .kw { color: #8959A8; }
+pre.rust .kw-2, pre.rust .prelude-ty { color: #4271AE; }
+pre.rust .number, pre.rust .string { color: #718C00; }
+pre.rust .self, pre.rust .bool-val, pre.rust .prelude-val,
+pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; }
+pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; }
+pre.rust .lifetime { color: #B76514; }
+pre.rust .question-mark {
+ color: #ff9011;
+}
+
+a.test-arrow {
+ background-color: rgba(78, 139, 202, 0.2);
+}
+
+a.test-arrow:hover{
+ background-color: #4e8bca;
+}
+
+.toggle-label {
+ color: #999;
+}
+
+:target > code {
+ background: #FDFFD3;
+}
\ No newline at end of file
use syntax::abi;
use syntax::ast;
use syntax::attr;
+use syntax::tokenstream::TokenStream;
use syntax_pos::Span;
use rustc::hir::map as hir_map;
}
let imported_from = self.cx.sess().cstore.original_crate_name(def_id.krate);
let def = match self.cx.sess().cstore.load_macro(def_id, self.cx.sess()) {
- LoadedMacro::MacroRules(macro_rules) => macro_rules,
+ LoadedMacro::MacroDef(macro_def) => macro_def,
// FIXME(jseyfried): document proc macro reexports
LoadedMacro::ProcMacro(..) => continue,
};
- // FIXME(jseyfried) merge with `self.visit_macro()`
- let tts = def.stream().trees().collect::<Vec<_>>();
- let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
+ let matchers = if let ast::ItemKind::MacroDef(ref tokens) = def.node {
+ let tts: Vec<_> = TokenStream::from(tokens.clone()).into_trees().collect();
+ tts.chunks(4).map(|arm| arm[0].span()).collect()
+ } else {
+ unreachable!()
+ };
om.macros.push(Macro {
def_id: def_id,
attrs: def.attrs.clone().into(),
/// byte was found too early in the slice provided or one wasn't found at all.
#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "cstr_from_bytes", since = "1.10.0")]
-pub struct FromBytesWithNulError { _a: () }
+pub struct FromBytesWithNulError {
+ kind: FromBytesWithNulErrorKind,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+enum FromBytesWithNulErrorKind {
+ InteriorNul(usize),
+ NotNulTerminated,
+}
+
+impl FromBytesWithNulError {
+ fn interior_nul(pos: usize) -> FromBytesWithNulError {
+ FromBytesWithNulError {
+ kind: FromBytesWithNulErrorKind::InteriorNul(pos),
+ }
+ }
+ fn not_nul_terminated() -> FromBytesWithNulError {
+ FromBytesWithNulError {
+ kind: FromBytesWithNulErrorKind::NotNulTerminated,
+ }
+ }
+}
/// An error returned from `CString::into_string` to indicate that a UTF-8 error
/// was encountered during the conversion.
#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")]
impl Error for FromBytesWithNulError {
fn description(&self) -> &str {
- "data provided is not null terminated or contains an interior nul byte"
+ match self.kind {
+ FromBytesWithNulErrorKind::InteriorNul(..) =>
+ "data provided contains an interior nul byte",
+ FromBytesWithNulErrorKind::NotNulTerminated =>
+ "data provided is not nul terminated",
+ }
}
}
#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")]
impl fmt::Display for FromBytesWithNulError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.description().fmt(f)
+ f.write_str(self.description())?;
+ if let FromBytesWithNulErrorKind::InteriorNul(pos) = self.kind {
+ write!(f, " at byte pos {}", pos)?;
+ }
+ Ok(())
}
}
#[stable(feature = "cstr_from_bytes", since = "1.10.0")]
pub fn from_bytes_with_nul(bytes: &[u8])
-> Result<&CStr, FromBytesWithNulError> {
- if bytes.is_empty() || memchr::memchr(0, &bytes) != Some(bytes.len() - 1) {
- Err(FromBytesWithNulError { _a: () })
+ let nul_pos = memchr::memchr(0, bytes);
+ if let Some(nul_pos) = nul_pos {
+ if nul_pos + 1 != bytes.len() {
+ return Err(FromBytesWithNulError::interior_nul(nul_pos));
+ }
+ Ok(unsafe { CStr::from_bytes_with_nul_unchecked(bytes) })
} else {
- Ok(unsafe { Self::from_bytes_with_nul_unchecked(bytes) })
+ Err(FromBytesWithNulError::not_nul_terminated())
}
}
self.inner.reserve_exact(additional)
}
+ /// Shrinks the capacity of the `OsString` to match its length.
+ #[unstable(feature = "osstring_shrink_to_fit", issue = "40421")]
+ pub fn shrink_to_fit(&mut self) {
+ self.inner.shrink_to_fit()
+ }
+
/// Converts this `OsString` into a boxed `OsStr`.
#[unstable(feature = "into_boxed_os_str", issue = "0")]
pub fn into_boxed_os_str(self) -> Box<OsStr> {
self.inner.reserve_exact(additional)
}
+ #[inline]
+ pub fn shrink_to_fit(&mut self) {
+ self.inner.shrink_to_fit()
+ }
+
pub fn as_slice(&self) -> &Slice {
unsafe { mem::transmute(&*self.inner) }
}
self.inner.reserve_exact(additional)
}
+ #[inline]
+ pub fn shrink_to_fit(&mut self) {
+ self.inner.shrink_to_fit()
+ }
+
pub fn as_slice(&self) -> &Slice {
unsafe { mem::transmute(&*self.inner) }
}
self.inner.reserve_exact(additional)
}
+ pub fn shrink_to_fit(&mut self) {
+ self.inner.shrink_to_fit()
+ }
+
#[inline]
pub fn into_box(self) -> Box<Slice> {
unsafe { mem::transmute(self.inner.into_box()) }
msg: *const libc::c_char,
errnum: libc::c_int);
enum backtrace_state {}
-#[link(name = "backtrace", kind = "static")]
-#[cfg(all(not(test), not(cargobuild)))]
-extern {}
extern {
fn backtrace_create_state(filename: *const libc::c_char,
self.bytes.reserve_exact(additional)
}
+ #[inline]
+ pub fn shrink_to_fit(&mut self) {
+ self.bytes.shrink_to_fit()
+ }
+
/// Returns the number of bytes that this string buffer can hold without reallocating.
#[inline]
pub fn capacity(&self) -> usize {
pub module: Mod,
pub attrs: Vec<Attribute>,
pub span: Span,
- pub exported_macros: Vec<MacroDef>,
}
/// A spanned compile-time attribute list item.
Option<TraitRef>, // (optional) trait this impl implements
P<Ty>, // self
Vec<ImplItem>),
- /// A macro invocation (which includes macro definition).
+ /// A macro invocation.
///
/// E.g. `macro_rules! foo { .. }` or `foo!(..)`
Mac(Mac),
+
+ /// A macro definition.
+ MacroDef(ThinTokenStream),
}
impl ItemKind {
ItemKind::Union(..) => "union",
ItemKind::Trait(..) => "trait",
ItemKind::Mac(..) |
+ ItemKind::MacroDef(..) |
ItemKind::Impl(..) |
ItemKind::DefaultImpl(..) => "item"
}
}
}
-/// A macro definition, in this crate or imported from another.
-///
-/// Not parsed directly, but created on macro import or `macro_rules!` expansion.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct MacroDef {
- pub ident: Ident,
- pub attrs: Vec<Attribute>,
- pub id: NodeId,
- pub span: Span,
- pub body: ThinTokenStream,
-}
-
-impl MacroDef {
- pub fn stream(&self) -> TokenStream {
- self.body.clone().into()
- }
-}
-
#[cfg(test)]
mod tests {
use serialize;
use codemap::{self, CodeMap, ExpnInfo, Spanned, respan};
use syntax_pos::{Span, ExpnId, NO_EXPANSION};
use errors::{DiagnosticBuilder, FatalError};
-use ext::expand::{self, Expansion};
+use ext::expand::{self, Expansion, Invocation};
use ext::hygiene::Mark;
use fold::{self, Folder};
use parse::{self, parser, DirectoryOwnership};
fn is_whitelisted_legacy_custom_derive(&self, name: Name) -> bool;
fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion, derives: &[Mark]);
- fn add_ext(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>);
- fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>);
+ fn add_builtin(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>);
fn resolve_imports(&mut self);
// Resolves attribute and derive legacy macros from `#![plugin(..)]`.
fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<Attribute>) -> Option<Attribute>;
- fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind,
- force: bool) -> Result<Rc<SyntaxExtension>, Determinacy>;
+ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy>;
+ fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Rc<SyntaxExtension>, Determinacy>;
}
#[derive(Copy, Clone, Debug)]
fn is_whitelisted_legacy_custom_derive(&self, _name: Name) -> bool { false }
fn visit_expansion(&mut self, _invoc: Mark, _expansion: &Expansion, _derives: &[Mark]) {}
- fn add_ext(&mut self, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
- fn add_expansions_at_stmt(&mut self, _id: ast::NodeId, _macros: Vec<Mark>) {}
+ fn add_builtin(&mut self, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
fn resolve_imports(&mut self) {}
fn find_legacy_attr_invoc(&mut self, _attrs: &mut Vec<Attribute>) -> Option<Attribute> { None }
+ fn resolve_invoc(&mut self, _invoc: &mut Invocation, _scope: Mark, _force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
+ Err(Determinacy::Determined)
+ }
fn resolve_macro(&mut self, _scope: Mark, _path: &ast::Path, _kind: MacroKind,
_force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
Err(Determinacy::Determined)
pub struct Invocation {
pub kind: InvocationKind,
expansion_kind: ExpansionKind,
- expansion_data: ExpansionData,
+ pub expansion_data: ExpansionData,
}
pub enum InvocationKind {
let scope =
if self.monotonic { invoc.expansion_data.mark } else { orig_expansion_data.mark };
- let ext = match self.resolve_invoc(&mut invoc, scope, force) {
+ let ext = match self.cx.resolver.resolve_invoc(&mut invoc, scope, force) {
Ok(ext) => Some(ext),
Err(Determinacy::Determined) => None,
Err(Determinacy::Undetermined) => {
result
}
- fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
- -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
- let (attr, traits, item) = match invoc.kind {
- InvocationKind::Bang { ref mac, .. } => {
- return self.cx.resolver.resolve_macro(scope, &mac.node.path,
- MacroKind::Bang, force).map(Some);
- }
- InvocationKind::Attr { attr: None, .. } => return Ok(None),
- InvocationKind::Derive { name, span, .. } => {
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
- return self.cx.resolver.resolve_macro(scope, &path,
- MacroKind::Derive, force).map(Some)
- }
- InvocationKind::Attr { ref mut attr, ref traits, ref mut item } => (attr, traits, item),
- };
-
- let (attr_name, path) = {
- let attr = attr.as_ref().unwrap();
- (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name())))
- };
-
- let mut determined = true;
- match self.cx.resolver.resolve_macro(scope, &path, MacroKind::Attr, force) {
- Ok(ext) => return Ok(Some(ext)),
- Err(Determinacy::Undetermined) => determined = false,
- Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
- _ => {}
- }
-
- for &(name, span) in traits {
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
- match self.cx.resolver.resolve_macro(scope, &path, MacroKind::Derive, force) {
- Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
- if inert_attrs.contains(&attr_name) {
- // FIXME(jseyfried) Avoid `mem::replace` here.
- let dummy_item = placeholder(ExpansionKind::Items, ast::DUMMY_NODE_ID)
- .make_items().pop().unwrap();
- *item = mem::replace(item, Annotatable::Item(dummy_item))
- .map_attrs(|mut attrs| {
- let inert_attr = attr.take().unwrap();
- attr::mark_known(&inert_attr);
- if self.cx.ecfg.proc_macro_enabled() {
- *attr = find_attr_invoc(&mut attrs);
- }
- attrs.push(inert_attr);
- attrs
- });
- }
- return Err(Determinacy::Undetermined);
- },
- Err(Determinacy::Undetermined) => determined = false,
- Err(Determinacy::Determined) => {}
- }
- }
-
- Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined })
- }
-
fn expand_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) -> Expansion {
match invoc.kind {
InvocationKind::Bang { .. } => self.expand_bang_invoc(invoc, ext),
let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess);
let item_toks = stream_for_item(&item, &self.cx.parse_sess);
+ let span = Span {
+ expn_id: self.cx.codemap().record_expansion(ExpnInfo {
+ call_site: attr.span,
+ callee: NameAndSpan {
+ format: MacroAttribute(name),
+ span: None,
+ allow_internal_unstable: false,
+ },
+ }),
+ ..attr.span
+ };
+
let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
- self.parse_expansion(tok_result, kind, name, attr.span)
+ self.parse_expansion(tok_result, kind, name, span)
}
SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => {
self.cx.span_err(attr.span, &format!("`{}` is a derive mode", name));
let extname = path.segments.last().unwrap().identifier.name;
let ident = ident.unwrap_or(keywords::Invalid.ident());
- let marked_tts = mark_tts(mac.node.stream(), mark);
+ let marked_tts =
+ noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None });
let opt_expanded = match *ext {
NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
if ident.name != keywords::Invalid.name() {
}
}
-fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
+pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
for i in 0 .. attrs.len() {
if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
return Some(attrs.remove(i));
match item.node {
ast::ItemKind::Mac(..) => {
self.check_attributes(&item.attrs);
- let is_macro_def = if let ItemKind::Mac(ref mac) = item.node {
- mac.node.path.segments[0].identifier.name == "macro_rules"
- } else {
- unreachable!()
- };
-
- item.and_then(|mut item| match item.node {
- ItemKind::Mac(_) if is_macro_def => {
- item.id = Mark::fresh().as_placeholder_id();
- SmallVector::one(P(item))
- }
+ item.and_then(|item| match item.node {
ItemKind::Mac(mac) => {
self.collect(ExpansionKind::Items, InvocationKind::Bang {
mac: mac,
}
fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {
- noop_fold_item_kind(self.cfg.configure_item_kind(item), self)
+ match item {
+ ast::ItemKind::MacroDef(..) => item,
+ _ => noop_fold_item_kind(self.cfg.configure_item_kind(item), self),
+ }
}
fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId {
span
}
}
-
-// apply a given mark to the given token trees. Used prior to expansion of a macro.
-pub fn mark_tts(tts: TokenStream, m: Mark) -> TokenStream {
- noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
-}
}
/// A mark is a unique id associated with a macro expansion.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default, RustcEncodable, RustcDecodable)]
pub struct Mark(u32);
impl Mark {
})
})
}
-
- /// If `ident` is macro expanded, return the source ident from the macro definition
- /// and the mark of the expansion that created the macro definition.
- pub fn source(self) -> (Self /* source context */, Mark /* source macro */) {
- let macro_def_ctxt = self.data().prev_ctxt.data();
- (macro_def_ctxt.prev_ctxt, macro_def_ctxt.outer_mark)
- }
}
impl fmt::Debug for SyntaxContext {
use util::small_vector::SmallVector;
use std::collections::HashMap;
-use std::mem;
pub fn placeholder(kind: ExpansionKind, id: ast::NodeId) -> Expansion {
fn mac_placeholder() -> ast::Mac {
fn fold_block(&mut self, block: P<ast::Block>) -> P<ast::Block> {
noop_fold_block(block, self).map(|mut block| {
- let mut macros = Vec::new();
let mut remaining_stmts = block.stmts.len();
block.stmts = block.stmts.move_flat_map(|mut stmt| {
remaining_stmts -= 1;
- // `macro_rules!` macro definition
- if let ast::StmtKind::Item(ref item) = stmt.node {
- if let ast::ItemKind::Mac(_) = item.node {
- macros.push(Mark::from_placeholder_id(item.id));
- return None;
- }
- }
-
match stmt.node {
// Avoid wasting a node id on a trailing expression statement,
// which shares a HIR node with the expression itself.
_ => {}
}
- if self.monotonic && !macros.is_empty() {
- let macros = mem::replace(&mut macros, Vec::new());
- self.cx.resolver.add_expansions_at_stmt(stmt.id, macros);
- }
-
Some(stmt)
});
// Holy self-referential!
/// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
+pub fn compile(sess: &ParseSess, def: &ast::Item) -> SyntaxExtension {
let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
];
// Parse the macro_rules! invocation
- let argument_map = match parse(sess, def.body.clone().into(), &argument_gram, None) {
+ let body = match def.node {
+ ast::ItemKind::MacroDef(ref body) => body.clone().into(),
+ _ => unreachable!(),
+ };
+ let argument_map = match parse(sess, body, &argument_gram, None) {
Success(m) => m,
Failure(sp, tok) => {
let s = parse_failure_msg(tok);
items.move_flat_map(|item| folder.fold_trait_item(item)),
),
ItemKind::Mac(m) => ItemKind::Mac(folder.fold_mac(m)),
+ ItemKind::MacroDef(tts) => ItemKind::MacroDef(folder.fold_tts(tts.into()).into()),
}
}
}
}
-pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, mut exported_macros, span}: Crate,
+pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, span}: Crate,
folder: &mut T) -> Crate {
let mut items = folder.fold_item(P(ast::Item {
ident: keywords::Invalid.ident(),
}, vec![], span)
};
- for def in &mut exported_macros {
- def.id = folder.new_id(def.id);
- }
-
Crate {
module: module,
attrs: attrs,
- exported_macros: exported_macros,
span: span,
}
}
matches_codepattern,
"matches_codepattern",
pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
- "zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
+ "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
}
}
self.expected_tokens.clear();
}
- pub fn look_ahead<R, F>(&mut self, dist: usize, f: F) -> R where
+ pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
F: FnOnce(&token::Token) -> R,
{
if dist == 0 {
})
}
- fn is_union_item(&mut self) -> bool {
+ fn is_union_item(&self) -> bool {
self.token.is_keyword(keywords::Union) &&
self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword())
}
+ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility)
+ -> PResult<'a, Option<P<Item>>> {
+ let lo = self.span.lo;
+ match self.token {
+ token::Ident(ident) if ident.name == "macro_rules" => {
+ if self.look_ahead(1, |t| *t == token::Not) {
+ let prev_span = self.prev_span;
+ self.complain_if_pub_macro(vis, prev_span);
+ self.bump();
+ self.bump();
+ }
+ }
+ _ => return Ok(None),
+ };
+
+ let id = self.parse_ident()?;
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ if delim != token::Brace {
+ if !self.eat(&token::Semi) {
+ let msg = "macros that expand to items must either be surrounded with braces \
+ or followed by a semicolon";
+ self.span_err(self.prev_span, msg);
+ }
+ }
+
+ let hi = self.prev_span.hi;
+ let kind = ItemKind::MacroDef(tts);
+ Ok(Some(self.mk_item(lo, hi, id, kind, Visibility::Inherited, attrs.to_owned())))
+ }
+
fn parse_stmt_without_recovery(&mut self,
macro_legacy_warnings: bool)
-> PResult<'a, Option<Stmt>> {
node: StmtKind::Local(self.parse_local(attrs.into())?),
span: mk_sp(lo, self.prev_span.hi),
}
+ } else if let Some(macro_def) = self.eat_macro_def(&attrs, &Visibility::Inherited)? {
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Item(macro_def),
+ span: mk_sp(lo, self.prev_span.hi),
+ }
// Starts like a simple path, but not a union item.
} else if self.token.is_path_start() &&
!self.token.is_qpath_start() &&
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
+ if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility)? {
+ return Ok(Some(macro_def));
+ }
+
self.parse_macro_use_or_failure(attrs,macros_allowed,attributes_allowed,lo,visibility)
}
attrs: self.parse_inner_attributes()?,
module: self.parse_mod_items(&token::Eof, lo)?,
span: mk_sp(lo, self.span.lo),
- exported_macros: Vec::new(),
})
}
self.bclose(item.span)?;
}
ast::ItemKind::Mac(codemap::Spanned { ref node, .. }) => {
- self.print_visibility(&item.vis)?;
self.print_path(&node.path, false, 0, false)?;
word(&mut self.s, "! ")?;
self.print_ident(item.ident)?;
word(&mut self.s, ";")?;
self.end()?;
}
+ ast::ItemKind::MacroDef(ref tts) => {
+ word(&mut self.s, "macro_rules! ")?;
+ self.print_ident(item.ident)?;
+ self.cbox(INDENT_UNIT)?;
+ self.popen()?;
+ self.print_tts(tts.clone().into())?;
+ self.pclose()?;
+ word(&mut self.s, ";")?;
+ self.end()?;
+ }
}
self.ann.post(self, NodeItem(item))
}
fn visit_attribute(&mut self, _attr: &Attribute) {
self.count += 1;
}
- fn visit_macro_def(&mut self, macro_def: &MacroDef) {
- self.count += 1;
- walk_macro_def(self, macro_def)
- }
-
}
walk_assoc_type_binding(self, type_binding)
}
fn visit_attribute(&mut self, _attr: &'ast Attribute) {}
- fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) {
- walk_macro_def(self, macro_def)
- }
fn visit_vis(&mut self, vis: &'ast Visibility) {
walk_vis(self, vis)
}
pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) {
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
walk_list!(visitor, visit_attribute, &krate.attrs);
- walk_list!(visitor, visit_macro_def, &krate.exported_macros);
-}
-
-pub fn walk_macro_def<'a, V: Visitor<'a>>(visitor: &mut V, macro_def: &'a MacroDef) {
- visitor.visit_ident(macro_def.span, macro_def.ident);
- walk_list!(visitor, visit_attribute, &macro_def.attrs);
}
pub fn walk_mod<'a, V: Visitor<'a>>(visitor: &mut V, module: &'a Mod) {
walk_list!(visitor, visit_trait_item, methods);
}
ItemKind::Mac(ref mac) => visitor.visit_mac(mac),
+ ItemKind::MacroDef(..) => {},
}
walk_list!(visitor, visit_attribute, &item.attrs);
}
pub fn register_builtin_derives(resolver: &mut Resolver) {
$(
- resolver.add_ext(
+ resolver.add_builtin(
ast::Ident::with_empty_ctxt(Symbol::intern($name)),
Rc::new(SyntaxExtension::BuiltinDerive($func))
);
deriving::register_builtin_derives(resolver);
let mut register = |name, ext| {
- resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext));
+ resolver.add_builtin(ast::Ident::with_empty_ctxt(name), Rc::new(ext));
};
macro_rules! register {
krate.module.items.push(mk_registrar(&mut cx, &derives, &attr_macros, &bang_macros));
- if krate.exported_macros.len() > 0 {
- handler.err("cannot export macro_rules! macros from a `proc-macro` \
- crate type currently");
- }
-
- return krate
+ krate
}
fn is_proc_macro_attr(attr: &ast::Attribute) -> bool {
impl<'a> Visitor<'a> for CollectProcMacros<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
+ if let ast::ItemKind::MacroDef(..) = item.node {
+ if self.is_proc_macro_crate &&
+ item.attrs.iter().any(|attr| attr.name() == "macro_export") {
+ let msg =
+ "cannot export macro_rules! macros from a `proc-macro` crate type currently";
+ self.handler.span_err(item.span, msg);
+ }
+ }
+
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.
//
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn prove_static<T: 'static + ?Sized>(_: &'static T) {}
+
+fn lifetime_transmute_slice<'a, T: ?Sized>(x: &'a T, y: &T) -> &'a T {
+ let mut out = [x];
+ //~^ ERROR cannot infer an appropriate lifetime due to conflicting requirements
+ {
+ let slice: &mut [_] = &mut out;
+ slice[0] = y;
+ }
+ out[0]
+}
+
+struct Struct<T, U: ?Sized> {
+ head: T,
+ _tail: U
+}
+
+fn lifetime_transmute_struct<'a, T: ?Sized>(x: &'a T, y: &T) -> &'a T {
+ let mut out = Struct { head: x, _tail: [()] };
+ //~^ ERROR cannot infer an appropriate lifetime due to conflicting requirements
+ {
+ let dst: &mut Struct<_, [()]> = &mut out;
+ dst.head = y;
+ }
+ out.head
+}
+
+fn main() {
+ prove_static(lifetime_transmute_slice("", &String::from("foo")));
+ prove_static(lifetime_transmute_struct("", &String::from("bar")));
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn save_ref<'a>(refr: &'a i32, to: &mut [&'a i32]) {
+ for val in &mut *to {
+ *val = refr;
+ }
+}
+
+fn main() {
+ let ref init = 0i32;
+ let ref mut refr = 1i32;
+
+ let mut out = [init];
+
+ save_ref(&*refr, &mut out);
+
+ // This shouldn't be allowed as `refr` is borrowed
+ *refr = 3; //~ ERROR cannot assign to `*refr` because it is borrowed
+
+ // Prints 3?!
+ println!("{:?}", out[0]);
+}
+++ /dev/null
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Exercise the unused_unsafe attribute in some positive and negative cases
-
-#![allow(dead_code)]
-#![deny(unused_unsafe)]
-
-
-mod foo {
- extern {
- pub fn bar();
- }
-}
-
-fn callback<T, F>(_f: F) -> T where F: FnOnce() -> T { panic!() }
-unsafe fn unsf() {}
-
-fn bad1() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
-fn bad2() { unsafe { bad1() } } //~ ERROR: unnecessary `unsafe` block
-unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
-fn bad4() { unsafe { callback(||{}) } } //~ ERROR: unnecessary `unsafe` block
-unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
-fn bad6() {
- unsafe { // don't put the warning here
- unsafe { //~ ERROR: unnecessary `unsafe` block
- unsf()
- }
- }
-}
-unsafe fn bad7() {
- unsafe { //~ ERROR: unnecessary `unsafe` block
- unsafe { //~ ERROR: unnecessary `unsafe` block
- unsf()
- }
- }
-}
-
-unsafe fn good0() { unsf() }
-fn good1() { unsafe { unsf() } }
-fn good2() {
- /* bug uncovered when implementing warning about unused unsafe blocks. Be
- sure that when purity is inherited that the source of the unsafe-ness
- is tracked correctly */
- unsafe {
- unsafe fn what() -> Vec<String> { panic!() }
-
- callback(|| {
- what();
- });
- }
-}
-
-unsafe fn good3() { foo::bar() }
-fn good4() { unsafe { foo::bar() } }
-
-#[allow(unused_unsafe)] fn allowed() { unsafe {} }
-
-fn main() {}
// which fails to type check.
ss
- //~^ ERROR lifetime bound not satisfied
+ //~^ ERROR cannot infer
//~| ERROR cannot infer
}
// `Box<SomeTrait>` defaults to a `'static` bound, so this return
// is illegal.
- ss.r //~ ERROR lifetime bound not satisfied
+ ss.r //~ ERROR cannot infer an appropriate lifetime
}
fn store(ss: &mut SomeStruct, b: Box<SomeTrait>) {
fn store1<'b>(ss: &mut SomeStruct, b: Box<SomeTrait+'b>) {
// Here we override the lifetimes explicitly, and so naturally we get an error.
- ss.r = b; //~ ERROR lifetime bound not satisfied
+ ss.r = b; //~ ERROR cannot infer an appropriate lifetime
}
fn main() {
fn make_object_bad<'a,'b,'c,A:SomeTrait+'a+'b>(v: A) -> Box<SomeTrait+'c> {
// A outlives 'a AND 'b...but not 'c.
- box v as Box<SomeTrait+'a> //~ ERROR lifetime bound not satisfied
+ box v as Box<SomeTrait+'a> //~ ERROR cannot infer an appropriate lifetime
}
fn main() {
fn static_proc(x: &isize) -> Box<FnMut()->(isize) + 'static> {
// This is illegal, because the region bound on `proc` is 'static.
- Box::new(move|| { *x }) //~ ERROR does not fulfill the required lifetime
+ Box::new(move|| { *x }) //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
fn foo3<'a,'b>(x: &'a mut Dummy) -> &'b mut Dummy {
// Without knowing 'a:'b, we can't coerce
- x //~ ERROR lifetime bound not satisfied
- //~^ ERROR cannot infer
+ x //~ ERROR cannot infer an appropriate lifetime
+ //~^ ERROR cannot infer an appropriate lifetime
}
struct Wrapper<T>(T);
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
where 'max : 'min
{
// Previously OK:
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
where 'max : 'min
{
// Previously OK, now an error as traits are invariant.
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
// === GDB TESTS ===================================================================================
-// gdb-command:print 'c_style_enum::SINGLE_VARIANT'
+// gdbg-command:print 'c_style_enum::SINGLE_VARIANT'
+// gdbr-command:print c_style_enum::SINGLE_VARIANT
// gdbg-check:$1 = TheOnlyVariant
// gdbr-check:$1 = c_style_enum::SingleVariant::TheOnlyVariant
-// gdb-command:print 'c_style_enum::AUTO_ONE'
+// gdbg-command:print 'c_style_enum::AUTO_ONE'
+// gdbr-command:print c_style_enum::AUTO_ONE
// gdbg-check:$2 = One
// gdbr-check:$2 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::AUTO_TWO'
+// gdbg-command:print 'c_style_enum::AUTO_TWO'
+// gdbr-command:print c_style_enum::AUTO_TWO
// gdbg-check:$3 = One
// gdbr-check:$3 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::AUTO_THREE'
+// gdbg-command:print 'c_style_enum::AUTO_THREE'
+// gdbr-command:print c_style_enum::AUTO_THREE
// gdbg-check:$4 = One
// gdbr-check:$4 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::MANUAL_ONE'
+// gdbg-command:print 'c_style_enum::MANUAL_ONE'
+// gdbr-command:print c_style_enum::MANUAL_ONE
// gdbg-check:$5 = OneHundred
// gdbr-check:$5 = c_style_enum::ManualDiscriminant::OneHundred
-// gdb-command:print 'c_style_enum::MANUAL_TWO'
+// gdbg-command:print 'c_style_enum::MANUAL_TWO'
+// gdbr-command:print c_style_enum::MANUAL_TWO
// gdbg-check:$6 = OneHundred
// gdbr-check:$6 = c_style_enum::ManualDiscriminant::OneHundred
-// gdb-command:print 'c_style_enum::MANUAL_THREE'
+// gdbg-command:print 'c_style_enum::MANUAL_THREE'
+// gdbr-command:print c_style_enum::MANUAL_THREE
// gdbg-check:$7 = OneHundred
// gdbr-check:$7 = c_style_enum::ManualDiscriminant::OneHundred
// Make sure functions have proper names
// gdb-command:info functions
-// gdb-check:[...]void[...]main([...]);
-// gdb-check:[...]void[...]some_function([...]);
-// gdb-check:[...]void[...]some_other_function([...]);
-// gdb-check:[...]void[...]zzz([...]);
+// gdbg-check:[...]void[...]main([...]);
+// gdbr-check:fn limited_debuginfo::main();
+// gdbg-check:[...]void[...]some_function([...]);
+// gdbr-check:fn limited_debuginfo::some_function();
+// gdbg-check:[...]void[...]some_other_function([...]);
+// gdbr-check:fn limited_debuginfo::some_other_function();
+// gdbg-check:[...]void[...]zzz([...]);
+// gdbr-check:fn limited_debuginfo::zzz();
// gdb-command:run
// === GDB TESTS ===================================================================================
-// there's no frame yet for gdb to reliably detect the language, set it explicitly
-// gdbr-command:set language rust
-
// gdbg-command:print 'simple_struct::NO_PADDING_16'
// gdbr-command:print simple_struct::NO_PADDING_16
// gdbg-check:$1 = {x = 1000, y = -1001}
// === GDB TESTS ===================================================================================
-// there's no frame yet for gdb to reliably detect the language, set it explicitly
-// gdbr-command:set language rust
-
// gdbg-command:print/d 'simple_tuple::NO_PADDING_8'
// gdbr-command:print simple_tuple::NO_PADDING_8
// gdbg-check:$1 = {__0 = -50, __1 = 50}
// }
//
// bb2: {
-// StorageLive(_6);
// _0 = ();
// StorageDead(_4);
// StorageDead(_1);
--- /dev/null
+-include ../tools.mk
+
+all:
+ $(RUSTC) foo.rs --emit dep-info
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// We're only emitting dep info, so we shouldn't be running static analysis to
+// figure out that this program is erroneous.
+fn main() {
+ let a: u8 = "a";
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::fmt;
+
#[repr(packed)]
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone)]
struct Foo {
a: i8,
b: i16,
c: i8
}
+impl PartialEq for Foo {
+ fn eq(&self, other: &Foo) -> bool {
+ self.a == other.a && self.b == other.b && self.c == other.c
+ }
+}
+
+impl fmt::Debug for Foo {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let a = self.a;
+ let b = self.b;
+ let c = self.c;
+
+ f.debug_struct("Foo")
+ .field("a", &a)
+ .field("b", &b)
+ .field("c", &c)
+ .finish()
+ }
+}
+
#[link(name = "test", kind = "static")]
extern {
fn foo(f: Foo) -> Foo;
#![feature(no_core)]
#![no_core]
+macro_rules! foo /* 60#0 */(( $ x : ident ) => { y + $ x });
fn bar /* 62#0 */() { let x /* 59#2 */ = 1; y /* 61#4 */ + x /* 59#5 */ }
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// force-host
-
-#![feature(plugin_registrar, rustc_private)]
-#![feature(box_syntax)]
-
-#[macro_use] extern crate rustc;
-extern crate rustc_plugin;
-extern crate rustc_const_math;
-extern crate syntax;
-
-use rustc::mir::transform::{self, MirPass, MirSource};
-use rustc::mir::{Mir, Literal, Location};
-use rustc::mir::visit::MutVisitor;
-use rustc::ty::TyCtxt;
-use rustc::middle::const_val::ConstVal;
-use rustc_const_math::ConstInt;
-use rustc_plugin::Registry;
-
-struct Pass;
-
-impl transform::Pass for Pass {}
-
-impl<'tcx> MirPass<'tcx> for Pass {
- fn run_pass<'a>(&mut self, _: TyCtxt<'a, 'tcx, 'tcx>,
- _: MirSource, mir: &mut Mir<'tcx>) {
- Visitor.visit_mir(mir)
- }
-}
-
-struct Visitor;
-
-impl<'tcx> MutVisitor<'tcx> for Visitor {
- fn visit_literal(&mut self, literal: &mut Literal<'tcx>, _: Location) {
- if let Literal::Value { ref mut value } = *literal {
- if let ConstVal::Integral(ConstInt::I32(ref mut i @ 11)) = *value {
- *i = 42;
- }
- }
- }
-}
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
- reg.register_mir_pass(box Pass);
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:dummy_mir_pass.rs
-// ignore-stage1
-
-#![feature(plugin)]
-#![plugin(dummy_mir_pass)]
-
-fn math() -> i32 {
- 11
-}
-
-pub fn main() {
- assert_eq!(math(), 42);
-}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// pretty-expanded FIXME #23616
+
+use std::rc::Rc;
+
+fn lub_short<'a, T>(_: &[&'a T], _: &[&'a T]) {}
+
+// The two arguments are a subtype of their LUB, after coercion.
+fn long_and_short<'a, T>(xs: &[&'static T; 1], ys: &[&'a T; 1]) {
+ lub_short(xs, ys);
+}
+
+// The argument coerces to a subtype of the return type.
+fn long_to_short<'a, 'b, T>(xs: &'b [&'static T; 1]) -> &'b [&'a T] {
+ xs
+}
+
+// Rc<T> is covariant over T just like &T.
+fn long_to_short_rc<'a, T>(xs: Rc<[&'static T; 1]>) -> Rc<[&'a T]> {
+ xs
+}
+
+// LUB-coercion (if-else/match/array) coerces `xs: &'b [&'static T: N]`
+// to a subtype of the LUB of `xs` and `ys` (i.e. `&'b [&'a T]`),
+// regardless of the order they appear (in if-else/match/array).
+fn long_and_short_lub1<'a, 'b, T>(xs: &'b [&'static T; 1], ys: &'b [&'a T]) {
+ let _order1 = [xs, ys];
+ let _order2 = [ys, xs];
+}
+
+// LUB-coercion should also have the exact same effect when `&'b [&'a T; N]`
+// needs to be coerced, i.e. the resulting type is not &'b [&'static T], but
+// rather the `&'b [&'a T]` LUB.
+fn long_and_short_lub2<'a, 'b, T>(xs: &'b [&'static T], ys: &'b [&'a T; 1]) {
+ let _order1 = [xs, ys];
+ let _order2 = [ys, xs];
+}
+
+fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::fmt;
use std::mem;
#[repr(packed)]
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone)]
struct Foo {
bar: u8,
baz: u64
}
+impl PartialEq for Foo {
+ fn eq(&self, other: &Foo) -> bool {
+ self.bar == other.bar && self.baz == other.baz
+ }
+}
+
+impl fmt::Debug for Foo {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let bar = self.bar;
+ let baz = self.baz;
+
+ f.debug_struct("Foo")
+ .field("bar", &bar)
+ .field("baz", &baz)
+ .finish()
+ }
+}
+
pub fn main() {
let foos = [Foo { bar: 1, baz: 2 }; 10];
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
- let (x,) = (vec![],);
-}
+++ /dev/null
-error[E0282]: type annotations needed
- --> $DIR/issue-38812-2.rs:12:17
- |
-12 | let (x,) = (vec![],);
- | ---- ^^^^^^ cannot infer type for `T`
- | |
- | consider giving a type to pattern
- |
- = note: this error originates in a macro outside of the current crate
-
-error: aborting due to previous error
-
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
- let x = vec![];
-}
+++ /dev/null
-error[E0282]: type annotations needed
- --> $DIR/issue-38812.rs:12:13
- |
-12 | let x = vec![];
- | - ^^^^^^ cannot infer type for `T`
- | |
- | consider giving `x` a type
- |
- = note: this error originates in a macro outside of the current crate
-
-error: aborting due to previous error
-
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main () {
+ {println!("{:?}", match { let foo = vec![1, 2]; foo.get(1) } { x => x });}
+}
--- /dev/null
+error: `foo` does not live long enough
+ --> $DIR/issue-40157.rs:12:64
+ |
+12 | {println!("{:?}", match { let foo = vec![1, 2]; foo.get(1) } { x => x });}
+ | ----------------------------------------------------------^-------------
+ | | | |
+ | | | `foo` dropped here while still borrowed
+ | | borrow occurs here
+ | borrowed value needs to live until here
+ |
+ = note: this error originates in a macro outside of the current crate
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Exercise the unused_unsafe attribute in some positive and negative cases
+
+#![allow(dead_code)]
+#![deny(unused_unsafe)]
+
+
+mod foo {
+ extern {
+ pub fn bar();
+ }
+}
+
+fn callback<T, F>(_f: F) -> T where F: FnOnce() -> T { panic!() }
+unsafe fn unsf() {}
+
+fn bad1() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+fn bad2() { unsafe { bad1() } } //~ ERROR: unnecessary `unsafe` block
+unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+fn bad4() { unsafe { callback(||{}) } } //~ ERROR: unnecessary `unsafe` block
+unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
+fn bad6() {
+ unsafe { // don't put the warning here
+ unsafe { //~ ERROR: unnecessary `unsafe` block
+ unsf()
+ }
+ }
+}
+unsafe fn bad7() {
+ unsafe { //~ ERROR: unnecessary `unsafe` block
+ unsafe { //~ ERROR: unnecessary `unsafe` block
+ unsf()
+ }
+ }
+}
+
+unsafe fn good0() { unsf() }
+fn good1() { unsafe { unsf() } }
+fn good2() {
+ /* bug uncovered when implementing warning about unused unsafe blocks. Be
+ sure that when purity is inherited that the source of the unsafe-ness
+ is tracked correctly */
+ unsafe {
+ unsafe fn what() -> Vec<String> { panic!() }
+
+ callback(|| {
+ what();
+ });
+ }
+}
+
+unsafe fn good3() { foo::bar() }
+fn good4() { unsafe { foo::bar() } }
+
+#[allow(unused_unsafe)] fn allowed() { unsafe {} }
+
+fn main() {}
--- /dev/null
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:26:13
+ |
+26 | fn bad1() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^ unnecessary `unsafe` block
+ |
+note: lint level defined here
+ --> $DIR/lint-unused-unsafe.rs:14:9
+ |
+14 | #![deny(unused_unsafe)]
+ | ^^^^^^^^^^^^^
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:27:13
+ |
+27 | fn bad2() { unsafe { bad1() } } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^ unnecessary `unsafe` block
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:28:20
+ |
+28 | unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^ unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` fn
+ --> $DIR/lint-unused-unsafe.rs:28:1
+ |
+28 | unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:29:13
+ |
+29 | fn bad4() { unsafe { callback(||{}) } } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ unnecessary `unsafe` block
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:30:20
+ |
+30 | unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^ unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` fn
+ --> $DIR/lint-unused-unsafe.rs:30:1
+ |
+30 | unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:33:9
+ |
+33 | unsafe { //~ ERROR: unnecessary `unsafe` block
+ | _________^ starting here...
+34 | | unsf()
+35 | | }
+ | |_________^ ...ending here: unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:32:5
+ |
+32 | unsafe { // don't put the warning here
+ | _____^ starting here...
+33 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+34 | | unsf()
+35 | | }
+36 | | }
+ | |_____^ ...ending here
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:39:5
+ |
+39 | unsafe { //~ ERROR: unnecessary `unsafe` block
+ | _____^ starting here...
+40 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+41 | | unsf()
+42 | | }
+43 | | }
+ | |_____^ ...ending here: unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` fn
+ --> $DIR/lint-unused-unsafe.rs:38:1
+ |
+38 | unsafe fn bad7() {
+ | _^ starting here...
+39 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+40 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+41 | | unsf()
+42 | | }
+43 | | }
+44 | | }
+ | |_^ ...ending here
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:40:9
+ |
+40 | unsafe { //~ ERROR: unnecessary `unsafe` block
+ | _________^ starting here...
+41 | | unsf()
+42 | | }
+ | |_________^ ...ending here: unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` fn
+ --> $DIR/lint-unused-unsafe.rs:38:1
+ |
+38 | unsafe fn bad7() {
+ | _^ starting here...
+39 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+40 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+41 | | unsf()
+42 | | }
+43 | | }
+44 | | }
+ | |_^ ...ending here
+
+error: aborting due to 8 previous errors
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+fn main() {
+ let tup = (1,);
+ println!("☃{}", tup[0]);
+}
+
--- /dev/null
+error: cannot index a value of type `({integer},)`
+ --> $DIR/suggestion-non-ascii.rs:14:21
+ |
+14 | println!("☃{}", tup[0]);
+ | ^^^^^^
+ |
+help: to access tuple elements, use tuple indexing syntax as shown
+ | println!("☃{}", tup.0);
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let (x,) = (vec![],);
+}
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-38812-2.rs:12:17
+ |
+12 | let (x,) = (vec![],);
+ | ---- ^^^^^^ cannot infer type for `T`
+ | |
+ | consider giving a type to pattern
+ |
+ = note: this error originates in a macro outside of the current crate
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let x = vec![];
+}
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-38812.rs:12:13
+ |
+12 | let x = vec![];
+ | - ^^^^^^ cannot infer type for `T`
+ | |
+ | consider giving `x` a type
+ |
+ = note: this error originates in a macro outside of the current crate
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo: Sized {
+ fn foo(self);
+}
+
+fn foo<'a,'b,T>(x: &'a T, y: &'b T)
+ where &'a T : Foo,
+ &'b T : Foo
+{
+ x.foo();
+ y.foo();
+}
+
+fn main() { }
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-40294.rs:15:1
+ |
+15 | fn foo<'a,'b,T>(x: &'a T, y: &'b T)
+ | _^ starting here...
+16 | | where &'a T : Foo,
+17 | | &'b T : Foo
+18 | | {
+19 | | x.foo();
+20 | | y.foo();
+21 | | }
+ | |_^ ...ending here: cannot infer type for `&'a T`
+
+error: aborting due to previous error
+
static TARGETS: &'static [&'static str] = &[
"aarch64-apple-ios",
+ "aarch64-unknown-fuchsia",
"aarch64-linux-android",
"aarch64-unknown-linux-gnu",
"arm-linux-androideabi",
"x86_64-pc-windows-msvc",
"x86_64-rumprun-netbsd",
"x86_64-unknown-freebsd",
+ "x86_64-unknown-fuchsia",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"x86_64-unknown-netbsd",
let mut manifest = BTreeMap::new();
manifest.insert("manifest-version".to_string(),
toml::Value::String(manifest_version));
- manifest.insert("date".to_string(), toml::Value::String(date));
+ manifest.insert("date".to_string(), toml::Value::String(date.clone()));
manifest.insert("pkg".to_string(), toml::encode(&pkg));
let manifest = toml::Value::Table(manifest).to_string();
let filename = format!("channel-rust-{}.toml", self.rust_release);
self.write_manifest(&manifest, &filename);
+ let filename = format!("channel-rust-{}-date.txt", self.rust_release);
+ self.write_date_stamp(&date, &filename);
+
if self.rust_release != "beta" && self.rust_release != "nightly" {
self.write_manifest(&manifest, "channel-rust-stable.toml");
+ self.write_date_stamp(&date, "channel-rust-stable-date.txt");
}
}
self.package("rust-docs", &mut manifest.pkg, TARGETS);
self.package("rust-src", &mut manifest.pkg, &["*"]);
- if self.channel == "nightly" {
+ if self.rust_release == "nightly" {
self.package("rust-analysis", &mut manifest.pkg, TARGETS);
}
target: target.to_string(),
});
}
- if self.channel == "nightly" {
+ if self.rust_release == "nightly" {
extensions.push(Component {
pkg: "rust-analysis".to_string(),
target: target.to_string(),
self.hash(&dst);
self.sign(&dst);
}
+
+ fn write_date_stamp(&self, date: &str, name: &str) {
+ let dst = self.output.join(name);
+ t!(t!(File::create(&dst)).write_all(date.as_bytes()));
+ self.hash(&dst);
+ self.sign(&dst);
+ }
}
+++ /dev/null
-Subproject commit d17b61aa5a2ca790f268a043bffdb0ffb04f0ec7