path = src/doc/nomicon
url = https://github.com/rust-lang-nursery/nomicon
[submodule "src/tools/cargo"]
- path = src/tools/cargo
+ path = cargo
url = https://github.com/rust-lang/cargo
[submodule "reference"]
path = src/doc/reference
- env: IMAGE=arm-android
- env: IMAGE=armhf-gnu
- env: IMAGE=cross DEPLOY=1
- - env: IMAGE=linux-tested-targets DEPLOY=1
- env: IMAGE=dist-android DEPLOY=1
- env: IMAGE=dist-arm-linux DEPLOY=1
- env: IMAGE=dist-armv7-aarch64-linux DEPLOY=1
- env: IMAGE=dist-freebsd DEPLOY=1
+ - env: IMAGE=dist-i586-gnu-i686-musl DEPLOY=1
+ - env: IMAGE=dist-fuchsia DEPLOY=1
- env: IMAGE=dist-mips-linux DEPLOY=1
- env: IMAGE=dist-mips64-linux DEPLOY=1
- env: IMAGE=dist-powerpc-linux DEPLOY=1
- env: IMAGE=dist-powerpc64-linux DEPLOY=1
- env: IMAGE=dist-s390x-linux-netbsd DEPLOY=1
- env: IMAGE=dist-x86-linux DEPLOY=1
+ - env: IMAGE=dist-x86_64-musl DEPLOY=1
- env: IMAGE=emscripten
- env: IMAGE=i686-gnu
- env: IMAGE=i686-gnu-nopt
RUST_CHECK_TARGET=check
RUST_CONFIGURE_ARGS=--build=x86_64-apple-darwin
SRC=.
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache=debug
os: osx
osx_image: xcode8.2
install: &osx_install_sccache >
RUST_CHECK_TARGET=check
RUST_CONFIGURE_ARGS=--build=i686-apple-darwin
SRC=.
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache=debug
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
RUST_CONFIGURE_ARGS="--build=i686-apple-darwin --enable-extended"
SRC=.
DEPLOY=1
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache=debug
os: osx
osx_image: xcode8.2
install: >
travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-apple-darwin &&
- chmod +x /usr/local/bin/sccache &&
- brew uninstall --ignore-dependencies openssl &&
- brew install openssl --universal --without-test
+ chmod +x /usr/local/bin/sccache
- env: >
RUST_CHECK_TARGET=dist
RUST_CONFIGURE_ARGS="--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios --enable-extended"
SRC=.
DEPLOY=1
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache=debug
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
RUST_CONFIGURE_ARGS="--enable-extended"
SRC=.
DEPLOY_ALT=1
+ RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache=debug
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
echo "#### Build failed; Disk usage after running script:";
df -h;
du . | sort -nr | head -n100
+ - cat obj/tmp/sccache.log
+ - cat /tmp/sccache.log
# Save tagged docker images we created and load them if they're available
before_cache:
$ ./x.py doc
```
-The generated documentation will appear in a top-level `doc` directory,
-created by the `make` rule.
+The generated documentation will appear under `doc` in the `build` directory for
+the ABI used. E.g., if the ABI is `x86_64-pc-windows-msvc`, the directory will be
+`build\x86_64-pc-windows-msvc\doc`.
## Notes
+Version 1.16.0 (2017-03-16)
+===========================
+
+Language
+--------
+
+* Lifetimes in statics and consts default to `'static`. [RFC 1623]
+* [The compiler's `dead_code` lint now accounts for type aliases][38051].
+* [Uninhabitable enums (those without any variants) no longer permit wildcard
+ match patterns][38069]
+* [Clean up semantics of `self` in an import list][38313]
+* [`Self` may appear in `impl` headers][38920]
+* [`Self` may appear in struct expressions][39282]
+
+Compiler
+--------
+
+* [`rustc` now supports `--emit=metadata`, which causes rustc to emit
+ a `.rmeta` file containing only crate metadata][38571]. This can be
+ used by tools like the Rust Language Service to perform
+ metadata-only builds.
+* [Levenshtein based typo suggestions now work in most places, while
+ previously they worked only for fields and sometimes for local
+ variables][38927]. Together with the overhaul of "no
+ resolution"/"unexpected resolution" errors (#[38154]) they result in
+ large and systematic improvement in resolution diagnostics.
+* [Fix `transmute::<T, U>` where `T` requires a bigger alignment than
+ `U`][38670]
+* [rustc: use -Xlinker when specifying an rpath with ',' in it][38798]
+* [`rustc` no longer attempts to provide "consider using an explicit
+ lifetime" suggestions][37057]. They were inaccurate.
+
+Stabilized APIs
+---------------
+
+* [`VecDeque::truncate`]
+* [`VecDeque::resize`]
+* [`String::insert_str`]
+* [`Duration::checked_add`]
+* [`Duration::checked_sub`]
+* [`Duration::checked_div`]
+* [`Duration::checked_mul`]
+* [`str::replacen`]
+* [`str::repeat`]
+* [`SocketAddr::is_ipv4`]
+* [`SocketAddr::is_ipv6`]
+* [`IpAddr::is_ipv4`]
+* [`IpAddr::is_ipv6`]
+* [`Vec::dedup_by`]
+* [`Vec::dedup_by_key`]
+* [`Result::unwrap_or_default`]
+* [`<*const T>::wrapping_offset`]
+* [`<*mut T>::wrapping_offset`]
+* `CommandExt::creation_flags`
+* [`File::set_permissions`]
+* [`String::split_off`]
+
+Libraries
+---------
+
+* [`[T]::binary_search` and `[T]::binary_search_by_key` now take
+ their argument by `Borrow` parameter][37761]
+* [All public types in std implement `Debug`][38006]
+* [`IpAddr` implements `From<Ipv4Addr>` and `From<Ipv6Addr>`][38327]
+* [`Ipv6Addr` implements `From<[u16; 8]>`][38131]
+* [Ctrl-Z returns from `Stdin.read()` when reading from the console on
+ Windows][38274]
+* [std: Fix partial writes in `LineWriter`][38062]
+* [std: Clamp max read/write sizes on Unix][38622]
+* [Use more specific panic message for `&str` slicing errors][38066]
+* [`TcpListener::set_only_v6` is deprecated][38304]. This
+ functionality cannot be achieved in std currently.
+* [`writeln!`, like `println!`, now accepts a form with no string
+ or formatting arguments, to just print a newline][38469]
+* [Implement `iter::Sum` and `iter::Product` for `Result`][38580]
+* [Reduce the size of static data in `std_unicode::tables`][38781]
+* [`char::EscapeDebug`, `EscapeDefault`, `EscapeUnicode`,
+ `CaseMappingIter`, `ToLowercase`, `ToUppercase`, implement
+ `Display`][38909]
+* [`Duration` implements `Sum`][38712]
+* [`String` implements `ToSocketAddrs`][39048]
+
+Cargo
+-----
+
+* [The `cargo check` command does a type check of a project without
+ building it][cargo/3296]
+* [crates.io will display CI badges from Travis and AppVeyor, if
+ specified in Cargo.toml][cargo/3546]
+* [crates.io will display categories listed in Cargo.toml][cargo/3301]
+* [Compilation profiles accept integer values for `debug`, in addition
+ to `true` and `false`. These are passed to `rustc` as the value to
+ `-C debuginfo`][cargo/3534]
+* [Implement `cargo --version --verbose`][cargo/3604]
+* [All builds now output 'dep-info' build dependencies compatible with
+ make and ninja][cargo/3557]
+* [Build all workspace members with `build --all`][cargo/3511]
+* [Document all workspace members with `doc --all`][cargo/3515]
+* [Path deps outside workspace are not members][cargo/3443]
+
+Misc
+----
+
+* [`rustdoc` has a `--sysroot` argument that, like `rustc`, specifies
+ the path to the Rust implementation][38589]
+* [The `armv7-linux-androideabi` target no longer enables NEON
+ extensions, per Google's ABI guide][38413]
+* [The stock standard library can be compiled for Redox OS][38401]
+* [Rust has initial SPARC support][38726]. Tier 3. No builds
+ available.
+* [Rust has experimental support for Nvidia PTX][38559]. Tier 3. No
+ builds available.
+* [Fix backtraces on i686-pc-windows-gnu by disabling FPO][39379]
+
+Compatibility Notes
+-------------------
+
+* [Uninhabitable enums (those without any variants) no longer permit wildcard
+ match patterns][38069]
+* In this release, references to uninhabited types cannot be
+ pattern-matched. This was accidentally allowed in 1.15.
+* [The compiler's `dead_code` lint now accounts for type aliases][38051].
+* [Ctrl-Z returns from `Stdin.read()` when reading from the console on
+ Windows][38274]
+* [Clean up semantics of `self` in an import list][38313]
+
+[37057]: https://github.com/rust-lang/rust/pull/37057
+[37761]: https://github.com/rust-lang/rust/pull/37761
+[38006]: https://github.com/rust-lang/rust/pull/38006
+[38051]: https://github.com/rust-lang/rust/pull/38051
+[38062]: https://github.com/rust-lang/rust/pull/38062
+[38622]: https://github.com/rust-lang/rust/pull/38622
+[38066]: https://github.com/rust-lang/rust/pull/38066
+[38069]: https://github.com/rust-lang/rust/pull/38069
+[38131]: https://github.com/rust-lang/rust/pull/38131
+[38154]: https://github.com/rust-lang/rust/pull/38154
+[38274]: https://github.com/rust-lang/rust/pull/38274
+[38304]: https://github.com/rust-lang/rust/pull/38304
+[38313]: https://github.com/rust-lang/rust/pull/38313
+[38314]: https://github.com/rust-lang/rust/pull/38314
+[38327]: https://github.com/rust-lang/rust/pull/38327
+[38401]: https://github.com/rust-lang/rust/pull/38401
+[38413]: https://github.com/rust-lang/rust/pull/38413
+[38469]: https://github.com/rust-lang/rust/pull/38469
+[38559]: https://github.com/rust-lang/rust/pull/38559
+[38571]: https://github.com/rust-lang/rust/pull/38571
+[38580]: https://github.com/rust-lang/rust/pull/38580
+[38589]: https://github.com/rust-lang/rust/pull/38589
+[38670]: https://github.com/rust-lang/rust/pull/38670
+[38712]: https://github.com/rust-lang/rust/pull/38712
+[38726]: https://github.com/rust-lang/rust/pull/38726
+[38781]: https://github.com/rust-lang/rust/pull/38781
+[38798]: https://github.com/rust-lang/rust/pull/38798
+[38909]: https://github.com/rust-lang/rust/pull/38909
+[38920]: https://github.com/rust-lang/rust/pull/38920
+[38927]: https://github.com/rust-lang/rust/pull/38927
+[39048]: https://github.com/rust-lang/rust/pull/39048
+[39282]: https://github.com/rust-lang/rust/pull/39282
+[39379]: https://github.com/rust-lang/rust/pull/39379
+[`<*const T>::wrapping_offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_offset
+[`<*mut T>::wrapping_offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_offset
+[`Duration::checked_add`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.checked_add
+[`Duration::checked_div`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.checked_div
+[`Duration::checked_mul`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.checked_mul
+[`Duration::checked_sub`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.checked_sub
+[`File::set_permissions`]: https://doc.rust-lang.org/std/fs/struct.File.html#method.set_permissions
+[`IpAddr::is_ipv4`]: https://doc.rust-lang.org/std/net/enum.IpAddr.html#method.is_ipv4
+[`IpAddr::is_ipv6`]: https://doc.rust-lang.org/std/net/enum.IpAddr.html#method.is_ipv6
+[`Result::unwrap_or_default`]: https://doc.rust-lang.org/std/result/enum.Result.html#method.unwrap_or_default
+[`SocketAddr::is_ipv4`]: https://doc.rust-lang.org/std/net/enum.SocketAddr.html#method.is_ipv4
+[`SocketAddr::is_ipv6`]: https://doc.rust-lang.org/std/net/enum.SocketAddr.html#method.is_ipv6
+[`String::insert_str`]: https://doc.rust-lang.org/std/string/struct.String.html#method.insert_str
+[`String::split_off`]: https://doc.rust-lang.org/std/string/struct.String.html#method.split_off
+[`Vec::dedup_by_key`]: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.dedup_by_key
+[`Vec::dedup_by`]: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.dedup_by
+[`VecDeque::resize`]: https://doc.rust-lang.org/std/collections/vec_deque/struct.VecDeque.html#method.resize
+[`VecDeque::truncate`]: https://doc.rust-lang.org/std/collections/vec_deque/struct.VecDeque.html#method.truncate
+[`str::repeat`]: https://doc.rust-lang.org/std/primitive.str.html#method.repeat
+[`str::replacen`]: https://doc.rust-lang.org/std/primitive.str.html#method.replacen
+[cargo/3296]: https://github.com/rust-lang/cargo/pull/3296
+[cargo/3301]: https://github.com/rust-lang/cargo/pull/3301
+[cargo/3443]: https://github.com/rust-lang/cargo/pull/3443
+[cargo/3511]: https://github.com/rust-lang/cargo/pull/3511
+[cargo/3515]: https://github.com/rust-lang/cargo/pull/3515
+[cargo/3534]: https://github.com/rust-lang/cargo/pull/3534
+[cargo/3546]: https://github.com/rust-lang/cargo/pull/3546
+[cargo/3557]: https://github.com/rust-lang/cargo/pull/3557
+[cargo/3604]: https://github.com/rust-lang/cargo/pull/3604
+[RFC 1623]: https://github.com/rust-lang/rfcs/blob/master/text/1623-static.md
+
+
Version 1.15.1 (2017-02-09)
===========================
* Compiler works with the following configurations:
* Linux: x86 and x86_64 hosts and targets
- * MacOS: x86 and x86_64 hosts and targets
+ * macOS: x86 and x86_64 hosts and targets
* Windows: x86 hosts and targets
* Cross compilation / multi-target configuration supported.
- set PATH=%PATH%;%CD%\handle
- handle.exe -accepteula -help
+ # Attempt to debug sccache failures
+ - set RUST_LOG=sccache=debug
+ - set SCCACHE_ERROR_LOG=%CD%/sccache.log
+
test_script:
- appveyor-retry sh -c 'git submodule deinit -f . && git submodule update --init'
- set SRC=.
- set NO_CCACHE=1
- sh src/ci/run.sh
+on_failure:
+ - cat %CD%/sccache.log
+
cache:
- - "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- - "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- - "i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- - "x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
+ - "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
+ - "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
+ - "i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
+ - "x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-rebuild-trigger"
branches:
only:
--- /dev/null
+Subproject commit 5f3b9c4c6a7be1f177d6024cb83d150b6479148a
need_cmd file
need_cmd make
-msg "inspecting environment"
-
-CFG_OSTYPE=$(uname -s)
-CFG_CPUTYPE=$(uname -m)
-
-if [ $CFG_OSTYPE = Darwin -a $CFG_CPUTYPE = i386 ]
-then
- # Darwin's `uname -s` lies and always returns i386. We have to use sysctl
- # instead.
- if sysctl hw.optional.x86_64 | grep -q ': 1'
- then
- CFG_CPUTYPE=x86_64
- fi
-fi
-
-# The goal here is to come up with the same triple as LLVM would,
-# at least for the subset of platforms we're willing to target.
-
-case $CFG_OSTYPE in
-
- Linux)
- CFG_OSTYPE=unknown-linux-gnu
- ;;
-
- FreeBSD)
- CFG_OSTYPE=unknown-freebsd
- ;;
-
- DragonFly)
- CFG_OSTYPE=unknown-dragonfly
- ;;
-
- Bitrig)
- CFG_OSTYPE=unknown-bitrig
- ;;
-
- OpenBSD)
- CFG_OSTYPE=unknown-openbsd
- ;;
-
- NetBSD)
- CFG_OSTYPE=unknown-netbsd
- ;;
-
- Darwin)
- CFG_OSTYPE=apple-darwin
- ;;
-
- SunOS)
- CFG_OSTYPE=sun-solaris
- CFG_CPUTYPE=$(isainfo -n)
- ;;
-
- Haiku)
- CFG_OSTYPE=unknown-haiku
- ;;
-
- MINGW*)
- # msys' `uname` does not print gcc configuration, but prints msys
- # configuration. so we cannot believe `uname -m`:
- # msys1 is always i686 and msys2 is always x86_64.
- # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
- # MINGW64 on x86_64.
- CFG_CPUTYPE=i686
- CFG_OSTYPE=pc-windows-gnu
- if [ "$MSYSTEM" = MINGW64 ]
- then
- CFG_CPUTYPE=x86_64
- fi
- ;;
-
- MSYS*)
- CFG_OSTYPE=pc-windows-gnu
- ;;
-
-# Thad's Cygwin identifiers below
-
-# Vista 32 bit
- CYGWIN_NT-6.0)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=i686
- ;;
-
-# Vista 64 bit
- CYGWIN_NT-6.0-WOW64)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=x86_64
- ;;
-
-# Win 7 32 bit
- CYGWIN_NT-6.1)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=i686
- ;;
-
-# Win 7 64 bit
- CYGWIN_NT-6.1-WOW64)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=x86_64
- ;;
-
-# Win 8 # uname -s on 64-bit cygwin does not contain WOW64, so simply use uname -m to detect arch (works in my install)
- CYGWIN_NT-6.3)
- CFG_OSTYPE=pc-windows-gnu
- ;;
-# We do not detect other OS such as XP/2003 using 64 bit using uname.
-# If we want to in the future, we will need to use Cygwin - Chuck's csih helper in /usr/lib/csih/winProductName.exe or alternative.
- *)
- err "unknown OS type: $CFG_OSTYPE"
- ;;
-esac
-
-
-case $CFG_CPUTYPE in
-
- i386 | i486 | i686 | i786 | x86)
- CFG_CPUTYPE=i686
- ;;
-
- xscale | arm)
- CFG_CPUTYPE=arm
- ;;
-
- armv6l)
- CFG_CPUTYPE=arm
- CFG_OSTYPE="${CFG_OSTYPE}eabihf"
- ;;
-
- armv7l)
- CFG_CPUTYPE=armv7
- CFG_OSTYPE="${CFG_OSTYPE}eabihf"
- ;;
-
- aarch64 | arm64)
- CFG_CPUTYPE=aarch64
- ;;
-
- powerpc | ppc)
- CFG_CPUTYPE=powerpc
- ;;
-
- powerpc64 | ppc64)
- CFG_CPUTYPE=powerpc64
- ;;
-
- powerpc64le | ppc64le)
- CFG_CPUTYPE=powerpc64le
- ;;
-
- s390x)
- CFG_CPUTYPE=s390x
- ;;
-
- x86_64 | x86-64 | x64 | amd64)
- CFG_CPUTYPE=x86_64
- ;;
-
- mips | mips64)
- if [ "$CFG_CPUTYPE" = "mips64" ]; then
- CFG_OSTYPE="${CFG_OSTYPE}abi64"
- fi
- ENDIAN=$(printf '\1' | od -dAn)
- if [ "$ENDIAN" -eq 1 ]; then
- CFG_CPUTYPE="${CFG_CPUTYPE}el"
- elif [ "$ENDIAN" -ne 256 ]; then
- err "unknown endianness: $ENDIAN (expecting 1 for little or 256 for big)"
- fi
- ;;
-
- BePC)
- CFG_CPUTYPE=i686
- ;;
-
- *)
- err "unknown CPU type: $CFG_CPUTYPE"
-esac
-
-# Detect 64 bit linux systems with 32 bit userland and force 32 bit compilation
-if [ $CFG_OSTYPE = unknown-linux-gnu -a $CFG_CPUTYPE = x86_64 ]
-then
- # $SHELL does not exist in standard 'sh', so probably only exists
- # if configure is running in an interactive bash shell. /usr/bin/env
- # exists *everywhere*.
- BIN_TO_PROBE="$SHELL"
- if [ ! -r "$BIN_TO_PROBE" ]; then
- if [ -r "/usr/bin/env" ]; then
- BIN_TO_PROBE="/usr/bin/env"
- else
- warn "Cannot check if the userland is i686 or x86_64"
- fi
- fi
- file -L "$BIN_TO_PROBE" | grep -q "x86[_-]64"
- if [ $? != 0 ]; then
- msg "i686 userland on x86_64 Linux kernel"
- CFG_CPUTYPE=i686
- fi
-fi
-
-
-DEFAULT_BUILD="${CFG_CPUTYPE}-${CFG_OSTYPE}"
-
CFG_SRC_DIR="$(abs_path $(dirname $0))/"
CFG_SRC_DIR_RELATIVE="$(dirname $0)/"
CFG_BUILD_DIR="$(pwd)/"
opt local-rebuild 0 "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version"
opt llvm-static-stdcpp 0 "statically link to libstdc++ for LLVM"
opt llvm-link-shared 0 "prefer shared linking to LLVM (llvm-config --link-shared)"
+opt llvm-clean-rebuild 0 "delete LLVM build directory on rebuild"
opt rpath 1 "build rpaths into rustc itself"
opt stage0-landing-pads 1 "enable landing pads during bootstrap with stage0"
# This is used by the automation to produce single-target nightlies
opt inject-std-version 1 "inject the current compiler version of libstd into programs"
opt llvm-version-check 1 "check if the LLVM version is supported, build anyway"
opt codegen-tests 1 "run the src/test/codegen tests"
+opt save-analysis 0 "save API analysis data"
opt option-checking 1 "complain about unrecognized options in this configure script"
opt ninja 0 "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)"
opt locked-deps 0 "force Cargo.lock to be up to date"
valopt llvm-root "" "set LLVM root"
valopt python "" "set path to python"
valopt jemalloc-root "" "set directory where libjemalloc_pic.a is located"
-valopt build "${DEFAULT_BUILD}" "GNUs ./configure syntax LLVM build triple"
+valopt build "" "GNUs ./configure syntax LLVM build triple"
valopt android-cross-path "" "Android NDK standalone path (deprecated)"
valopt i686-linux-android-ndk "" "i686-linux-android NDK standalone path"
valopt arm-linux-androideabi-ndk "" "arm-linux-androideabi NDK standalone path"
err "Found $python_version, but Python 2.7 is required"
fi
-# If we have no git directory then we are probably a tarball distribution
-# and shouldn't attempt to load submodules
-if [ ! -e ${CFG_SRC_DIR}.git ]
-then
- probe CFG_GIT git
- msg "git: no git directory. disabling submodules"
- CFG_DISABLE_MANAGE_SUBMODULES=1
-else
- probe_need CFG_GIT git
-fi
-
-# Use `md5sum` on GNU platforms, or `md5 -q` on BSD
-probe CFG_MD5 md5
-probe CFG_MD5SUM md5sum
-if [ -n "$CFG_MD5" ]
-then
- CFG_HASH_COMMAND="$CFG_MD5 -q | cut -c 1-8"
-elif [ -n "$CFG_MD5SUM" ]
-then
- CFG_HASH_COMMAND="$CFG_MD5SUM | cut -c 1-8"
-else
- err 'could not find one of: md5 md5sum'
-fi
-putvar CFG_HASH_COMMAND
-
-probe CFG_CLANG clang++
-probe CFG_CCACHE ccache
-probe CFG_GCC gcc
-probe CFG_LD ld
-probe CFG_VALGRIND valgrind
-probe CFG_PERF perf
-probe CFG_ISCC iscc
-probe CFG_ANTLR4 antlr4
-probe CFG_GRUN grun
-probe CFG_FLEX flex
-probe CFG_BISON bison
-probe CFG_GDB gdb
-probe CFG_LLDB lldb
-
-if [ -n "$CFG_ENABLE_NINJA" ]
-then
- probe CFG_NINJA ninja
- if [ -z "$CFG_NINJA" ]
- then
- # On Debian and Fedora, the `ninja` binary is an IRC bot, so the build tool was
- # renamed. Handle this case.
- probe CFG_NINJA ninja-build
- fi
-fi
-
-# For building LLVM
-if [ -z "$CFG_LLVM_ROOT" ]
-then
- probe_need CFG_CMAKE cmake
-fi
-
-# On MacOS X, invoking `javac` pops up a dialog if the JDK is not
-# installed. Since `javac` is only used if `antlr4` is available,
-# probe for it only in this case.
-if [ -n "$CFG_ANTLR4" ]
-then
- CFG_ANTLR4_JAR="\"$(find /usr/ -name antlr-complete.jar 2>/dev/null | head -n 1)\""
- if [ "x" = "x$CFG_ANTLR4_JAR" ]
- then
- CFG_ANTLR4_JAR="\"$(find ~ -name antlr-complete.jar 2>/dev/null | head -n 1)\""
- fi
- putvar CFG_ANTLR4_JAR $CFG_ANTLR4_JAR
- probe CFG_JAVAC javac
-fi
-
# the valgrind rpass tests will fail if you don't have a valgrind, but they're
# only disabled if you opt out.
if [ -z "$CFG_VALGRIND" ]
fi
fi
-if [ -n "$CFG_LLDB" ]
-then
- # Store LLDB's version
- CFG_LLDB_VERSION=$($CFG_LLDB --version 2>/dev/null | head -1)
- putvar CFG_LLDB_VERSION
-
- # If CFG_LLDB_PYTHON_DIR is not already set from the outside and valid, try to read it from
- # LLDB via the -P commandline options.
- if [ -z "$CFG_LLDB_PYTHON_DIR" ] || [ ! -d "$CFG_LLDB_PYTHON_DIR" ]
- then
- CFG_LLDB_PYTHON_DIR=$($CFG_LLDB -P)
-
- # If CFG_LLDB_PYTHON_DIR is not a valid directory, set it to something more readable
- if [ ! -d "$CFG_LLDB_PYTHON_DIR" ]
- then
- CFG_LLDB_PYTHON_DIR="LLDB_PYTHON_DIRECTORY_NOT_FOUND"
- fi
-
- putvar CFG_LLDB_PYTHON_DIR
- fi
-fi
-
-# LLDB tests on OSX require /usr/bin/python, not something like Homebrew's
-# /usr/local/bin/python. We're loading a compiled module for LLDB tests which is
-# only compatible with the system.
-case $CFG_BUILD in
- *-apple-darwin)
- CFG_LLDB_PYTHON=/usr/bin/python
- ;;
- *)
- CFG_LLDB_PYTHON=$CFG_PYTHON
- ;;
-esac
-putvar CFG_LLDB_PYTHON
-
# Do some sanity checks if running on buildbot
# (these env vars are set by rust-buildbot)
if [ -n "$RUST_DIST_SERVER" -a -n "$ALLOW_NONZERO_RLIMIT_CORE" ]; then
fi
fi
-step_msg "looking for target specific programs"
-
-probe CFG_ADB adb
-
BIN_SUF=
if [ "$CFG_OSTYPE" = "pc-windows-gnu" ] || [ "$CFG_OSTYPE" = "pc-windows-msvc" ]
then
CFG_DOCDIR=${CFG_DOCDIR%/}
CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
-CFG_SUPPORTED_TARGET=""
-for target_file in ${CFG_SRC_DIR}mk/cfg/*.mk; do
- CFG_SUPPORTED_TARGET="${CFG_SUPPORTED_TARGET} $(basename "$target_file" .mk)"
-done
# copy build-triples to host-triples so that builds are a subset of hosts
V_TEMP=""
putvar CFG_DOCDIR
putvar CFG_USING_LIBCPP
-# Avoid spurious warnings from clang by feeding it original source on
-# ccache-miss rather than preprocessed input.
-if [ -n "$CFG_ENABLE_CCACHE" ] && [ -n "$CFG_USING_CLANG" ]
-then
- CFG_CCACHE_CPP2=1
- putvar CFG_CCACHE_CPP2
-fi
-
-if [ -n "$CFG_ENABLE_CCACHE" ]
-then
- CFG_CCACHE_BASEDIR=${CFG_SRC_DIR}
- putvar CFG_CCACHE_BASEDIR
-fi
-
-
-putvar CFG_LLVM_SRC_DIR
-
-for t in $CFG_HOST
-do
- CFG_LLVM_BUILD_DIR=$(echo CFG_LLVM_BUILD_DIR_${t} | tr - _)
- CFG_LLVM_INST_DIR=$(echo CFG_LLVM_INST_DIR_${t} | tr - _)
- putvar $CFG_LLVM_BUILD_DIR
- putvar $CFG_LLVM_INST_DIR
-done
-
msg
copy_if_changed ${CFG_SRC_DIR}src/bootstrap/mk/Makefile.in ./Makefile
move_if_changed config.tmp config.mk
"libc 0.0.0",
]
-[[package]]
-name = "advapi32-sys"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "aho-corasick"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "aho-corasick"
version = "0.6.2"
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "bufstream"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "build-manifest"
version = "0.1.0"
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "cargo"
-version = "0.18.0"
-dependencies = [
- "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "bufstream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "cargotest 0.1.0",
- "crates-io 0.7.0",
- "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "flate2 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
- "fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "handlebars 0.25.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "miow 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "num_cpus 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_ignored 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "tar 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "cargotest"
-version = "0.1.0"
-dependencies = [
- "bufstream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "cargo 0.18.0",
- "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "flate2 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "tar 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "cargotest2"
version = "0.1.0"
-[[package]]
-name = "cfg-if"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "clap"
version = "2.20.5"
dependencies = [
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
dependencies = [
"env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
name = "core"
version = "0.0.0"
-[[package]]
-name = "crates-io"
-version = "0.7.0"
-dependencies = [
- "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "crossbeam"
-version = "0.2.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "curl"
-version = "0.4.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "curl-sys 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "curl-sys"
-version = "0.3.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "docopt"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "dtoa"
version = "0.4.1"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "env_logger"
-version = "0.4.0"
+version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "flate2"
-version = "0.2.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "fmt_macros"
version = "0.0.0"
-[[package]]
-name = "foreign-types"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "fs2"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "gcc"
version = "0.3.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "gdi32-sys"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "getopts"
version = "0.0.0"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "git2"
-version = "0.6.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "git2-curl"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "git2 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "glob"
-version = "0.2.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "graphviz"
version = "0.0.0"
-[[package]]
-name = "hamcrest"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "handlebars"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "idna"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "lazy_static"
-version = "0.2.2"
+version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
[[package]]
name = "libc"
-version = "0.2.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "libgit2-sys"
-version = "0.6.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cmake 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "curl-sys 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libssh2-sys 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "libssh2-sys"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cmake 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "libz-sys"
-version = "1.0.13"
+version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "linkchecker"
[[package]]
name = "log"
-version = "0.3.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "matches"
-version = "0.1.4"
+version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "mdbook"
-version = "0.0.17"
+version = "0.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"handlebars 0.25.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "memchr"
-version = "0.1.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "miniz-sys"
-version = "0.1.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "miow"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "net2 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "net2"
-version = "0.2.26"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num"
-version = "0.1.36"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-complex 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-iter 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-rational 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-bigint"
-version = "0.1.35"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-complex"
-version = "0.1.35"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-integer"
-version = "0.1.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-iter"
-version = "0.1.32"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num-rational"
-version = "0.1.35"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "num-traits"
-version = "0.1.36"
+version = "0.1.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "num_cpus"
-version = "1.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "openssl"
-version = "0.9.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "openssl-probe"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "openssl-sys"
-version = "0.9.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "panic_abort"
version = "0.0.0"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "pkg-config"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "proc_macro"
version = "0.0.0"
"syntax_pos 0.0.0",
]
-[[package]]
-name = "psapi-sys"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "pulldown-cmark"
version = "0.0.8"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "quote"
-version = "0.3.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "rand"
version = "0.0.0"
"core 0.0.0",
]
-[[package]]
-name = "rand"
-version = "0.3.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "regex"
-version = "0.1.80"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "regex"
version = "0.2.1"
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "regex-syntax"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "regex-syntax"
version = "0.4.0"
version = "0.1.0"
dependencies = [
"clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "mdbook 0.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mdbook 0.0.18 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"syntax_pos 0.0.0",
]
-[[package]]
-name = "semver"
-version = "0.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "semver-parser"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "serde"
-version = "0.9.7"
+version = "0.9.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "serde_codegen_internals"
-version = "0.14.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "syn 0.11.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "serde_derive"
-version = "0.9.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "quote 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_codegen_internals 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.11.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "serde_ignored"
-version = "0.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "serde_json"
-version = "0.9.7"
+version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "serialize"
version = "0.0.0"
-[[package]]
-name = "shell-escape"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "std"
version = "0.0.0"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "syn"
-version = "0.11.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "quote 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "synom 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "synom"
-version = "0.11.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "syntax"
version = "0.0.0"
"serialize 0.0.0",
]
-[[package]]
-name = "tar"
-version = "0.4.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "tempdir"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "term"
version = "0.0.0"
-[[package]]
-name = "term"
-version = "0.4.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "term_size"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
"term 0.0.0",
]
-[[package]]
-name = "thread-id"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "thread-id"
version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "thread_local"
-version = "0.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "toml"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "unicode-bidi"
-version = "0.2.5"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "unicode-normalization"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "unicode-segmentation"
version = "1.1.0"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "unicode-xid"
-version = "0.0.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "unreachable"
version = "0.1.1"
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "url"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "user32-sys"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "utf8-ranges"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-
[[package]]
name = "utf8-ranges"
version = "1.0.0"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "ws2_32-sys"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[metadata]
-"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a"
-"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
"checksum aho-corasick 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0638fd549427caa90c499814196d1b9e3725eb4d15d7339d6de073a680ed0ca2"
"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
"checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23"
"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
-"checksum bufstream 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7b48dbe2ff0e98fa2f03377d204a9637d3c9816cd431bfe05a8abbd0ea11d074"
-"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
"checksum clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7db281b0520e97fbd15cd615dcd8f8bcad0c26f5f7d5effe705f090f39e9a758"
"checksum cmake 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)" = "e1acc68a3f714627af38f9f5d09706a28584ba60dfe2cca68f40bf779f941b25"
-"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
-"checksum curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c90e1240ef340dd4027ade439e5c7c2064dd9dc652682117bd50d1486a3add7b"
-"checksum curl-sys 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)" = "c0d909dc402ae80b6f7b0118c039203436061b9d9a3ca5d2c2546d93e0a61aaa"
-"checksum docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab32ea6e284d87987066f21a9e809a73c14720571ef34516f0890b3d355ccfd8"
"checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
-"checksum env_logger 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "99971fb1b635fe7a0ee3c4d065845bb93cca80a23b5613b5613391ece5de4144"
+"checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83"
"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
-"checksum flate2 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "d4e4d0c15ef829cbc1b7cda651746be19cceeb238be7b1049227b14891df9e25"
-"checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d"
-"checksum fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34edaee07555859dc13ca387e6ae05686bb4d0364c95d649b6dab959511f4baf"
"checksum gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)" = "c07c758b972368e703a562686adb39125707cc1ef3399da8c019fc6c2498a75d"
-"checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
-"checksum git2 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "046ae03385257040b2a35e56d9669d950dd911ba2bf48202fbef73ee6aab27b2"
-"checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e"
-"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
-"checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4"
"checksum handlebars 0.25.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b2249f6f0dc5a3bb2b3b1a8f797dfccbc4b053344d773d654ad565e51427d335"
-"checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
-"checksum lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6abe0ee2e758cd6bc8a2cd56726359007748fbf4128da998b65d0b70f881e19b"
-"checksum libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)" = "684f330624d8c3784fb9558ca46c4ce488073a8d22450415c5eb4f4cfb0d11b5"
-"checksum libgit2-sys 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "d951fd5eccae07c74e8c2c1075b05ea1e43be7f8952245af8c2840d1480b1d95"
-"checksum libssh2-sys 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "91e135645c2e198a39552c8c7686bb5b83b1b99f64831c040a6c2798a1195934"
-"checksum libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e5ee912a45d686d393d5ac87fac15ba0ba18daae14e8e7543c63ebf7fb7e970c"
-"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
-"checksum matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efd7622e3022e1a6eaa602c4cea8912254e5582c9c692e9167714182244801b1"
-"checksum mdbook 0.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "dbba458ca886cb082d026afd704eeeeb0531f7e4ffd6c619f72dc309c1c18fe4"
-"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
+"checksum lazy_static 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7291b1dd97d331f752620b02dfdbc231df7fc01bf282a00769e1cdb963c460dc"
+"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
+"checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
+"checksum mdbook 0.0.18 (registry+https://github.com/rust-lang/crates.io-index)" = "06a68e8738e42b38a02755d3ce5fa12d559e17acb238e4326cbc3cc056e65280"
"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
-"checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726"
-"checksum miow 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3a78d2605eb97302c10cf944b8d96b0a2a890c52957caf92fcd1f24f69049579"
-"checksum net2 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)" = "5edf9cb6be97212423aed9413dd4729d62b370b5e1c571750e882cebbbc1e3e2"
-"checksum num 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "bde7c03b09e7c6a301ee81f6ddf66d7a28ec305699e3d3b056d2fc56470e3120"
-"checksum num-bigint 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "88b14378471f7c2adc5262f05b4701ef53e8da376453a8d8fee48e51db745e49"
-"checksum num-complex 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "f0c78e054dd19c3fd03419ade63fa661e9c49bb890ce3beb4eee5b7baf93f92f"
-"checksum num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "fb24d9bfb3f222010df27995441ded1e954f8f69cd35021f6bef02ca9552fb92"
-"checksum num-iter 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "287a1c9969a847055e1122ec0ea7a5c5d6f72aad97934e131c83d5c08ab4e45c"
-"checksum num-rational 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "54ff603b8334a72fbb27fe66948aac0abaaa40231b3cecd189e76162f6f38aaf"
-"checksum num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a16a42856a256b39c6d3484f097f6713e14feacd9bfb02290917904fae46c81c"
+"checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99"
"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
-"checksum num_cpus 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a225d1e2717567599c24f88e49f00856c6e825a12125181ee42c4257e3688d39"
"checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842"
-"checksum openssl 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)" = "f9871ecf7629da3760599e3e547d35940cff3cead49159b49f81cd1250f24f1d"
-"checksum openssl-probe 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "756d49c8424483a3df3b5d735112b4da22109ced9a8294f1f5cdf80fb3810919"
-"checksum openssl-sys 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5dd48381e9e8a6dce9c4c402db143b2e243f5f872354532f7a009c289b3998ca"
"checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
-"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
-"checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478"
"checksum pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1058d7bb927ca067656537eec4e02c2b4b70eaaa129664c5b90c111e20326f41"
"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
-"checksum quote 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "7375cf7ad34a92e8fd18dd9c42f58b9a11def59ab48bec955bf359a788335592"
-"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d"
-"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
"checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01"
-"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
"checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457"
"checksum rustc-serialize 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "237546c689f20bb44980270c73c3b9edd0891c1be49cc1274406134a66d3957b"
-"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
-"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
-"checksum serde 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1e0ed773960f90a78567fcfbe935284adf50c5d7cf119aa2cf43bb0b4afa69bb"
-"checksum serde_codegen_internals 0.14.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4d52006899f910528a10631e5b727973fe668f3228109d1707ccf5bad5490b6e"
-"checksum serde_derive 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)" = "789ee9f3cd78c850948b94121020147f5220b47dafbf230d7098a93a58f726cf"
-"checksum serde_ignored 0.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d4b3f5576874721d14690657e9f0ed286e72a52be2f6fdc0cf2f024182bd8f64"
-"checksum serde_json 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)" = "2eb96d30e4e6f9fc52e08f51176d078b6f79b981dc3ed4134f7b850be9f446a8"
-"checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8"
+"checksum serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)" = "a702319c807c016e51f672e5c77d6f0b46afddd744b5e437d6b8436b888b458f"
+"checksum serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)" = "dbc45439552eb8fb86907a2c41c1fd0ef97458efb87ff7f878db466eb581824e"
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
-"checksum syn 0.11.8 (registry+https://github.com/rust-lang/crates.io-index)" = "37c279fb816210c9bb28b2c292664581e7b87b4561e86b94df462664d8620bb8"
-"checksum synom 0.11.2 (registry+https://github.com/rust-lang/crates.io-index)" = "27e31aa4b09b9f4cb12dff3c30ba503e17b1a624413d764d32dab76e3920e5bc"
-"checksum tar 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)" = "1eb3bf6ec92843ca93f4fcfb5fc6dfe30534815b147885db4b5759b8e2ff7d52"
-"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
-"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
-"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
"checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"
-"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
"checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
-"checksum toml 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "08272367dd2e766db3fa38f068067d17aa6a9dfd7259af24b3927db92f1e0c2f"
-"checksum unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3a078ebdd62c0e71a709c3d53d2af693fe09fe93fbff8344aebe289b78f9032"
-"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"
+"checksum toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3474f3c6eaf32eedb4f4a66a26214f020f828a6d96c37e38a35e3a379bbcfd11"
"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
-"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
-"checksum url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5ba8a749fb4479b043733416c244fa9d1d3af3d7c23804944651c8a448cb87e"
-"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47"
-"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cac5efe5cb0fa14ec2f84f83c701c562ee63f6dcc680861b21d65c682adfb05f"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
-"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
"tools/build-manifest",
"tools/qemu-test-client",
"tools/qemu-test-server",
- "tools/cargo",
]
# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit
if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
let rpath = if target.contains("apple") {
- // Note that we need to take one extra step on OSX to also pass
+ // Note that we need to take one extra step on macOS to also pass
// `-Wl,-install_name,@rpath/...` to get things to work right. To
// do that we pass a weird flag to the compiler to get it to do
// so. Note that this is definitely a hack, and we should likely
def get_mk(self, key):
for line in iter(self.config_mk.splitlines()):
- if line.startswith(key):
- return line[line.find(':=') + 2:].strip()
+ if line.startswith(key + ' '):
+ var = line[line.find(':=') + 2:].strip()
+ if var != '':
+ return var
return None
def cargo(self):
sys.exit(err)
elif ostype == 'Darwin':
ostype = 'apple-darwin'
+ elif ostype == 'Haiku':
+ ostype = 'unknown-haiku'
elif ostype.startswith('MINGW'):
# msys' `uname` does not print gcc configuration, but prints msys
# configuration. so we cannot believe `uname -m`:
cputype = 'i686'
elif cputype in {'xscale', 'arm'}:
cputype = 'arm'
- elif cputype == 'armv7l':
+ elif cputype in {'armv6l', 'armv7l', 'armv8l'}:
cputype = 'arm'
ostype += 'eabihf'
+ elif cputype == 'armv7l':
+ cputype = 'armv7'
+ ostype += 'eabihf'
elif cputype == 'aarch64':
cputype = 'aarch64'
elif cputype == 'arm64':
raise ValueError('unknown byteorder: ' + sys.byteorder)
# only the n64 ABI is supported, indicate it
ostype += 'abi64'
- elif cputype in {'powerpc', 'ppc', 'ppc64'}:
+ elif cputype in {'powerpc', 'ppc'}:
cputype = 'powerpc'
+ elif cputype in {'powerpc64', 'ppc64'}:
+ cputype = 'powerpc64'
+ elif cputype in {'powerpc64le', 'ppc64le'}:
+ cputype = 'powerpc64le'
elif cputype == 'sparcv9':
pass
elif cputype in {'amd64', 'x86_64', 'x86-64', 'x64'}:
cputype = 'x86_64'
+ elif cputype == 's390x':
+ cputype = 's390x'
+ elif cputype == 'BePC':
+ cputype = 'i686'
else:
err = "unknown cpu type: " + cputype
if self.verbose:
impl GitInfo {
pub fn new(dir: &Path) -> GitInfo {
- if !dir.join(".git").is_dir() {
+ // See if this even begins to look like a git dir
+ if !dir.join(".git").exists() {
return GitInfo { inner: None }
}
+
+ // Make sure git commands work
+ let out = Command::new("git")
+ .arg("rev-parse")
+ .current_dir(dir)
+ .output()
+ .expect("failed to spawn git");
+ if !out.status.success() {
+ return GitInfo { inner: None }
+ }
+
+ // Ok, let's scrape some info
let ver_date = output(Command::new("git").current_dir(dir)
.arg("log").arg("-1")
.arg("--date=short")
cmd.arg("--docck-python").arg(build.python());
if build.config.build.ends_with("apple-darwin") {
- // Force /usr/bin/python on OSX for LLDB tests because we're loading the
+ // Force /usr/bin/python on macOS for LLDB tests because we're loading the
// LLDB plugin's compiled module which only works with the system python
// (namely not Homebrew-installed python)
cmd.arg("--lldb-python").arg("/usr/bin/python");
let filename = e.file_name().into_string().unwrap();
if (target.contains("windows") && filename.ends_with(".exe")) ||
(!target.contains("windows") && !filename.contains(".")) ||
- (target.contains("emscripten") && filename.contains(".js")){
+ (target.contains("emscripten") && filename.ends_with(".js")) {
dst.push(e.path());
}
}
cargo.env("CFG_LLVM_ROOT", s);
}
// Building with a static libstdc++ is only supported on linux right now,
- // not for MSVC or OSX
+ // not for MSVC or macOS
if build.config.llvm_static_stdcpp &&
!target.contains("windows") &&
!target.contains("apple") {
// build.clear_if_dirty(&out_dir, &libstd_stamp(build, stage, &host, target));
let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
- let dir = build.src.join("src/tools").join(tool);
+ let mut dir = build.src.join(tool);
+ if !dir.exists() {
+ dir = build.src.join("src/tools").join(tool);
+ }
cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
// We don't want to build tools dynamically as they'll be running across
pub llvm_static_stdcpp: bool,
pub llvm_link_shared: bool,
pub llvm_targets: Option<String>,
+ pub llvm_link_jobs: Option<u32>,
+ pub llvm_clean_rebuild: bool,
// rust codegen options
pub rust_optimize: bool,
pub rustc_default_ar: Option<String>,
pub rust_optimize_tests: bool,
pub rust_debuginfo_tests: bool,
+ pub rust_save_analysis: bool,
pub rust_dist_src: bool,
pub build: String,
version_check: Option<bool>,
static_libstdcpp: Option<bool>,
targets: Option<String>,
+ link_jobs: Option<u32>,
+ clean_rebuild: Option<bool>,
}
#[derive(RustcDecodable, Default, Clone)]
optimize_tests: Option<bool>,
debuginfo_tests: Option<bool>,
codegen_tests: Option<bool>,
+ save_analysis: Option<bool>,
}
/// TOML representation of how each build target is configured.
set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo);
set(&mut config.llvm_version_check, llvm.version_check);
set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
+ set(&mut config.llvm_clean_rebuild, llvm.clean_rebuild);
config.llvm_targets = llvm.targets.clone();
+ config.llvm_link_jobs = llvm.link_jobs;
}
if let Some(ref rust) = toml.rust {
set(&mut config.rust_optimize_tests, rust.optimize_tests);
set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
set(&mut config.codegen_tests, rust.codegen_tests);
+ set(&mut config.rust_save_analysis, rust.save_analysis);
set(&mut config.rust_rpath, rust.rpath);
set(&mut config.debug_jemalloc, rust.debug_jemalloc);
set(&mut config.use_jemalloc, rust.use_jemalloc);
("LLVM_VERSION_CHECK", self.llvm_version_check),
("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
("LLVM_LINK_SHARED", self.llvm_link_shared),
+ ("LLVM_CLEAN_REBUILD", self.llvm_clean_rebuild),
("OPTIMIZE", self.rust_optimize),
("DEBUG_ASSERTIONS", self.rust_debug_assertions),
("DEBUGINFO", self.rust_debuginfo),
("LOCAL_REBUILD", self.local_rebuild),
("NINJA", self.ninja),
("CODEGEN_TESTS", self.codegen_tests),
+ ("SAVE_ANALYSIS", self.rust_save_analysis),
("LOCKED_DEPS", self.locked_deps),
("VENDOR", self.vendor),
("FULL_BOOTSTRAP", self.full_bootstrap),
}
match key {
- "CFG_BUILD" => self.build = value.to_string(),
- "CFG_HOST" => {
- self.host = value.split(" ").map(|s| s.to_string())
- .collect();
- }
- "CFG_TARGET" => {
- self.target = value.split(" ").map(|s| s.to_string())
- .collect();
+ "CFG_BUILD" if value.len() > 0 => self.build = value.to_string(),
+ "CFG_HOST" if value.len() > 0 => {
+ self.host.extend(value.split(" ").map(|s| s.to_string()));
+
+ }
+ "CFG_TARGET" if value.len() > 0 => {
+ self.target.extend(value.split(" ").map(|s| s.to_string()));
}
"CFG_MUSL_ROOT" if value.len() > 0 => {
self.musl_root = Some(parse_configure_path(value));
# Rust team and file an issue if you need assistance in porting!
#targets = "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX"
+# Cap the number of parallel linker invocations when compiling LLVM.
+# This can be useful when building LLVM with debug info, which significantly
+# increases the size of binaries and consequently the memory required by
+# each linker process.
+# If absent or 0, linker invocations are treated like any other job and
+# controlled by rustbuild's -j parameter.
+#link-jobs = 0
+
+# Delete LLVM build directory on LLVM rebuild.
+# This option defaults to `false` for local development, but CI may want to
+# always perform clean full builds (possibly accelerated by (s)ccache).
+#clean-rebuild = false
+
# =============================================================================
# General build configuration options
# =============================================================================
# saying that the FileCheck executable is missing, you may want to disable this.
#codegen-tests = true
+# Flag indicating whether the API analysis data should be saved.
+#save-analysis = false
+
# =============================================================================
# Options for specific targets
#
/// Creates a tarball of save-analysis metadata, if available.
pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
+ if !build.config.rust_save_analysis {
+ return
+ }
+
println!("Dist analysis");
- if build.config.channel != "nightly" {
- println!("\tskipping - not on nightly channel");
- return;
- }
if compiler.host != build.config.build {
- println!("\tskipping - not a build host");
- return
- }
- if compiler.stage != 2 {
- println!("\tskipping - not stage2");
+ println!("\tskipping, not a build host");
return
}
let src_dirs = [
"man",
"src",
+ "cargo",
];
let filter_fn = move |path: &Path| {
println!("Dist cargo stage{} ({})", stage, target);
let compiler = Compiler::new(stage, &build.config.build);
- let src = build.src.join("src/tools/cargo");
+ let src = build.src.join("cargo");
let etc = src.join("src/etc");
- let release_num = &build.crates["cargo"].version;
- let name = format!("cargo-{}", build.package_vers(release_num));
- let version = build.cargo_info.version(build, release_num);
+ let release_num = build.cargo_release_num();
+ let name = format!("cargo-{}", build.package_vers(&release_num));
+ let version = build.cargo_info.version(build, &release_num);
let tmp = tmpdir(build);
let image = tmp.join("cargo-image");
println!("Dist extended stage{} ({})", stage, target);
let dist = distdir(build);
- let cargo_vers = &build.crates["cargo"].version;
+ let cargo_vers = build.cargo_release_num();
let rustc_installer = dist.join(format!("{}-{}.tar.gz",
pkgname(build, "rustc"),
target));
cmd.arg(distdir(build));
cmd.arg(today.trim());
cmd.arg(build.rust_package_vers());
- cmd.arg(build.cargo_info.version(build, &build.crates["cargo"].version));
+ cmd.arg(build.package_vers(&build.cargo_release_num()));
cmd.arg(addr);
t!(fs::create_dir_all(distdir(build)));
use std::fs::{self, File};
use std::io::prelude::*;
+use std::io;
+use std::path::Path;
use std::process::Command;
use {Build, Compiler, Mode};
-use util::cp_r;
+use util::{cp_r, symlink_dir};
use build_helper::up_to_date;
/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book
.join(target).join("doc");
let rustdoc = build.rustdoc(&compiler);
- build.clear_if_dirty(&out_dir, &rustdoc);
+ // Here what we're doing is creating a *symlink* (directory junction on
+ // Windows) to the final output location. This is not done as an
+ // optimization but rather for correctness. We've got three trees of
+ // documentation, one for std, one for test, and one for rustc. It's then
+ // our job to merge them all together.
+ //
+ // Unfortunately rustbuild doesn't know nearly as well how to merge doc
+ // trees as rustdoc does itself, so instead of actually having three
+ // separate trees we just have rustdoc output to the same location across
+ // all of them.
+ //
+ // This way rustdoc generates output directly into the final output
+ // location, and rustdoc itself handles merging the doc trees.
+ let my_out = build.crate_doc_out(target);
+ build.clear_if_dirty(&my_out, &rustdoc);
+ t!(symlink_dir_force(&my_out, &out_dir));
let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
cargo.arg("--manifest-path")
// We don't want to build docs for internal std dependencies unless
// in compiler-docs mode. When not in that mode, we whitelist the crates
// for which docs must be built.
- if build.config.compiler_docs {
- cargo.arg("-p").arg("std");
- } else {
+ if !build.config.compiler_docs {
cargo.arg("--no-deps");
for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
cargo.arg("-p").arg(krate);
build.run(&mut cargo);
- cp_r(&out_dir, &out)
+ cp_r(&my_out, &out);
}
/// Compile all libtest documentation.
.join(target).join("doc");
let rustdoc = build.rustdoc(&compiler);
- build.clear_if_dirty(&out_dir, &rustdoc);
+ // See docs in std above for why we symlink
+ let my_out = build.crate_doc_out(target);
+ build.clear_if_dirty(&my_out, &rustdoc);
+ t!(symlink_dir_force(&my_out, &out_dir));
let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
cargo.arg("--manifest-path")
.arg(build.src.join("src/libtest/Cargo.toml"));
build.run(&mut cargo);
- cp_r(&out_dir, &out)
+ cp_r(&my_out, &out);
}
/// Generate all compiler documentation.
let out_dir = build.stage_out(&compiler, Mode::Librustc)
.join(target).join("doc");
let rustdoc = build.rustdoc(&compiler);
- if !up_to_date(&rustdoc, &out_dir.join("rustc/index.html")) && out_dir.exists() {
- t!(fs::remove_dir_all(&out_dir));
- }
+
+ // See docs in std above for why we symlink
+ let my_out = build.crate_doc_out(target);
+ build.clear_if_dirty(&my_out, &rustdoc);
+ t!(symlink_dir_force(&my_out, &out_dir));
+
let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
cargo.arg("--manifest-path")
.arg(build.src.join("src/rustc/Cargo.toml"))
.arg("--features").arg(build.rustc_features());
+
+ if build.config.compiler_docs {
+ // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
+ // which would otherwise overwrite the docs for the real rustc and
+ // rustdoc lib crates.
+ cargo.arg("-p").arg("rustc_driver")
+ .arg("-p").arg("rustdoc");
+ } else {
+ // Like with libstd above if compiler docs aren't enabled then we're not
+ // documenting internal dependencies, so we have a whitelist.
+ cargo.arg("--no-deps");
+ for krate in &["proc_macro"] {
+ cargo.arg("-p").arg(krate);
+ }
+ }
+
build.run(&mut cargo);
- cp_r(&out_dir, &out)
+ cp_r(&my_out, &out);
}
/// Generates the HTML rendered error-index by running the
build.run(&mut index);
}
+
+fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
+ if let Ok(m) = fs::symlink_metadata(dst) {
+ if m.file_type().is_dir() {
+ try!(fs::remove_dir_all(dst));
+ } else {
+ // handle directory junctions on windows by falling back to
+ // `remove_dir`.
+ try!(fs::remove_file(dst).or_else(|_| {
+ fs::remove_dir(dst)
+ }));
+ }
+ }
+
+ symlink_dir(src, dst)
+}
install_sh(&build, "docs", "rust-docs", stage, host, &prefix,
&docdir, &libdir, &mandir, &empty_dir);
}
+ if build.config.rust_save_analysis {
+ install_sh(&build, "analysis", "rust-analysis", stage, host, &prefix,
+ &docdir, &libdir, &mandir, &empty_dir);
+ }
install_sh(&build, "std", "rust-std", stage, host, &prefix,
&docdir, &libdir, &mandir, &empty_dir);
install_sh(&build, "rustc", "rustc", stage, host, &prefix,
extern crate rustc_serialize;
extern crate toml;
-use std::collections::HashMap;
use std::cmp;
+use std::collections::HashMap;
use std::env;
use std::ffi::OsString;
use std::fs::{self, File};
+use std::io::Read;
use std::path::{Component, PathBuf, Path};
use std::process::Command;
None => false,
};
let rust_info = channel::GitInfo::new(&src);
- let cargo_info = channel::GitInfo::new(&src.join("src/tools/cargo"));
+ let cargo_info = channel::GitInfo::new(&src.join("cargo"));
Build {
flags: flags,
.env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
}
- if self.config.channel == "nightly" && compiler.is_final_stage(self) {
+ if self.config.rust_save_analysis && compiler.is_final_stage(self) {
cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
}
self.out.join(target).join("doc")
}
+ /// Output directory for all crate documentation for a target (temporary)
+ ///
+ /// The artifacts here are then copied into `doc_out` above.
+ fn crate_doc_out(&self, target: &str) -> PathBuf {
+ self.out.join(target).join("crate-docs")
+ }
+
/// Returns true if no custom `llvm-config` is set for the specified target.
///
/// If no custom `llvm-config` was specified then Rust's llvm will be used.
.filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
.collect::<Vec<_>>();
- // If we're compiling on OSX then we add a few unconditional flags
+ // If we're compiling on macOS then we add a few unconditional flags
// indicating that we want libc++ (more filled out than libstdc++) and
// we want to compile for 10.7. This way we can ensure that
// LLVM/jemalloc/etc are all properly compiled.
self.rust_info.version(self, channel::CFG_RELEASE_NUM)
}
+ /// Returns the `a.b.c` version that Cargo is at.
+ fn cargo_release_num(&self) -> String {
+ let mut toml = String::new();
+ t!(t!(File::open(self.src.join("cargo/Cargo.toml"))).read_to_string(&mut toml));
+ for line in toml.lines() {
+ let prefix = "version = \"";
+ let suffix = "\"";
+ if line.starts_with(prefix) && line.ends_with(suffix) {
+ return line[prefix.len()..line.len() - suffix.len()].to_string()
+ }
+ }
+
+ panic!("failed to find version in cargo's Cargo.toml")
+ }
+
/// Returns whether unstable features should be enabled for the compiler
/// we're building.
fn unstable_features(&self) -> bool {
}
}
- let clean_trigger = build.src.join("src/rustllvm/llvm-auto-clean-trigger");
- let mut clean_trigger_contents = String::new();
- t!(t!(File::open(&clean_trigger)).read_to_string(&mut clean_trigger_contents));
+ let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
+ let mut rebuild_trigger_contents = String::new();
+ t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
let out_dir = build.llvm_out(target);
let done_stamp = out_dir.join("llvm-finished-building");
let mut done_contents = String::new();
t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
- // LLVM was already built previously.
- // We don't track changes in LLVM sources, so we need to choose between reusing
- // what was built previously, or cleaning the directory and doing a fresh build.
- // The choice depends on contents of the clean-trigger file.
- // If the contents are the same as during the previous build, then no action is required.
- // If the contents differ from the previous build, then cleaning is triggered.
- if done_contents == clean_trigger_contents {
+ // If LLVM was already built previously and contents of the rebuild-trigger file
+ // didn't change from the previous build, then no action is required.
+ if done_contents == rebuild_trigger_contents {
return
- } else {
- t!(fs::remove_dir_all(&out_dir));
}
}
+ if build.config.llvm_clean_rebuild {
+ drop(fs::remove_dir_all(&out_dir));
+ }
println!("Building LLVM for {}", target);
let _time = util::timeit();
cfg.define("LLVM_BUILD_32_BITS", "ON");
}
+ if let Some(num_linkers) = build.config.llvm_link_jobs {
+ if num_linkers > 0 {
+ cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
+ }
+ }
+
// http://llvm.org/docs/HowToCrossCompileLLVM.html
if target != build.config.build {
// FIXME: if the llvm root for the build triple is overridden then we
// tools and libs on all platforms.
cfg.build();
- t!(t!(File::create(&done_stamp)).write_all(clean_trigger_contents.as_bytes()));
+ t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
}
fn check_llvm_version(build: &Build, llvm_config: &Path) {
}
for target in build.config.target.iter() {
- // Can't compile for iOS unless we're on OSX
+ // Can't compile for iOS unless we're on macOS
if target.contains("apple-ios") &&
!build.config.build.contains("apple-darwin") {
- panic!("the iOS target is only supported on OSX");
+ panic!("the iOS target is only supported on macOS");
}
// Make sure musl-root is valid if specified
rules.build("tool-qemu-test-client", "src/tools/qemu-test-client")
.dep(|s| s.name("libstd"))
.run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-client"));
- rules.build("tool-cargo", "src/tools/cargo")
+ rules.build("tool-cargo", "cargo")
.dep(|s| s.name("libstd"))
.dep(|s| s.stage(0).host(s.target).name("openssl"))
.dep(move |s| {
for (krate, path, default) in krates("test") {
rules.doc(&krate.doc_step, path)
.dep(|s| s.name("libtest-link"))
+ // Needed so rustdoc generates relative links to std.
+ .dep(|s| s.name("doc-crate-std"))
.default(default && build.config.compiler_docs)
.run(move |s| doc::test(build, s.stage, s.target));
}
for (krate, path, default) in krates("rustc-main") {
rules.doc(&krate.doc_step, path)
.dep(|s| s.name("librustc-link"))
+ // Needed so rustdoc generates relative links to std.
+ .dep(|s| s.name("doc-crate-std"))
.host(true)
- .default(default && build.config.compiler_docs)
+ .default(default && build.config.docs)
.run(move |s| doc::rustc(build, s.stage, s.target));
}
name: "std".to_string(),
deps: Vec::new(),
path: cwd.join("src/std"),
- doc_step: "doc-std".to_string(),
+ doc_step: "doc-crate-std".to_string(),
build_step: "build-crate-std".to_string(),
- test_step: "test-std".to_string(),
- bench_step: "bench-std".to_string(),
+ test_step: "test-crate-std".to_string(),
+ bench_step: "bench-crate-std".to_string(),
version: String::new(),
});
build.crates.insert("test".to_string(), ::Crate {
name: "test".to_string(),
deps: Vec::new(),
path: cwd.join("src/test"),
- doc_step: "doc-test".to_string(),
+ doc_step: "doc-crate-test".to_string(),
build_step: "build-crate-test".to_string(),
- test_step: "test-test".to_string(),
- bench_step: "bench-test".to_string(),
+ test_step: "test-crate-test".to_string(),
+ bench_step: "bench-crate-test".to_string(),
version: String::new(),
});
build.crates.insert("rustc-main".to_string(), ::Crate {
deps: Vec::new(),
version: String::new(),
path: cwd.join("src/rustc-main"),
- doc_step: "doc-rustc-main".to_string(),
+ doc_step: "doc-crate-rustc-main".to_string(),
build_step: "build-crate-rustc-main".to_string(),
- test_step: "test-rustc-main".to_string(),
- bench_step: "bench-rustc-main".to_string(),
+ test_step: "test-crate-rustc-main".to_string(),
+ bench_step: "bench-crate-rustc-main".to_string(),
});
return build
}
use std::env;
use std::ffi::OsString;
use std::fs;
+use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::time::Instant;
+use filetime::{self, FileTime};
+
/// Returns the `name` as the filename of a static library for `target`.
pub fn staticlib(name: &str, target: &str) -> String {
if target.contains("windows") {
// Attempt to "easy copy" by creating a hard link (symlinks don't work on
// windows), but if that fails just fall back to a slow `copy` operation.
- let res = fs::hard_link(src, dst);
- let res = res.or_else(|_| fs::copy(src, dst).map(|_| ()));
+ // let res = fs::hard_link(src, dst);
+ let res = fs::copy(src, dst);
if let Err(e) = res {
panic!("failed to copy `{}` to `{}`: {}", src.display(),
dst.display(), e)
}
+ let metadata = t!(src.metadata());
+ t!(fs::set_permissions(dst, metadata.permissions()));
+ let atime = FileTime::from_last_access_time(&metadata);
+ let mtime = FileTime::from_last_modification_time(&metadata);
+ t!(filetime::set_file_times(dst, atime, mtime));
+
}
/// Copies the `src` directory recursively to `dst`. Both are assumed to exist
time.subsec_nanos() / 1_000_000);
}
}
+
+/// Symlinks two directories, using junctions on Windows and normal symlinks on
+/// Unix.
+pub fn symlink_dir(src: &Path, dest: &Path) -> io::Result<()> {
+ let _ = fs::remove_dir(dest);
+ return symlink_dir_inner(src, dest);
+
+ #[cfg(not(windows))]
+ fn symlink_dir_inner(src: &Path, dest: &Path) -> io::Result<()> {
+ use std::os::unix::fs;
+ fs::symlink(src, dest)
+ }
+
+ // Creating a directory junction on windows involves dealing with reparse
+ // points and the DeviceIoControl function, and this code is a skeleton of
+ // what can be found here:
+ //
+ // http://www.flexhex.com/docs/articles/hard-links.phtml
+ //
+ // Copied from std
+ #[cfg(windows)]
+ #[allow(bad_style)]
+ fn symlink_dir_inner(target: &Path, junction: &Path) -> io::Result<()> {
+ use std::ptr;
+ use std::ffi::OsStr;
+ use std::os::windows::ffi::OsStrExt;
+
+ const MAXIMUM_REPARSE_DATA_BUFFER_SIZE: usize = 16 * 1024;
+ const GENERIC_WRITE: DWORD = 0x40000000;
+ const OPEN_EXISTING: DWORD = 3;
+ const FILE_FLAG_OPEN_REPARSE_POINT: DWORD = 0x00200000;
+ const FILE_FLAG_BACKUP_SEMANTICS: DWORD = 0x02000000;
+ const FSCTL_SET_REPARSE_POINT: DWORD = 0x900a4;
+ const IO_REPARSE_TAG_MOUNT_POINT: DWORD = 0xa0000003;
+ const FILE_SHARE_DELETE: DWORD = 0x4;
+ const FILE_SHARE_READ: DWORD = 0x1;
+ const FILE_SHARE_WRITE: DWORD = 0x2;
+
+ type BOOL = i32;
+ type DWORD = u32;
+ type HANDLE = *mut u8;
+ type LPCWSTR = *const u16;
+ type LPDWORD = *mut DWORD;
+ type LPOVERLAPPED = *mut u8;
+ type LPSECURITY_ATTRIBUTES = *mut u8;
+ type LPVOID = *mut u8;
+ type WCHAR = u16;
+ type WORD = u16;
+
+ #[repr(C)]
+ struct REPARSE_MOUNTPOINT_DATA_BUFFER {
+ ReparseTag: DWORD,
+ ReparseDataLength: DWORD,
+ Reserved: WORD,
+ ReparseTargetLength: WORD,
+ ReparseTargetMaximumLength: WORD,
+ Reserved1: WORD,
+ ReparseTarget: WCHAR,
+ }
+
+ extern "system" {
+ fn CreateFileW(lpFileName: LPCWSTR,
+ dwDesiredAccess: DWORD,
+ dwShareMode: DWORD,
+ lpSecurityAttributes: LPSECURITY_ATTRIBUTES,
+ dwCreationDisposition: DWORD,
+ dwFlagsAndAttributes: DWORD,
+ hTemplateFile: HANDLE)
+ -> HANDLE;
+ fn DeviceIoControl(hDevice: HANDLE,
+ dwIoControlCode: DWORD,
+ lpInBuffer: LPVOID,
+ nInBufferSize: DWORD,
+ lpOutBuffer: LPVOID,
+ nOutBufferSize: DWORD,
+ lpBytesReturned: LPDWORD,
+ lpOverlapped: LPOVERLAPPED) -> BOOL;
+ }
+
+ fn to_u16s<S: AsRef<OsStr>>(s: S) -> io::Result<Vec<u16>> {
+ Ok(s.as_ref().encode_wide().chain(Some(0)).collect())
+ }
+
+ // We're using low-level APIs to create the junction, and these are more
+ // picky about paths. For example, forward slashes cannot be used as a
+ // path separator, so we should try to canonicalize the path first.
+ let target = try!(fs::canonicalize(target));
+
+ try!(fs::create_dir(junction));
+
+ let path = try!(to_u16s(junction));
+
+ unsafe {
+ let h = CreateFileW(path.as_ptr(),
+ GENERIC_WRITE,
+ FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE,
+ 0 as *mut _,
+ OPEN_EXISTING,
+ FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
+ ptr::null_mut());
+
+ let mut data = [0u8; MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
+ let mut db = data.as_mut_ptr()
+ as *mut REPARSE_MOUNTPOINT_DATA_BUFFER;
+ let buf = &mut (*db).ReparseTarget as *mut _;
+ let mut i = 0;
+ // FIXME: this conversion is very hacky
+ let v = br"\??\";
+ let v = v.iter().map(|x| *x as u16);
+ for c in v.chain(target.as_os_str().encode_wide().skip(4)) {
+ *buf.offset(i) = c;
+ i += 1;
+ }
+ *buf.offset(i) = 0;
+ i += 1;
+ (*db).ReparseTag = IO_REPARSE_TAG_MOUNT_POINT;
+ (*db).ReparseTargetMaximumLength = (i * 2) as WORD;
+ (*db).ReparseTargetLength = ((i - 1) * 2) as WORD;
+ (*db).ReparseDataLength =
+ (*db).ReparseTargetLength as DWORD + 12;
+
+ let mut ret = 0;
+ let res = DeviceIoControl(h as *mut _,
+ FSCTL_SET_REPARSE_POINT,
+ data.as_ptr() as *mut _,
+ (*db).ReparseDataLength + 8,
+ ptr::null_mut(), 0,
+ &mut ret,
+ ptr::null_mut());
+
+ if res == 0 {
+ Err(io::Error::last_os_error())
+ } else {
+ Ok(())
+ }
+ }
+ }
+}
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ ninja-build \
+ file \
+ curl \
+ ca-certificates \
+ python2.7-dev \
+ git \
+ sudo \
+ bzip2 \
+ xz-utils \
+ swig \
+ libedit-dev \
+ libncurses5-dev
+
+RUN curl -L https://cmake.org/files/v3.8/cmake-3.8.0-rc1-Linux-x86_64.tar.gz | \
+ tar xzf - -C /usr/local --strip-components=1
+
+WORKDIR /tmp
+COPY shared.sh build-toolchain.sh /tmp/
+RUN /tmp/build-toolchain.sh
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENV \
+ AR_x86_64_unknown_fuchsia=x86_64-unknown-fuchsia-ar \
+ CC_x86_64_unknown_fuchsia=x86_64-unknown-fuchsia-clang \
+ CXX_x86_64_unknown_fuchsia=x86_64-unknown-fuchsia-clang++ \
+ AR_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-ar \
+ CC_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-clang \
+ CXX_aarch64_unknown_fuchsia=aarch64-unknown-fuchsia-clang++
+
+ENV TARGETS=x86_64-unknown-fuchsia
+ENV TARGETS=$TARGETS,aarch64-unknown-fuchsia
+
+ENV RUST_CONFIGURE_ARGS --target=$TARGETS
+ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
--- /dev/null
+#!/bin/bash
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+source shared.sh
+
+# Download sources
+SRCS=(
+ "https://fuchsia.googlesource.com/magenta magenta ac69119"
+ "https://fuchsia.googlesource.com/third_party/llvm llvm 5463083"
+ "https://fuchsia.googlesource.com/third_party/clang llvm/tools/clang 4ff7b4b"
+ "https://fuchsia.googlesource.com/third_party/lld llvm/tools/lld fd465a3"
+ "https://fuchsia.googlesource.com/third_party/lldb llvm/tools/lldb 6bb11f8"
+ "https://fuchsia.googlesource.com/third_party/compiler-rt llvm/runtimes/compiler-rt 52d4ecc"
+ "https://fuchsia.googlesource.com/third_party/libcxx llvm/runtimes/libcxx e891cc8"
+ "https://fuchsia.googlesource.com/third_party/libcxxabi llvm/runtimes/libcxxabi f0f0257"
+ "https://fuchsia.googlesource.com/third_party/libunwind llvm/runtimes/libunwind 50bddc1"
+)
+
+fetch() {
+ mkdir -p $2
+ pushd $2 > /dev/null
+ curl -sL $1/+archive/$3.tar.gz | tar xzf -
+ popd > /dev/null
+}
+
+for i in "${SRCS[@]}"; do
+ fetch $i
+done
+
+# Build toolchain
+cd llvm
+mkdir build
+cd build
+hide_output cmake -GNinja \
+ -DFUCHSIA_SYSROOT=${PWD}/../../magenta/third_party/ulib/musl \
+ -DLLVM_ENABLE_LTO=OFF \
+ -DCLANG_BOOTSTRAP_PASSTHROUGH=LLVM_ENABLE_LTO \
+ -C ../tools/clang/cmake/caches/Fuchsia.cmake \
+ ..
+hide_output ninja stage2-distribution
+hide_output ninja stage2-install-distribution
+cd ../..
+
+# Build sysroot
+rm -rf llvm/runtimes/compiler-rt
+./magenta/scripts/download-toolchain
+
+build_sysroot() {
+ local arch="$1"
+
+ case "${arch}" in
+ x86_64) tgt="magenta-pc-x86-64" ;;
+ aarch64) tgt="magenta-qemu-arm64" ;;
+ esac
+
+ hide_output make -C magenta -j$(getconf _NPROCESSORS_ONLN) $tgt
+ dst=/usr/local/${arch}-unknown-fuchsia
+ mkdir -p $dst
+ cp -r magenta/build-${tgt}/sysroot/include $dst/
+ cp -r magenta/build-${tgt}/sysroot/lib $dst/
+
+ cd llvm
+ mkdir build-runtimes-${arch}
+ cd build-runtimes-${arch}
+ hide_output cmake -GNinja \
+ -DCMAKE_C_COMPILER=clang \
+ -DCMAKE_CXX_COMPILER=clang++ \
+ -DCMAKE_AR=/usr/local/bin/llvm-ar \
+ -DCMAKE_RANLIB=/usr/local/bin/llvm-ranlib \
+ -DCMAKE_INSTALL_PREFIX= \
+ -DLLVM_MAIN_SRC_DIR=${PWD}/.. \
+ -DLLVM_BINARY_DIR=${PWD}/../build \
+ -DLLVM_ENABLE_WERROR=OFF \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DLLVM_INCLUDE_TESTS=ON \
+ -DCMAKE_SYSTEM_NAME=Fuchsia \
+ -DCMAKE_C_COMPILER_TARGET=${arch}-fuchsia \
+ -DCMAKE_CXX_COMPILER_TARGET=${arch}-fuchsia \
+ -DUNIX=1 \
+ -DLIBCXX_HAS_MUSL_LIBC=ON \
+ -DLIBCXXABI_USE_LLVM_UNWINDER=ON \
+ -DCMAKE_SYSROOT=${dst} \
+ -DCMAKE_C_COMPILER_FORCED=TRUE \
+ -DCMAKE_CXX_COMPILER_FORCED=TRUE \
+ -DLLVM_ENABLE_LIBCXX=ON \
+ -DCMAKE_EXE_LINKER_FLAGS="-nodefaultlibs -lc" \
+ -DCMAKE_SHARED_LINKER_FLAGS="$(clang --target=${arch}-fuchsia -print-libgcc-file-name)" \
+ ../runtimes
+ hide_output env DESTDIR="${dst}" ninja install
+ cd ../..
+}
+
+build_sysroot "x86_64"
+build_sysroot "aarch64"
+
+rm -rf magenta llvm
+
+for arch in x86_64 aarch64; do
+ for tool in clang clang++; do
+ cat >/usr/local/bin/${arch}-unknown-fuchsia-${tool} <<EOF
+#!/bin/sh
+${tool} --target=${arch}-unknown-fuchsia --sysroot=/usr/local/${arch}-unknown-fuchsia "\$@"
+EOF
+ chmod +x /usr/local/bin/${arch}-unknown-fuchsia-${tool}
+ done
+ ln -s /usr/local/bin/llvm-ar /usr/local/bin/${arch}-unknown-fuchsia-ar
+done
--- /dev/null
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+hide_output() {
+ set +x
+ on_err="
+echo ERROR: An error was encountered with the build.
+cat /tmp/build.log
+exit 1
+"
+ trap "$on_err" ERR
+ bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
+ PING_LOOP_PID=$!
+ "$@" &> /tmp/build.log
+ trap - ERR
+ kill $PING_LOOP_PID
+ set -x
+}
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ g++-multilib \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ xz-utils \
+ sudo \
+ gdb \
+ patch \
+ libssl-dev \
+ pkg-config
+
+WORKDIR /build/
+COPY musl-libunwind-patch.patch build-musl.sh /build/
+RUN sh /build/build-musl.sh && rm -rf /build
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENV RUST_CONFIGURE_ARGS \
+ --target=i686-unknown-linux-musl,i586-unknown-linux-gnu \
+ --musl-root-i686=/musl-i686
+
+# Newer binutils broke things on some vms/distros (i.e., linking against
+# unknown relocs disabled by the following flag), so we need to go out of our
+# way to produce "super compatible" binaries.
+#
+# See: https://github.com/rust-lang/rust/issues/34978
+ENV CFLAGS_i686_unknown_linux_musl=-Wa,-mrelax-relocations=no
+
+ENV SCRIPT \
+ python2.7 ../x.py test \
+ --target i686-unknown-linux-musl \
+ --target i586-unknown-linux-gnu \
+ && \
+ python2.7 ../x.py dist \
+ --target i686-unknown-linux-musl \
+ --target i586-unknown-linux-gnu
--- /dev/null
+#!/bin/sh
+# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+# We need to mitigate rust-lang/rust#34978 when compiling musl itself as well
+export CFLAGS="-fPIC -Wa,-mrelax-relocations=no"
+export CXXFLAGS="-Wa,-mrelax-relocations=no"
+
+MUSL=musl-1.1.14
+curl https://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf -
+cd $MUSL
+CFLAGS="$CFLAGS -m32" ./configure --prefix=/musl-i686 --disable-shared --target=i686
+make -j10
+make install
+cd ..
+
+# To build MUSL we're going to need a libunwind lying around, so acquire that
+# here and build it.
+curl -L https://github.com/llvm-mirror/llvm/archive/release_37.tar.gz | tar xzf -
+curl -L https://github.com/llvm-mirror/libunwind/archive/release_37.tar.gz | tar xzf -
+
+# Whoa what's this mysterious patch we're applying to libunwind! Why are we
+# swapping the values of ESP/EBP in libunwind?!
+#
+# Discovered in #35599 it turns out that the vanilla build of libunwind is not
+# suitable for unwinding 32-bit musl. After some investigation it ended up
+# looking like the register values for ESP/EBP were indeed incorrect (swapped)
+# in the source. Similar commits in libunwind (r280099 and r282589) have noticed
+# this for other platforms, and we just need to realize it for musl linux as
+# well.
+#
+# More technical info can be found at #35599
+cd libunwind-release_37
+patch -Np1 < /build/musl-libunwind-patch.patch
+cd ..
+
+mkdir libunwind-build
+cd libunwind-build
+CFLAGS="$CFLAGS -m32" CXXFLAGS="$CXXFLAGS -m32" cmake ../libunwind-release_37 \
+ -DLLVM_PATH=/build/llvm-release_37 \
+ -DLIBUNWIND_ENABLE_SHARED=0
+make -j10
+cp lib/libunwind.a /musl-i686/lib
--- /dev/null
+diff --git a/include/libunwind.h b/include/libunwind.h
+index c5b9633..1360eb2 100644
+--- a/include/libunwind.h
++++ b/include/libunwind.h
+@@ -151,8 +151,8 @@ enum {
+ UNW_X86_ECX = 1,
+ UNW_X86_EDX = 2,
+ UNW_X86_EBX = 3,
+- UNW_X86_EBP = 4,
+- UNW_X86_ESP = 5,
++ UNW_X86_ESP = 4,
++ UNW_X86_EBP = 5,
+ UNW_X86_ESI = 6,
+ UNW_X86_EDI = 7
+ };
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ xz-utils \
+ sudo \
+ gdb \
+ patch \
+ libssl-dev \
+ pkg-config
+
+WORKDIR /build/
+COPY build-musl.sh /build/
+RUN sh /build/build-musl.sh && rm -rf /build
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENV RUST_CONFIGURE_ARGS \
+ --target=x86_64-unknown-linux-musl \
+ --musl-root-x86_64=/musl-x86_64
+
+# Newer binutils broke things on some vms/distros (i.e., linking against
+# unknown relocs disabled by the following flag), so we need to go out of our
+# way to produce "super compatible" binaries.
+#
+# See: https://github.com/rust-lang/rust/issues/34978
+ENV CFLAGS_x86_64_unknown_linux_musl=-Wa,-mrelax-relocations=no
+
+ENV SCRIPT \
+ python2.7 ../x.py test --target x86_64-unknown-linux-musl && \
+ python2.7 ../x.py dist --target x86_64-unknown-linux-musl
--- /dev/null
+#!/bin/sh
+# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+# We need to mitigate rust-lang/rust#34978 when compiling musl itself as well
+export CFLAGS="-fPIC -Wa,-mrelax-relocations=no"
+export CXXFLAGS="-Wa,-mrelax-relocations=no"
+
+MUSL=musl-1.1.14
+curl https://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf -
+cd $MUSL
+./configure --prefix=/musl-x86_64 --disable-shared
+make -j10
+make install
+
+cd ..
+rm -rf $MUSL
+
+# To build MUSL we're going to need a libunwind lying around, so acquire that
+# here and build it.
+curl -L https://github.com/llvm-mirror/llvm/archive/release_37.tar.gz | tar xzf -
+curl -L https://github.com/llvm-mirror/libunwind/archive/release_37.tar.gz | tar xzf -
+
+mkdir libunwind-build
+cd libunwind-build
+cmake ../libunwind-release_37 -DLLVM_PATH=/build/llvm-release_37 \
+ -DLIBUNWIND_ENABLE_SHARED=0
+make -j10
+cp lib/libunwind.a /musl-x86_64/lib
+++ /dev/null
-FROM ubuntu:16.04
-
-RUN apt-get update && apt-get install -y --no-install-recommends \
- g++-multilib \
- make \
- file \
- curl \
- ca-certificates \
- python2.7 \
- git \
- cmake \
- xz-utils \
- sudo \
- gdb \
- patch \
- libssl-dev \
- pkg-config
-
-WORKDIR /build/
-COPY musl-libunwind-patch.patch build-musl.sh /build/
-RUN sh /build/build-musl.sh && rm -rf /build
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- dpkg -i dumb-init_*.deb && \
- rm dumb-init_*.deb
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
-
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
-
-ENV RUST_CONFIGURE_ARGS \
- --target=x86_64-unknown-linux-musl,i686-unknown-linux-musl,i586-unknown-linux-gnu \
- --musl-root-x86_64=/musl-x86_64 \
- --musl-root-i686=/musl-i686
-
-# Newer binutils broke things on some vms/distros (i.e., linking against
-# unknown relocs disabled by the following flag), so we need to go out of our
-# way to produce "super compatible" binaries.
-#
-# See: https://github.com/rust-lang/rust/issues/34978
-ENV CFLAGS_i686_unknown_linux_musl=-Wa,-mrelax-relocations=no \
- CFLAGS_x86_64_unknown_linux_musl=-Wa,-mrelax-relocations=no
-
-ENV SCRIPT \
- python2.7 ../x.py test \
- --target x86_64-unknown-linux-musl \
- --target i686-unknown-linux-musl \
- --target i586-unknown-linux-gnu \
- && \
- python2.7 ../x.py dist \
- --target x86_64-unknown-linux-musl \
- --target i686-unknown-linux-musl \
- --target i586-unknown-linux-gnu
+++ /dev/null
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-# We need to mitigate rust-lang/rust#34978 when compiling musl itself as well
-export CFLAGS="-fPIC -Wa,-mrelax-relocations=no"
-export CXXFLAGS="-Wa,-mrelax-relocations=no"
-
-MUSL=musl-1.1.14
-curl https://www.musl-libc.org/releases/$MUSL.tar.gz | tar xzf -
-cd $MUSL
-./configure --prefix=/musl-x86_64 --disable-shared
-make -j10
-make install
-make clean
-# for i686
-CFLAGS="$CFLAGS -m32" ./configure --prefix=/musl-i686 --disable-shared --target=i686
-make -j10
-make install
-cd ..
-
-# To build MUSL we're going to need a libunwind lying around, so acquire that
-# here and build it.
-curl -L https://github.com/llvm-mirror/llvm/archive/release_37.tar.gz | tar xzf -
-curl -L https://github.com/llvm-mirror/libunwind/archive/release_37.tar.gz | tar xzf -
-
-# Whoa what's this mysterious patch we're applying to libunwind! Why are we
-# swapping the values of ESP/EBP in libunwind?!
-#
-# Discovered in #35599 it turns out that the vanilla build of libunwind is not
-# suitable for unwinding 32-bit musl. After some investigation it ended up
-# looking like the register values for ESP/EBP were indeed incorrect (swapped)
-# in the source. Similar commits in libunwind (r280099 and r282589) have noticed
-# this for other platforms, and we just need to realize it for musl linux as
-# well.
-#
-# More technical info can be found at #35599
-cd libunwind-release_37
-patch -Np1 < /build/musl-libunwind-patch.patch
-cd ..
-
-mkdir libunwind-build
-cd libunwind-build
-cmake ../libunwind-release_37 -DLLVM_PATH=/build/llvm-release_37 \
- -DLIBUNWIND_ENABLE_SHARED=0
-make -j10
-cp lib/libunwind.a /musl-x86_64/lib
-
-# (Note: the next cmake call doesn't fully override the previous cached one, so remove the cached
-# configuration manually. IOW, if don't do this or call make clean we'll end up building libunwind
-# for x86_64 again)
-rm -rf *
-# for i686
-CFLAGS="$CFLAGS -m32" CXXFLAGS="$CXXFLAGS -m32" cmake ../libunwind-release_37 \
- -DLLVM_PATH=/build/llvm-release_37 \
- -DLIBUNWIND_ENABLE_SHARED=0
-make -j10
-cp lib/libunwind.a /musl-i686/lib
+++ /dev/null
-diff --git a/include/libunwind.h b/include/libunwind.h
-index c5b9633..1360eb2 100644
---- a/include/libunwind.h
-+++ b/include/libunwind.h
-@@ -151,8 +151,8 @@ enum {
- UNW_X86_ECX = 1,
- UNW_X86_EDX = 2,
- UNW_X86_EBX = 3,
-- UNW_X86_EBP = 4,
-- UNW_X86_ESP = 5,
-+ UNW_X86_ESP = 4,
-+ UNW_X86_EBP = 5,
- UNW_X86_ESI = 6,
- UNW_X86_EDI = 7
- };
objdir=$root_dir/obj
mkdir -p $HOME/.cargo
-mkdir -p $objdir
+mkdir -p $objdir/tmp
args=
if [ "$SCCACHE_BUCKET" != "" ]; then
args="$args --env SCCACHE_BUCKET=$SCCACHE_BUCKET"
args="$args --env AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID"
args="$args --env AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY"
+ args="$args --env SCCACHE_ERROR_LOG=/tmp/sccache/sccache.log"
+ args="$args --env SCCACHE_LOG_LEVEL=debug"
+ args="$args --env RUST_LOG=sccache=debug"
+ args="$args --volume $objdir/tmp:/tmp/sccache"
else
mkdir -p $HOME/.cache/sccache
args="$args --env SCCACHE_DIR=/sccache --volume $HOME/.cache/sccache:/sccache"
--env DEPLOY_ALT=$DEPLOY_ALT \
--env LOCAL_USER_ID=`id -u` \
--volume "$HOME/.cargo:/cargo" \
+ --privileged \
--rm \
rust-ci \
/checkout/src/ci/run.sh
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-manage-submodules"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-locked-deps"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-cargo-openssl-static"
+RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-llvm-clean-rebuild"
if [ "$DIST_SRC" = "" ]; then
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-dist-src"
if [ "$DEPLOY$DEPLOY_ALT" != "" ]; then
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --release-channel=nightly"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-llvm-static-stdcpp"
+ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-save-analysis"
if [ "$NO_LLVM_ASSERTIONS" = "1" ]; then
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-llvm-assertions"
# fn foo() {}
```
-Let's discuss the details of these code blocks.
-
#### Code block annotations
To write some Rust code in a comment, use the triple graves:
# fn foo() {}
```
-If you want something that's not Rust code, you can add an annotation:
-
-```rust
-/// ```c
-/// printf("Hello, world\n");
-/// ```
-# fn foo() {}
-```
-
-This will highlight according to whatever language you're showing off.
-If you're only showing plain text, choose `text`.
-
-It's important to choose the correct annotation here, because `rustdoc` uses it
-in an interesting way: It can be used to actually test your examples in a
-library crate, so that they don't get out of date. If you have some C code but
-`rustdoc` thinks it's Rust because you left off the annotation, `rustdoc` will
-complain when trying to generate the documentation.
+This will add code highlighting. If you are only showing plain text, put `text`
+instead of `rust` after the triple graves (see below).
## Documentation as tests
It’s important to be mindful of `panic!`s when working with FFI. A `panic!`
across an FFI boundary is undefined behavior. If you’re writing code that may
-panic, you should run it in a closure with [`catch_unwind()`]:
+panic, you should run it in a closure with [`catch_unwind`]:
```rust
use std::panic::catch_unwind;
fn main() {}
```
-Please note that [`catch_unwind()`] will only catch unwinding panics, not
-those who abort the process. See the documentation of [`catch_unwind()`]
+Please note that [`catch_unwind`] will only catch unwinding panics, not
+those that abort the process. See the documentation of [`catch_unwind`]
for more information.
-[`catch_unwind()`]: ../std/panic/fn.catch_unwind.html
+[`catch_unwind`]: ../std/panic/fn.catch_unwind.html
# Representing opaque structs
.read_line(&mut guess)
```
-Here, we call the [`read_line()`][read_line] method on our handle.
+Here, we call the [`read_line`][read_line] method on our handle.
[Methods][method] are like associated functions, but are only available on a
particular instance of a type, rather than the type itself. We’re also passing
one argument to `read_line()`: `&mut guess`.
test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured
```
-We also get a non-zero status code. We can use `$?` on OS X and Linux:
+We also get a non-zero status code. We can use `$?` on macOS and Linux:
```bash
$ echo $?
Learning] project collects documentation from the community, and [Docs.rs]
builds documentation for individual Rust packages.
-## API Documentation
+# API Documentation
Rust provides a standard library with a number of features; [we host its
documentation here][api].
-## Extended Error Documentation
+# Extended Error Documentation
Many of Rust's errors come with error codes, and you can request extended
diagnostics from the compiler on those errors. We also [have the text of those
extended errors on the web][err], if you prefer to read them that way.
-## The Rust Bookshelf
+# The Rust Bookshelf
Rust provides a number of book-length sets of documentation, collectively
nicknamed 'The Rust Bookshelf.'
------------------------
+The `concat_idents` feature adds a macro for concatenating multiple identifiers
+into one identifier.
+## Examples
+```rust
+#![feature(concat_idents)]
+
+fn main() {
+ fn foobar() -> u32 { 23 }
+ let f = concat_idents!(foo, bar);
+ assert_eq!(f(), 23);
+}
+```
\ No newline at end of file
------------------------
+The `conservative_impl_trait` feature allows a conservative form of abstract
+return types.
+Abstract return types allow a function to hide a concrete return type behind a
+trait interface similar to trait objects, while still generating the same
+statically dispatched code as with concrete types.
+## Examples
+
+```rust
+#![feature(conservative_impl_trait)]
+
+fn even_iter() -> impl Iterator<Item=u32> {
+ (0..).map(|n| n * 2)
+}
+
+fn main() {
+ let first_four_even_numbers = even_iter().take(4).collect::<Vec<_>>();
+ assert_eq!(first_four_even_numbers, vec![0, 2, 4, 6]);
+}
+```
+
+## Background
+
+In today's Rust, you can write function signatures like:
+
+````rust,ignore
+fn consume_iter_static<I: Iterator<u8>>(iter: I) { }
+
+fn consume_iter_dynamic(iter: Box<Iterator<u8>>) { }
+````
+
+In both cases, the function does not depend on the exact type of the argument.
+The type held is "abstract", and is assumed only to satisfy a trait bound.
+
+* In the `_static` version using generics, each use of the function is
+ specialized to a concrete, statically-known type, giving static dispatch,
+ inline layout, and other performance wins.
+* In the `_dynamic` version using trait objects, the concrete argument type is
+ only known at runtime using a vtable.
+
+On the other hand, while you can write:
+
+````rust,ignore
+fn produce_iter_dynamic() -> Box<Iterator<u8>> { }
+````
+
+...but you _cannot_ write something like:
+
+````rust,ignore
+fn produce_iter_static() -> Iterator<u8> { }
+````
+
+That is, in today's Rust, abstract return types can only be written using trait
+objects, which can be a significant performance penalty. This RFC proposes
+"unboxed abstract types" as a way of achieving signatures like
+`produce_iter_static`. Like generics, unboxed abstract types guarantee static
+dispatch and inline data layout.
------------------------
+The `const_fn` feature allows marking free functions and inherent methods as
+`const`, enabling them to be called in constant contexts, with constant
+arguments.
+## Examples
+```rust
+#![feature(const_fn)]
+
+const fn double(x: i32) -> i32 {
+ x * 2
+}
+
+const FIVE: i32 = 5;
+const TEN: i32 = double(FIVE);
+
+fn main() {
+ assert_eq!(5, FIVE);
+ assert_eq!(10, TEN);
+}
+```
------------------------
+The `const_indexing` feature allows the constant evaluation of index operations
+on constant arrays and repeat expressions.
+## Examples
+```rust
+#![feature(const_indexing)]
+
+const ARR: [usize; 5] = [1, 2, 3, 4, 5];
+const ARR2: [usize; ARR[1]] = [42, 99];
+```
\ No newline at end of file
------------------------
+The `i128_type` feature adds support for 128 bit signed and unsigned integer
+types.
+```rust
+#![feature(i128_type)]
+
+fn main() {
+ assert_eq!(1u128 + 1u128, 2u128);
+ assert_eq!(u128::min_value(), 0);
+ assert_eq!(u128::max_value(), 340282366920938463463374607431768211455);
+
+ assert_eq!(1i128 - 2i128, -1i128);
+ assert_eq!(i128::min_value(), -170141183460469231731687303715884105728);
+ assert_eq!(i128::max_value(), 170141183460469231731687303715884105727);
+}
+```
------------------------
+The `non_ascii_idents` feature adds support for non-ASCII identifiers.
+## Examples
+```rust
+#![feature(non_ascii_idents)]
+
+const ε: f64 = 0.00001f64;
+const Π: f64 = 3.14f64;
+```
\ No newline at end of file
impl<T> Box<T> {
/// Allocates memory on the heap and then places `x` into it.
///
+ /// This doesn't actually allocate if `T` is zero-sized.
+ ///
/// # Examples
///
/// ```
//! Single-threaded reference-counting pointers.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
-//! allocated in the heap. Invoking [`clone()`][clone] on [`Rc`] produces a new
+//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same value in the heap. When the last [`Rc`] pointer to a
//! given value is destroyed, the pointed-to value is also destroyed.
//!
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
//!
-//! The [`downgrade()`][downgrade] method can be used to create a non-owning
+//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value has
//! already been dropped.
mod imp {
use libc::{c_int, c_void, size_t};
- // Note that the symbols here are prefixed by default on OSX and Windows (we
+ // Note that the symbols here are prefixed by default on macOS and Windows (we
// don't explicitly request it), and on Android and DragonFly we explicitly
// request it as unprefixing cause segfaults (mismatches in allocators).
extern "C" {
data: Vec<T>,
}
-/// A container object that represents the result of the [`peek_mut()`] method
+/// A container object that represents the result of the [`peek_mut`] method
/// on `BinaryHeap`. See its documentation for details.
///
-/// [`peek_mut()`]: struct.BinaryHeap.html#method.peek_mut
+/// [`peek_mut`]: struct.BinaryHeap.html#method.peek_mut
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
pub struct PeekMut<'a, T: 'a + Ord> {
heap: &'a mut BinaryHeap<T>,
//! `%`. The actual grammar for the formatting syntax is:
//!
//! ```text
-//! format_string := <text> [ format <text> ] *
+//! format_string := <text> [ maybe-format <text> ] *
+//! maybe-format := '{' '{' | '}' '}' | <format>
//! format := '{' [ argument ] [ ':' format_spec ] '}'
//! argument := integer | identifier
//!
//! the element type of the slice is `i32`, the element type of the iterator is
//! `&mut i32`.
//!
-//! * [`.iter()`] and [`.iter_mut()`] are the explicit methods to return the default
+//! * [`.iter`] and [`.iter_mut`] are the explicit methods to return the default
//! iterators.
-//! * Further methods that return iterators are [`.split()`], [`.splitn()`],
-//! [`.chunks()`], [`.windows()`] and more.
+//! * Further methods that return iterators are [`.split`], [`.splitn`],
+//! [`.chunks`], [`.windows`] and more.
//!
//! *[See also the slice primitive type](../../std/primitive.slice.html).*
//!
//! [`Ord`]: ../../std/cmp/trait.Ord.html
//! [`Iter`]: struct.Iter.html
//! [`Hash`]: ../../std/hash/trait.Hash.html
-//! [`.iter()`]: ../../std/primitive.slice.html#method.iter
-//! [`.iter_mut()`]: ../../std/primitive.slice.html#method.iter_mut
-//! [`.split()`]: ../../std/primitive.slice.html#method.split
-//! [`.splitn()`]: ../../std/primitive.slice.html#method.splitn
-//! [`.chunks()`]: ../../std/primitive.slice.html#method.chunks
-//! [`.windows()`]: ../../std/primitive.slice.html#method.windows
+//! [`.iter`]: ../../std/primitive.slice.html#method.iter
+//! [`.iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
+//! [`.split`]: ../../std/primitive.slice.html#method.split
+//! [`.splitn`]: ../../std/primitive.slice.html#method.splitn
+//! [`.chunks`]: ../../std/primitive.slice.html#method.chunks
+//! [`.windows`]: ../../std/primitive.slice.html#method.windows
#![stable(feature = "rust1", since = "1.0.0")]
// Many of the usings in this module are only used in the test configuration.
}
/// Returns a mutable reference to an element or subslice depending on the
- /// type of index (see [`get()`]) or `None` if the index is out of bounds.
+ /// type of index (see [`get`]) or `None` if the index is out of bounds.
///
- /// [`get()`]: #method.get
+ /// [`get`]: #method.get
///
/// # Examples
///
/// excluding `end`.
///
/// To get a mutable string slice instead, see the
- /// [`slice_mut_unchecked()`] method.
+ /// [`slice_mut_unchecked`] method.
///
- /// [`slice_mut_unchecked()`]: #method.slice_mut_unchecked
+ /// [`slice_mut_unchecked`]: #method.slice_mut_unchecked
///
/// # Safety
///
/// excluding `end`.
///
/// To get an immutable string slice instead, see the
- /// [`slice_unchecked()`] method.
+ /// [`slice_unchecked`] method.
///
- /// [`slice_unchecked()`]: #method.slice_unchecked
+ /// [`slice_unchecked`]: #method.slice_unchecked
///
/// # Safety
///
/// The two slices returned go from the start of the string slice to `mid`,
/// and from `mid` to the end of the string slice.
///
- /// To get mutable string slices instead, see the [`split_at_mut()`]
+ /// To get mutable string slices instead, see the [`split_at_mut`]
/// method.
///
- /// [`split_at_mut()`]: #method.split_at_mut
+ /// [`split_at_mut`]: #method.split_at_mut
///
/// # Panics
///
/// The two slices returned go from the start of the string slice to `mid`,
/// and from `mid` to the end of the string slice.
///
- /// To get immutable string slices instead, see the [`split_at()`] method.
+ /// To get immutable string slices instead, see the [`split_at`] method.
///
- /// [`split_at()`]: #method.split_at
+ /// [`split_at`]: #method.split_at
///
/// # Panics
///
/// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
///
/// If the pattern allows a reverse search but its results might differ
- /// from a forward search, the [`rsplit()`] method can be used.
+ /// from a forward search, the [`rsplit`] method can be used.
///
/// [`char`]: primitive.char.html
- /// [`rsplit()`]: #method.rsplit
+ /// [`rsplit`]: #method.rsplit
///
/// # Examples
///
/// assert_eq!(d, &["a", "b", "c"]);
/// ```
///
- /// Use [`split_whitespace()`] for this behavior.
+ /// Use [`split_whitespace`] for this behavior.
///
- /// [`split_whitespace()`]: #method.split_whitespace
+ /// [`split_whitespace`]: #method.split_whitespace
#[stable(feature = "rust1", since = "1.0.0")]
pub fn split<'a, P: Pattern<'a>>(&'a self, pat: P) -> Split<'a, P> {
core_str::StrExt::split(self, pat)
///
/// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
///
- /// For iterating from the front, the [`split()`] method can be used.
+ /// For iterating from the front, the [`split`] method can be used.
///
- /// [`split()`]: #method.split
+ /// [`split`]: #method.split
///
/// # Examples
///
/// The pattern can be a `&str`, [`char`], or a closure that determines the
/// split.
///
- /// Equivalent to [`split()`], except that the trailing substring
+ /// Equivalent to [`split`], except that the trailing substring
/// is skipped if empty.
///
- /// [`split()`]: #method.split
+ /// [`split`]: #method.split
///
/// This method can be used for string data that is _terminated_,
/// rather than _separated_ by a pattern.
/// [`char`]: primitive.char.html
///
/// If the pattern allows a reverse search but its results might differ
- /// from a forward search, the [`rsplit_terminator()`] method can be used.
+ /// from a forward search, the [`rsplit_terminator`] method can be used.
///
- /// [`rsplit_terminator()`]: #method.rsplit_terminator
+ /// [`rsplit_terminator`]: #method.rsplit_terminator
///
/// # Examples
///
///
/// [`char`]: primitive.char.html
///
- /// Equivalent to [`split()`], except that the trailing substring is
+ /// Equivalent to [`split`], except that the trailing substring is
/// skipped if empty.
///
- /// [`split()`]: #method.split
+ /// [`split`]: #method.split
///
/// This method can be used for string data that is _terminated_,
/// rather than _separated_ by a pattern.
/// reverse search, and it will be double ended if a forward/reverse
/// search yields the same elements.
///
- /// For iterating from the front, the [`split_terminator()`] method can be
+ /// For iterating from the front, the [`split_terminator`] method can be
/// used.
///
- /// [`split_terminator()`]: #method.split_terminator
+ /// [`split_terminator`]: #method.split_terminator
///
/// # Examples
///
/// The returned iterator will not be double ended, because it is
/// not efficient to support.
///
- /// If the pattern allows a reverse search, the [`rsplitn()`] method can be
+ /// If the pattern allows a reverse search, the [`rsplitn`] method can be
/// used.
///
- /// [`rsplitn()`]: #method.rsplitn
+ /// [`rsplitn`]: #method.rsplitn
///
/// # Examples
///
/// The returned iterator will not be double ended, because it is not
/// efficient to support.
///
- /// For splitting from the front, the [`splitn()`] method can be used.
+ /// For splitting from the front, the [`splitn`] method can be used.
///
- /// [`splitn()`]: #method.splitn
+ /// [`splitn`]: #method.splitn
///
/// # Examples
///
/// [`char`]: primitive.char.html
///
/// If the pattern allows a reverse search but its results might differ
- /// from a forward search, the [`rmatches()`] method can be used.
+ /// from a forward search, the [`rmatches`] method can be used.
///
- /// [`rmatches()`]: #method.rmatches
+ /// [`rmatches`]: #method.rmatches
///
/// # Examples
///
///
/// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
///
- /// For iterating from the front, the [`matches()`] method can be used.
+ /// For iterating from the front, the [`matches`] method can be used.
///
- /// [`matches()`]: #method.matches
+ /// [`matches`]: #method.matches
///
/// # Examples
///
/// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
///
/// If the pattern allows a reverse search but its results might differ
- /// from a forward search, the [`rmatch_indices()`] method can be used.
+ /// from a forward search, the [`rmatch_indices`] method can be used.
///
- /// [`rmatch_indices()`]: #method.rmatch_indices
+ /// [`rmatch_indices`]: #method.rmatch_indices
///
/// # Examples
///
///
/// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
///
- /// For iterating from the front, the [`match_indices()`] method can be used.
+ /// For iterating from the front, the [`match_indices`] method can be used.
///
- /// [`match_indices()`]: #method.match_indices
+ /// [`match_indices`]: #method.match_indices
///
/// # Examples
///
/// let hello = String::from("Hello, world!");
/// ```
///
-/// You can append a [`char`] to a `String` with the [`push()`] method, and
-/// append a [`&str`] with the [`push_str()`] method:
+/// You can append a [`char`] to a `String` with the [`push`] method, and
+/// append a [`&str`] with the [`push_str`] method:
///
/// ```
/// let mut hello = String::from("Hello, ");
/// ```
///
/// [`char`]: ../../std/primitive.char.html
-/// [`push()`]: #method.push
-/// [`push_str()`]: #method.push_str
+/// [`push`]: #method.push
+/// [`push_str`]: #method.push_str
///
/// If you have a vector of UTF-8 bytes, you can create a `String` from it with
-/// the [`from_utf8()`] method:
+/// the [`from_utf8`] method:
///
/// ```
/// // some bytes, in a vector
/// assert_eq!("💖", sparkle_heart);
/// ```
///
-/// [`from_utf8()`]: #method.from_utf8
+/// [`from_utf8`]: #method.from_utf8
///
/// # UTF-8
///
/// Indexing is intended to be a constant-time operation, but UTF-8 encoding
/// does not allow us to do this. Furthermore, it's not clear what sort of
/// thing the index should return: a byte, a codepoint, or a grapheme cluster.
-/// The [`bytes()`] and [`chars()`] methods return iterators over the first
+/// The [`bytes`] and [`chars`] methods return iterators over the first
/// two, respectively.
///
-/// [`bytes()`]: #method.bytes
-/// [`chars()`]: #method.chars
+/// [`bytes`]: #method.bytes
+/// [`chars`]: #method.chars
///
/// # Deref
///
///
/// This buffer is always stored on the heap.
///
-/// You can look at these with the [`as_ptr()`], [`len()`], and [`capacity()`]
+/// You can look at these with the [`as_ptr`], [`len`], and [`capacity`]
/// methods:
///
/// ```
/// assert_eq!(String::from("Once upon a time..."), s);
/// ```
///
-/// [`as_ptr()`]: #method.as_ptr
-/// [`len()`]: #method.len
-/// [`capacity()`]: #method.capacity
+/// [`as_ptr`]: #method.as_ptr
+/// [`len`]: #method.len
+/// [`capacity`]: #method.capacity
///
/// If a `String` has enough capacity, adding elements to it will not
/// re-allocate. For example, consider this program:
///
/// At first, we have no memory allocated at all, but as we append to the
/// string, it increases its capacity appropriately. If we instead use the
-/// [`with_capacity()`] method to allocate the correct capacity initially:
+/// [`with_capacity`] method to allocate the correct capacity initially:
///
/// ```
/// let mut s = String::with_capacity(25);
/// }
/// ```
///
-/// [`with_capacity()`]: #method.with_capacity
+/// [`with_capacity`]: #method.with_capacity
///
/// We end up with a different output:
///
/// A possible error value when converting a `String` from a UTF-8 byte vector.
///
-/// This type is the error type for the [`from_utf8()`] method on [`String`]. It
+/// This type is the error type for the [`from_utf8`] method on [`String`]. It
/// is designed in such a way to carefully avoid reallocations: the
-/// [`into_bytes()`] method will give back the byte vector that was used in the
+/// [`into_bytes`] method will give back the byte vector that was used in the
/// conversion attempt.
///
-/// [`from_utf8()`]: struct.String.html#method.from_utf8
+/// [`from_utf8`]: struct.String.html#method.from_utf8
/// [`String`]: struct.String.html
-/// [`into_bytes()`]: struct.FromUtf8Error.html#method.into_bytes
+/// [`into_bytes`]: struct.FromUtf8Error.html#method.into_bytes
///
/// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
/// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
/// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error`
-/// through the [`utf8_error()`] method.
+/// through the [`utf8_error`] method.
///
/// [`Utf8Error`]: ../../std/str/struct.Utf8Error.html
/// [`std::str`]: ../../std/str/index.html
/// [`u8`]: ../../std/primitive.u8.html
/// [`&str`]: ../../std/primitive.str.html
-/// [`utf8_error()`]: #method.utf8_error
+/// [`utf8_error`]: #method.utf8_error
///
/// # Examples
///
/// A possible error value when converting a `String` from a UTF-16 byte slice.
///
-/// This type is the error type for the [`from_utf16()`] method on [`String`].
+/// This type is the error type for the [`from_utf16`] method on [`String`].
///
-/// [`from_utf16()`]: struct.String.html#method.from_utf16
+/// [`from_utf16`]: struct.String.html#method.from_utf16
/// [`String`]: struct.String.html
///
/// # Examples
/// buffer. While that means that this initial operation is very
/// inexpensive, but may cause excessive allocation later, when you add
/// data. If you have an idea of how much data the `String` will hold,
- /// consider the [`with_capacity()`] method to prevent excessive
+ /// consider the [`with_capacity`] method to prevent excessive
/// re-allocation.
///
- /// [`with_capacity()`]: #method.with_capacity
+ /// [`with_capacity`]: #method.with_capacity
///
/// # Examples
///
/// Creates a new empty `String` with a particular capacity.
///
/// `String`s have an internal buffer to hold their data. The capacity is
- /// the length of that buffer, and can be queried with the [`capacity()`]
+ /// the length of that buffer, and can be queried with the [`capacity`]
/// method. This method creates an empty `String`, but one with an initial
/// buffer that can hold `capacity` bytes. This is useful when you may be
/// appending a bunch of data to the `String`, reducing the number of
/// reallocations it needs to do.
///
- /// [`capacity()`]: #method.capacity
+ /// [`capacity`]: #method.capacity
///
/// If the given capacity is `0`, no allocation will occur, and this method
- /// is identical to the [`new()`] method.
+ /// is identical to the [`new`] method.
///
- /// [`new()`]: #method.new
+ /// [`new`]: #method.new
///
/// # Examples
///
///
/// If you are sure that the byte slice is valid UTF-8, and you don't want
/// to incur the overhead of the validity check, there is an unsafe version
- /// of this function, [`from_utf8_unchecked()`], which has the same behavior
+ /// of this function, [`from_utf8_unchecked`], which has the same behavior
/// but skips the check.
///
- /// [`from_utf8_unchecked()`]: struct.String.html#method.from_utf8_unchecked
+ /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
///
/// This method will take care to not copy the vector, for efficiency's
/// sake.
///
/// If you need a `&str` instead of a `String`, consider
- /// [`str::from_utf8()`].
+ /// [`str::from_utf8`].
///
- /// [`str::from_utf8()`]: ../../std/str/fn.from_utf8.html
+ /// [`str::from_utf8`]: ../../std/str/fn.from_utf8.html
///
/// The inverse of this method is [`as_bytes`].
///
///
/// If you are sure that the byte slice is valid UTF-8, and you don't want
/// to incur the overhead of the conversion, there is an unsafe version
- /// of this function, [`from_utf8_unchecked()`], which has the same behavior
+ /// of this function, [`from_utf8_unchecked`], which has the same behavior
/// but skips the checks.
///
- /// [`from_utf8_unchecked()`]: struct.String.html#method.from_utf8_unchecked
+ /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
///
/// This function returns a [`Cow<'a, str>`]. If our byte slice is invalid
/// UTF-8, then we need to insert the replacement characters, which will
/// Converts a vector of bytes to a `String` without checking that the
/// string contains valid UTF-8.
///
- /// See the safe version, [`from_utf8()`], for more details.
+ /// See the safe version, [`from_utf8`], for more details.
///
- /// [`from_utf8()`]: struct.String.html#method.from_utf8
+ /// [`from_utf8`]: struct.String.html#method.from_utf8
///
/// # Safety
///
/// The capacity may be increased by more than `additional` bytes if it
/// chooses, to prevent frequent reallocations.
///
- /// If you do not want this "at least" behavior, see the [`reserve_exact()`]
+ /// If you do not want this "at least" behavior, see the [`reserve_exact`]
/// method.
///
- /// [`reserve_exact()`]: #method.reserve_exact
+ /// [`reserve_exact`]: #method.reserve_exact
///
/// # Panics
///
/// Ensures that this `String`'s capacity is `additional` bytes
/// larger than its length.
///
- /// Consider using the [`reserve()`] method unless you absolutely know
+ /// Consider using the [`reserve`] method unless you absolutely know
/// better than the allocator.
///
- /// [`reserve()`]: #method.reserve
+ /// [`reserve`]: #method.reserve
///
/// # Panics
///
/// Implements the `+=` operator for appending to a `String`.
///
-/// This has the same behavior as the [`push_str()`] method.
+/// This has the same behavior as the [`push_str`] method.
///
-/// [`push_str()`]: struct.String.html#method.push_str
+/// [`push_str`]: struct.String.html#method.push_str
#[stable(feature = "stringaddassign", since = "1.12.0")]
impl<'a> AddAssign<&'a str> for String {
#[inline]
///
/// This `enum` is slightly awkward: it will never actually exist. This error is
/// part of the type signature of the implementation of [`FromStr`] on
-/// [`String`]. The return type of [`from_str()`], requires that an error be
+/// [`String`]. The return type of [`from_str`], requires that an error be
/// defined, but, given that a [`String`] can always be made into a new
/// [`String`] without error, this type will never actually be returned. As
/// such, it is only here to satisfy said signature, and is useless otherwise.
///
/// [`FromStr`]: ../../std/str/trait.FromStr.html
/// [`String`]: struct.String.html
-/// [`from_str()`]: ../../std/str/trait.FromStr.html#tymethod.from_str
+/// [`from_str`]: ../../std/str/trait.FromStr.html#tymethod.from_str
#[stable(feature = "str_parse_error", since = "1.5.0")]
#[derive(Copy)]
pub enum ParseError {}
}
}
+// note: test pulls in libstd, which causes errors here
+#[cfg(not(test))]
+#[stable(feature = "string_from_box", since = "1.17.0")]
+impl From<Box<str>> for String {
+ fn from(s: Box<str>) -> String {
+ s.into_string()
+ }
+}
+
+#[stable(feature = "box_from_str", since = "1.17.0")]
+impl Into<Box<str>> for String {
+ fn into(self) -> Box<str> {
+ self.into_boxed_str()
+ }
+}
+
#[stable(feature = "string_from_cow_str", since = "1.14.0")]
impl<'a> From<Cow<'a, str>> for String {
fn from(s: Cow<'a, str>) -> String {
/// A draining iterator for `String`.
///
-/// This struct is created by the [`drain()`] method on [`String`]. See its
+/// This struct is created by the [`drain`] method on [`String`]. See its
/// documentation for more.
///
-/// [`drain()`]: struct.String.html#method.drain
+/// [`drain`]: struct.String.html#method.drain
/// [`String`]: struct.String.html
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a> {
//!
//! # Examples
//!
-//! You can explicitly create a [`Vec<T>`] with [`new()`]:
+//! You can explicitly create a [`Vec<T>`] with [`new`]:
//!
//! ```
//! let v: Vec<i32> = Vec::new();
//! ```
//!
//! [`Vec<T>`]: ../../std/vec/struct.Vec.html
-//! [`new()`]: ../../std/vec/struct.Vec.html#method.new
+//! [`new`]: ../../std/vec/struct.Vec.html#method.new
//! [`push`]: ../../std/vec/struct.Vec.html#method.push
//! [`Index`]: ../../std/ops/trait.Index.html
//! [`IndexMut`]: ../../std/ops/trait.IndexMut.html
/// The pointer will never be null, so this type is null-pointer-optimized.
///
/// However, the pointer may not actually point to allocated memory. In particular,
-/// if you construct a `Vec` with capacity 0 via [`Vec::new()`], [`vec![]`][`vec!`],
-/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit()`]
+/// if you construct a `Vec` with capacity 0 via [`Vec::new`], [`vec![]`][`vec!`],
+/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit`]
/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
-/// the `Vec` may not report a [`capacity()`] of 0*. `Vec` will allocate if and only
-/// if [`mem::size_of::<T>()`]` * capacity() > 0`. In general, `Vec`'s allocation
+/// the `Vec` may not report a [`capacity`] of 0*. `Vec` will allocate if and only
+/// if [`mem::size_of::<T>`]` * capacity() > 0`. In general, `Vec`'s allocation
/// details are subtle enough that it is strongly recommended that you only
/// free memory allocated by a `Vec` by creating a new `Vec` and dropping it.
///
/// If a `Vec` *has* allocated memory, then the memory it points to is on the heap
/// (as defined by the allocator Rust is configured to use by default), and its
-/// pointer points to [`len()`] initialized elements in order (what you would see
-/// if you coerced it to a slice), followed by [`capacity()`]` - `[`len()`]
+/// pointer points to [`len`] initialized elements in order (what you would see
+/// if you coerced it to a slice), followed by [`capacity`]` - `[`len`]
/// logically uninitialized elements.
///
/// `Vec` will never perform a "small optimization" where elements are actually
///
/// `Vec` will never automatically shrink itself, even if completely empty. This
/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec`
-/// and then filling it back up to the same [`len()`] should incur no calls to
+/// and then filling it back up to the same [`len`] should incur no calls to
/// the allocator. If you wish to free up unused memory, use
-/// [`shrink_to_fit`][`shrink_to_fit()`].
+/// [`shrink_to_fit`][`shrink_to_fit`].
///
/// [`push`] and [`insert`] will never (re)allocate if the reported capacity is
/// sufficient. [`push`] and [`insert`] *will* (re)allocate if
-/// [`len()`]` == `[`capacity()`]. That is, the reported capacity is completely
+/// [`len`]` == `[`capacity`]. That is, the reported capacity is completely
/// accurate, and can be relied on. It can even be used to manually free the memory
/// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even
/// when not necessary.
///
/// `vec![x; n]`, `vec![a, b, c, d]`, and
/// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec`
-/// with exactly the requested capacity. If [`len()`]` == `[`capacity()`],
+/// with exactly the requested capacity. If [`len`]` == `[`capacity`],
/// (as is the case for the [`vec!`] macro), then a `Vec<T>` can be converted to
/// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements.
///
/// [`String`]: ../../std/string/struct.String.html
/// [`&str`]: ../../std/primitive.str.html
/// [`Vec::with_capacity`]: ../../std/vec/struct.Vec.html#method.with_capacity
-/// [`Vec::new()`]: ../../std/vec/struct.Vec.html#method.new
-/// [`shrink_to_fit()`]: ../../std/vec/struct.Vec.html#method.shrink_to_fit
-/// [`capacity()`]: ../../std/vec/struct.Vec.html#method.capacity
-/// [`mem::size_of::<T>()`]: ../../std/mem/fn.size_of.html
-/// [`len()`]: ../../std/vec/struct.Vec.html#method.len
+/// [`Vec::new`]: ../../std/vec/struct.Vec.html#method.new
+/// [`shrink_to_fit`]: ../../std/vec/struct.Vec.html#method.shrink_to_fit
+/// [`capacity`]: ../../std/vec/struct.Vec.html#method.capacity
+/// [`mem::size_of::<T>`]: ../../std/mem/fn.size_of.html
+/// [`len`]: ../../std/vec/struct.Vec.html#method.len
/// [`push`]: ../../std/vec/struct.Vec.html#method.push
/// [`insert`]: ../../std/vec/struct.Vec.html#method.insert
/// [`reserve`]: ../../std/vec/struct.Vec.html#method.reserve
/// Converts the vector into [`Box<[T]>`][owned slice].
///
/// Note that this will drop any excess capacity. Calling this and
- /// converting back to a vector with [`into_vec()`] is equivalent to calling
- /// [`shrink_to_fit()`].
+ /// converting back to a vector with [`into_vec`] is equivalent to calling
+ /// [`shrink_to_fit`].
///
/// [owned slice]: ../../std/boxed/struct.Box.html
- /// [`into_vec()`]: ../../std/primitive.slice.html#method.into_vec
- /// [`shrink_to_fit()`]: #method.shrink_to_fit
+ /// [`into_vec`]: ../../std/primitive.slice.html#method.into_vec
+ /// [`shrink_to_fit`]: #method.shrink_to_fit
///
/// # Examples
///
}
}
+// note: test pulls in libstd, which causes errors here
+#[cfg(not(test))]
+#[stable(feature = "vec_from_box", since = "1.17.0")]
+impl<T> From<Box<[T]>> for Vec<T> {
+ fn from(s: Box<[T]>) -> Vec<T> {
+ s.into_vec()
+ }
+}
+
+#[stable(feature = "box_from_vec", since = "1.17.0")]
+impl<T> Into<Box<[T]>> for Vec<T> {
+ fn into(self) -> Box<[T]> {
+ self.into_boxed_slice()
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> From<&'a str> for Vec<u8> {
fn from(s: &'a str) -> Vec<u8> {
use core::fmt;
use core::iter::{repeat, FromIterator, FusedIterator};
use core::mem;
-use core::ops::{Index, IndexMut};
+use core::ops::{Index, IndexMut, Place, Placer, InPlace};
use core::ptr;
use core::ptr::Shared;
use core::slice;
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_front(&mut self, value: T) {
- if self.is_full() {
- let old_cap = self.cap();
- self.buf.double();
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- debug_assert!(!self.is_full());
- }
+ self.grow_if_necessary();
self.tail = self.wrap_sub(self.tail, 1);
let tail = self.tail;
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn push_back(&mut self, value: T) {
- if self.is_full() {
- let old_cap = self.cap();
- self.buf.double();
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- debug_assert!(!self.is_full());
- }
+ self.grow_if_necessary();
let head = self.head;
self.head = self.wrap_add(self.head, 1);
#[stable(feature = "deque_extras_15", since = "1.5.0")]
pub fn insert(&mut self, index: usize, value: T) {
assert!(index <= self.len(), "index out of bounds");
- if self.is_full() {
- let old_cap = self.cap();
- self.buf.double();
- unsafe {
- self.handle_cap_increase(old_cap);
- }
- debug_assert!(!self.is_full());
- }
+ self.grow_if_necessary();
// Move the least number of elements in the ring buffer and insert
// the given object
self.truncate(len - del);
}
}
+
+ // This may panic or abort
+ #[inline]
+ fn grow_if_necessary(&mut self) {
+ if self.is_full() {
+ let old_cap = self.cap();
+ self.buf.double();
+ unsafe {
+ self.handle_cap_increase(old_cap);
+ }
+ debug_assert!(!self.is_full());
+ }
+ }
+
+ /// Returns a place for insertion at the back of the `VecDeque`.
+ ///
+ /// Using this method with placement syntax is equivalent to [`push_back`](#method.push_back),
+ /// but may be more efficient.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(collection_placement)]
+ /// #![feature(placement_in_syntax)]
+ ///
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.place_back() <- 3;
+ /// buf.place_back() <- 4;
+ /// assert_eq!(&buf, &[3, 4]);
+ /// ```
+ #[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+ pub fn place_back(&mut self) -> PlaceBack<T> {
+ PlaceBack { vec_deque: self }
+ }
+
+ /// Returns a place for insertion at the front of the `VecDeque`.
+ ///
+ /// Using this method with placement syntax is equivalent to [`push_front`](#method.push_front),
+ /// but may be more efficient.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(collection_placement)]
+ /// #![feature(placement_in_syntax)]
+ ///
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.place_front() <- 3;
+ /// buf.place_front() <- 4;
+ /// assert_eq!(&buf, &[4, 3]);
+ /// ```
+ #[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+ pub fn place_front(&mut self) -> PlaceFront<T> {
+ PlaceFront { vec_deque: self }
+ }
}
impl<T: Clone> VecDeque<T> {
/// Modifies the `VecDeque` in-place so that `len()` is equal to new_len,
- /// either by removing excess elements or by appending copies of a value to the back.
+ /// either by removing excess elements or by appending clones of `value` to the back.
///
/// # Examples
///
}
}
+/// A place for insertion at the back of a `VecDeque`.
+///
+/// See [`VecDeque::place_back`](struct.VecDeque.html#method.place_back) for details.
+#[must_use = "places do nothing unless written to with `<-` syntax"]
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol are subject to change",
+ issue = "30172")]
+#[derive(Debug)]
+pub struct PlaceBack<'a, T: 'a> {
+ vec_deque: &'a mut VecDeque<T>,
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> Placer<T> for PlaceBack<'a, T> {
+ type Place = PlaceBack<'a, T>;
+
+ fn make_place(self) -> Self {
+ self.vec_deque.grow_if_necessary();
+ self
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> Place<T> for PlaceBack<'a, T> {
+ fn pointer(&mut self) -> *mut T {
+ unsafe { self.vec_deque.ptr().offset(self.vec_deque.head as isize) }
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> InPlace<T> for PlaceBack<'a, T> {
+ type Owner = &'a mut T;
+
+ unsafe fn finalize(mut self) -> &'a mut T {
+ let head = self.vec_deque.head;
+ self.vec_deque.head = self.vec_deque.wrap_add(head, 1);
+ &mut *(self.vec_deque.ptr().offset(head as isize))
+ }
+}
+
+/// A place for insertion at the front of a `VecDeque`.
+///
+/// See [`VecDeque::place_front`](struct.VecDeque.html#method.place_front) for details.
+#[must_use = "places do nothing unless written to with `<-` syntax"]
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol are subject to change",
+ issue = "30172")]
+#[derive(Debug)]
+pub struct PlaceFront<'a, T: 'a> {
+ vec_deque: &'a mut VecDeque<T>,
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> Placer<T> for PlaceFront<'a, T> {
+ type Place = PlaceFront<'a, T>;
+
+ fn make_place(self) -> Self {
+ self.vec_deque.grow_if_necessary();
+ self
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> Place<T> for PlaceFront<'a, T> {
+ fn pointer(&mut self) -> *mut T {
+ let tail = self.vec_deque.wrap_sub(self.vec_deque.tail, 1);
+ unsafe { self.vec_deque.ptr().offset(tail as isize) }
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, T> InPlace<T> for PlaceFront<'a, T> {
+ type Owner = &'a mut T;
+
+ unsafe fn finalize(mut self) -> &'a mut T {
+ self.vec_deque.tail = self.vec_deque.wrap_sub(self.vec_deque.tail, 1);
+ &mut *(self.vec_deque.ptr().offset(self.vec_deque.tail as isize))
+ }
+}
+
#[cfg(test)]
mod tests {
use test;
}
}
}
+
}
#![feature(test)]
#![feature(unboxed_closures)]
#![feature(unicode)]
+#![feature(utf8_error_error_len)]
extern crate collections;
extern crate test;
extern crate std_unicode;
+extern crate core;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
}
}
+#[test]
+fn from_utf8_error() {
+ macro_rules! test {
+ ($input: expr, $expected_valid_up_to: expr, $expected_error_len: expr) => {
+ let error = from_utf8($input).unwrap_err();
+ assert_eq!(error.valid_up_to(), $expected_valid_up_to);
+ assert_eq!(error.error_len(), $expected_error_len);
+ }
+ }
+ test!(b"A\xC3\xA9 \xFF ", 4, Some(1));
+ test!(b"A\xC3\xA9 \x80 ", 4, Some(1));
+ test!(b"A\xC3\xA9 \xC1 ", 4, Some(1));
+ test!(b"A\xC3\xA9 \xC1", 4, Some(1));
+ test!(b"A\xC3\xA9 \xC2", 4, None);
+ test!(b"A\xC3\xA9 \xC2 ", 4, Some(1));
+ test!(b"A\xC3\xA9 \xC2\xC0", 4, Some(1));
+ test!(b"A\xC3\xA9 \xE0", 4, None);
+ test!(b"A\xC3\xA9 \xE0\x9F", 4, Some(1));
+ test!(b"A\xC3\xA9 \xE0\xA0", 4, None);
+ test!(b"A\xC3\xA9 \xE0\xA0\xC0", 4, Some(2));
+ test!(b"A\xC3\xA9 \xE0\xA0 ", 4, Some(2));
+ test!(b"A\xC3\xA9 \xED\xA0\x80 ", 4, Some(1));
+ test!(b"A\xC3\xA9 \xF1", 4, None);
+ test!(b"A\xC3\xA9 \xF1\x80", 4, None);
+ test!(b"A\xC3\xA9 \xF1\x80\x80", 4, None);
+ test!(b"A\xC3\xA9 \xF1 ", 4, Some(1));
+ test!(b"A\xC3\xA9 \xF1\x80 ", 4, Some(2));
+ test!(b"A\xC3\xA9 \xF1\x80\x80 ", 4, Some(3));
+}
+
#[test]
fn test_as_bytes() {
// no null
use std::collections::VecDeque;
use std::fmt::Debug;
-use std::collections::vec_deque::Drain;
+use std::collections::vec_deque::{Drain};
use self::Taggy::*;
use self::Taggypar::*;
assert!(v.iter_mut().is_empty());
assert!(v.into_iter().is_empty());
}
+
+#[test]
+fn test_placement_in() {
+ let mut buf: VecDeque<isize> = VecDeque::new();
+ buf.place_back() <- 1;
+ buf.place_back() <- 2;
+ assert_eq!(buf, [1,2]);
+
+ buf.place_front() <- 3;
+ buf.place_front() <- 4;
+ assert_eq!(buf, [4,3,1,2]);
+
+ {
+ let ptr_head = buf.place_front() <- 5;
+ assert_eq!(*ptr_head, 5);
+ }
+ {
+ let ptr_tail = buf.place_back() <- 6;
+ assert_eq!(*ptr_tail, 6);
+ }
+ assert_eq!(buf, [5,4,3,1,2,6]);
+}
/// [`as`]: ../../book/casting-between-types.html#as
///
/// For an unsafe version of this function which ignores these checks, see
-/// [`from_u32_unchecked()`].
+/// [`from_u32_unchecked`].
///
-/// [`from_u32_unchecked()`]: fn.from_u32_unchecked.html
+/// [`from_u32_unchecked`]: fn.from_u32_unchecked.html
///
/// # Examples
///
///
/// This function is unsafe, as it may construct invalid `char` values.
///
-/// For a safe version of this function, see the [`from_u32()`] function.
+/// For a safe version of this function, see the [`from_u32`] function.
///
-/// [`from_u32()`]: fn.from_u32.html
+/// [`from_u32`]: fn.from_u32.html
///
/// # Examples
///
/// Returns an iterator that yields the hexadecimal Unicode escape of a
/// character, as `char`s.
///
-/// This `struct` is created by the [`escape_unicode()`] method on [`char`]. See
+/// This `struct` is created by the [`escape_unicode`] method on [`char`]. See
/// its documentation for more.
///
-/// [`escape_unicode()`]: ../../std/primitive.char.html#method.escape_unicode
+/// [`escape_unicode`]: ../../std/primitive.char.html#method.escape_unicode
/// [`char`]: ../../std/primitive.char.html
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that yields the literal escape code of a `char`.
///
-/// This `struct` is created by the [`escape_default()`] method on [`char`]. See
+/// This `struct` is created by the [`escape_default`] method on [`char`]. See
/// its documentation for more.
///
-/// [`escape_default()`]: ../../std/primitive.char.html#method.escape_default
+/// [`escape_default`]: ../../std/primitive.char.html#method.escape_default
/// [`char`]: ../../std/primitive.char.html
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that yields the literal escape code of a `char`.
///
-/// This `struct` is created by the [`escape_debug()`] method on [`char`]. See its
+/// This `struct` is created by the [`escape_debug`] method on [`char`]. See its
/// documentation for more.
///
-/// [`escape_debug()`]: ../../std/primitive.char.html#method.escape_debug
+/// [`escape_debug`]: ../../std/primitive.char.html#method.escape_debug
/// [`char`]: ../../std/primitive.char.html
#[unstable(feature = "char_escape_debug", issue = "35068")]
#[derive(Clone, Debug)]
/// ## Derivable
///
/// This trait can be used with `#[derive]` if all fields are `Clone`. The `derive`d
-/// implementation of [`clone()`] calls [`clone()`] on each field.
+/// implementation of [`clone`] calls [`clone`] on each field.
///
/// ## How can I implement `Clone`?
///
/// `Clone` cannot be `derive`d, but can be implemented as:
///
/// [`Copy`]: ../../std/marker/trait.Copy.html
-/// [`clone()`]: trait.Clone.html#tymethod.clone
+/// [`clone`]: trait.Clone.html#tymethod.clone
///
/// ```
/// #[derive(Copy)]
/// # Generic Impls
///
/// - [`From<T>`][From]` for U` implies `Into<U> for T`
-/// - [`into()`] is reflexive, which means that `Into<T> for T` is implemented
+/// - [`into`] is reflexive, which means that `Into<T> for T` is implemented
///
/// [`TryInto`]: trait.TryInto.html
/// [`Option<T>`]: ../../std/option/enum.Option.html
/// [`Result<T, E>`]: ../../std/result/enum.Result.html
/// [`String`]: ../../std/string/struct.String.html
/// [From]: trait.From.html
-/// [`into()`]: trait.Into.html#tymethod.into
+/// [`into`]: trait.Into.html#tymethod.into
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Into<T>: Sized {
/// Performs the conversion.
/// # Generic impls
///
/// - `From<T> for U` implies [`Into<U>`]` for T`
-/// - [`from()`] is reflexive, which means that `From<T> for T` is implemented
+/// - [`from`] is reflexive, which means that `From<T> for T` is implemented
///
/// [`TryFrom`]: trait.TryFrom.html
/// [`Option<T>`]: ../../std/option/enum.Option.html
/// [`Result<T, E>`]: ../../std/result/enum.Result.html
/// [`String`]: ../../std/string/struct.String.html
/// [`Into<U>`]: trait.Into.html
-/// [`from()`]: trait.From.html#tymethod.from
+/// [`from`]: trait.From.html#tymethod.from
#[stable(feature = "rust1", since = "1.0.0")]
pub trait From<T>: Sized {
/// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Display {
/// Formats the value using the given formatter.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::fmt;
+ ///
+ /// struct Position {
+ /// longitude: f32,
+ /// latitude: f32,
+ /// }
+ ///
+ /// impl fmt::Display for Position {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "({}, {})", self.longitude, self.latitude)
+ /// }
+ /// }
+ ///
+ /// assert_eq!("(1.987, 2.983)".to_owned(),
+ /// format!("{}", Position { longitude: 1.987, latitude: 2.983, }));
+ /// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter) -> Result;
}
}
impl<'a> Formatter<'a> {
-
// First up is the collection of functions used to execute a format string
// at runtime. This consumes all of the compile-time statics generated by
// the format! syntax extension.
///
/// This trait can be used with `#[derive]` if all fields implement `Hash`.
/// When `derive`d, the resulting hash will be the combination of the values
-/// from calling [`.hash()`] on each field.
+/// from calling [`.hash`] on each field.
///
/// ## How can I implement `Hash`?
///
/// [`Eq`]: ../../std/cmp/trait.Eq.html
/// [`HashMap`]: ../../std/collections/struct.HashMap.html
/// [`HashSet`]: ../../std/collections/struct.HashSet.html
-/// [`.hash()`]: #tymethod.hash
+/// [`.hash`]: #tymethod.hash
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Hash {
/// Feeds this value into the state given, updating the hasher as necessary.
/// Consumes the iterator, counting the number of iterations and returning it.
///
- /// This method will evaluate the iterator until its [`next()`] returns
+ /// This method will evaluate the iterator until its [`next`] returns
/// [`None`]. Once [`None`] is encountered, `count()` returns the number of
- /// times it called [`next()`].
+ /// times it called [`next`].
///
- /// [`next()`]: #tymethod.next
+ /// [`next`]: #tymethod.next
/// [`None`]: ../../std/option/enum.Option.html#variant.None
///
/// # Overflow Behavior
///
/// In other words, it zips two iterators together, into a single one.
///
- /// When either iterator returns [`None`], all further calls to [`next()`]
+ /// When either iterator returns [`None`], all further calls to [`next`]
/// will return [`None`].
///
/// # Examples
///
/// `zip()` is often used to zip an infinite iterator to a finite one.
/// This works because the finite iterator will eventually return [`None`],
- /// ending the zipper. Zipping with `(0..)` can look a lot like [`enumerate()`]:
+ /// ending the zipper. Zipping with `(0..)` can look a lot like [`enumerate`]:
///
/// ```
/// let enumerate: Vec<_> = "foo".chars().enumerate().collect();
/// assert_eq!((2, 'o'), zipper[2]);
/// ```
///
- /// [`enumerate()`]: trait.Iterator.html#method.enumerate
- /// [`next()`]: ../../std/iter/trait.Iterator.html#tymethod.next
+ /// [`enumerate`]: trait.Iterator.html#method.enumerate
+ /// [`next`]: ../../std/iter/trait.Iterator.html#tymethod.next
/// [`None`]: ../../std/option/enum.Option.html#variant.None
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
/// closure returns [`None`], it will try again, and call the closure on the
/// next element, seeing if it will return [`Some`].
///
- /// Why `filter_map()` and not just [`filter()`].[`map()`]? The key is in this
+ /// Why `filter_map()` and not just [`filter`].[`map`]? The key is in this
/// part:
///
- /// [`filter()`]: #method.filter
- /// [`map()`]: #method.map
+ /// [`filter`]: #method.filter
+ /// [`map`]: #method.map
///
/// > If the closure returns [`Some(element)`][`Some`], then that element is returned.
///
/// assert_eq!(iter.next(), None);
/// ```
///
- /// Here's the same example, but with [`filter()`] and [`map()`]:
+ /// Here's the same example, but with [`filter`] and [`map`]:
///
/// ```
/// let a = ["1", "2", "lol"];
/// iterator.
///
/// `enumerate()` keeps its count as a [`usize`]. If you want to count by a
- /// different sized integer, the [`zip()`] function provides similar
+ /// different sized integer, the [`zip`] function provides similar
/// functionality.
///
/// # Overflow Behavior
///
/// [`usize::MAX`]: ../../std/usize/constant.MAX.html
/// [`usize`]: ../../std/primitive.usize.html
- /// [`zip()`]: #method.zip
+ /// [`zip`]: #method.zip
///
/// # Examples
///
/// Creates an iterator which can use `peek` to look at the next element of
/// the iterator without consuming it.
///
- /// Adds a [`peek()`] method to an iterator. See its documentation for
+ /// Adds a [`peek`] method to an iterator. See its documentation for
/// more information.
///
- /// Note that the underlying iterator is still advanced when [`peek()`] is
+ /// Note that the underlying iterator is still advanced when [`peek`] is
/// called for the first time: In order to retrieve the next element,
- /// [`next()`] is called on the underlying iterator, hence any side effects of
- /// the [`next()`] method will occur.
+ /// [`next`] is called on the underlying iterator, hence any side effects of
+ /// the [`next`] method will occur.
///
- /// [`peek()`]: struct.Peekable.html#method.peek
- /// [`next()`]: ../../std/iter/trait.Iterator.html#tymethod.next
+ /// [`peek`]: struct.Peekable.html#method.peek
+ /// [`next`]: ../../std/iter/trait.Iterator.html#tymethod.next
///
/// # Examples
///
Peekable{iter: self, peeked: None}
}
- /// Creates an iterator that [`skip()`]s elements based on a predicate.
+ /// Creates an iterator that [`skip`]s elements based on a predicate.
///
- /// [`skip()`]: #method.skip
+ /// [`skip`]: #method.skip
///
/// `skip_while()` takes a closure as an argument. It will call this
/// closure on each element of the iterator, and ignore elements
Take{iter: self, n: n}
}
- /// An iterator adaptor similar to [`fold()`] that holds internal state and
+ /// An iterator adaptor similar to [`fold`] that holds internal state and
/// produces a new iterator.
///
- /// [`fold()`]: #method.fold
+ /// [`fold`]: #method.fold
///
/// `scan()` takes two arguments: an initial value which seeds the internal
/// state, and a closure with two arguments, the first being a mutable
/// Creates an iterator that works like map, but flattens nested structure.
///
- /// The [`map()`] adapter is very useful, but only when the closure
+ /// The [`map`] adapter is very useful, but only when the closure
/// argument produces values. If it produces an iterator instead, there's
/// an extra layer of indirection. `flat_map()` will remove this extra layer
/// on its own.
///
- /// Another way of thinking about `flat_map()`: [`map()`]'s closure returns
+ /// Another way of thinking about `flat_map()`: [`map`]'s closure returns
/// one item for each element, and `flat_map()`'s closure returns an
/// iterator for each element.
///
- /// [`map()`]: #method.map
+ /// [`map`]: #method.map
///
/// # Examples
///
/// library, used in a variety of contexts.
///
/// The most basic pattern in which `collect()` is used is to turn one
- /// collection into another. You take a collection, call [`iter()`] on it,
+ /// collection into another. You take a collection, call [`iter`] on it,
/// do a bunch of transformations, and then `collect()` at the end.
///
/// One of the keys to `collect()`'s power is that many things you might
/// assert_eq!(Ok(vec![1, 3]), result);
/// ```
///
- /// [`iter()`]: ../../std/iter/trait.Iterator.html#tymethod.next
+ /// [`iter`]: ../../std/iter/trait.Iterator.html#tymethod.next
/// [`String`]: ../../std/string/struct.String.html
/// [`char`]: ../../std/primitive.char.html
/// [`Result`]: ../../std/result/enum.Result.html
/// collections: one from the left elements of the pairs, and one
/// from the right elements.
///
- /// This function is, in some sense, the opposite of [`zip()`].
+ /// This function is, in some sense, the opposite of [`zip`].
///
- /// [`zip()`]: #method.zip
+ /// [`zip`]: #method.zip
///
/// # Examples
///
(ts, us)
}
- /// Creates an iterator which [`clone()`]s all of its elements.
+ /// Creates an iterator which [`clone`]s all of its elements.
///
/// This is useful when you have an iterator over `&T`, but you need an
/// iterator over `T`.
///
- /// [`clone()`]: ../../std/clone/trait.Clone.html#tymethod.clone
+ /// [`clone`]: ../../std/clone/trait.Clone.html#tymethod.clone
///
/// # Examples
///
//! }
//! ```
//!
-//! An iterator has a method, [`next()`], which when called, returns an
-//! [`Option`]`<Item>`. [`next()`] will return `Some(Item)` as long as there
+//! An iterator has a method, [`next`], which when called, returns an
+//! [`Option`]`<Item>`. [`next`] will return `Some(Item)` as long as there
//! are elements, and once they've all been exhausted, will return `None` to
//! indicate that iteration is finished. Individual iterators may choose to
-//! resume iteration, and so calling [`next()`] again may or may not eventually
+//! resume iteration, and so calling [`next`] again may or may not eventually
//! start returning `Some(Item)` again at some point.
//!
//! [`Iterator`]'s full definition includes a number of other methods as well,
-//! but they are default methods, built on top of [`next()`], and so you get
+//! but they are default methods, built on top of [`next`], and so you get
//! them for free.
//!
//! Iterators are also composable, and it's common to chain them together to do
//! below for more details.
//!
//! [`Iterator`]: trait.Iterator.html
-//! [`next()`]: trait.Iterator.html#tymethod.next
+//! [`next`]: trait.Iterator.html#tymethod.next
//! [`Option`]: ../../std/option/enum.Option.html
//!
//! # The three forms of iteration
//! produce an iterator. What gives?
//!
//! There's a trait in the standard library for converting something into an
-//! iterator: [`IntoIterator`]. This trait has one method, [`into_iter()`],
+//! iterator: [`IntoIterator`]. This trait has one method, [`into_iter`],
//! which converts the thing implementing [`IntoIterator`] into an iterator.
//! Let's take a look at that `for` loop again, and what the compiler converts
//! it into:
//!
//! [`IntoIterator`]: trait.IntoIterator.html
-//! [`into_iter()`]: trait.IntoIterator.html#tymethod.into_iter
+//! [`into_iter`]: trait.IntoIterator.html#tymethod.into_iter
//!
//! ```
//! let values = vec![1, 2, 3, 4, 5];
//! ```
//!
//! First, we call `into_iter()` on the value. Then, we match on the iterator
-//! that returns, calling [`next()`] over and over until we see a `None`. At
+//! that returns, calling [`next`] over and over until we see a `None`. At
//! that point, we `break` out of the loop, and we're done iterating.
//!
//! There's one more subtle bit here: the standard library contains an
//! often called 'iterator adapters', as they're a form of the 'adapter
//! pattern'.
//!
-//! Common iterator adapters include [`map()`], [`take()`], and [`filter()`].
+//! Common iterator adapters include [`map`], [`take`], and [`filter`].
//! For more, see their documentation.
//!
-//! [`map()`]: trait.Iterator.html#method.map
-//! [`take()`]: trait.Iterator.html#method.take
-//! [`filter()`]: trait.Iterator.html#method.filter
+//! [`map`]: trait.Iterator.html#method.map
+//! [`take`]: trait.Iterator.html#method.take
+//! [`filter`]: trait.Iterator.html#method.filter
//!
//! # Laziness
//!
//! Iterators (and iterator [adapters](#adapters)) are *lazy*. This means that
//! just creating an iterator doesn't _do_ a whole lot. Nothing really happens
-//! until you call [`next()`]. This is sometimes a source of confusion when
-//! creating an iterator solely for its side effects. For example, the [`map()`]
+//! until you call [`next`]. This is sometimes a source of confusion when
+//! creating an iterator solely for its side effects. For example, the [`map`]
//! method calls a closure on each element it iterates over:
//!
//! ```
//! do nothing unless consumed
//! ```
//!
-//! The idiomatic way to write a [`map()`] for its side effects is to use a
+//! The idiomatic way to write a [`map`] for its side effects is to use a
//! `for` loop instead:
//!
//! ```
//! }
//! ```
//!
-//! [`map()`]: trait.Iterator.html#method.map
+//! [`map`]: trait.Iterator.html#method.map
//!
//! The two most common ways to evaluate an iterator are to use a `for` loop
-//! like this, or using the [`collect()`] method to produce a new collection.
+//! like this, or using the [`collect`] method to produce a new collection.
//!
-//! [`collect()`]: trait.Iterator.html#method.collect
+//! [`collect`]: trait.Iterator.html#method.collect
//!
//! # Infinity
//!
//! let numbers = 0..;
//! ```
//!
-//! It is common to use the [`take()`] iterator adapter to turn an infinite
+//! It is common to use the [`take`] iterator adapter to turn an infinite
//! iterator into a finite one:
//!
//! ```
//!
//! This will print the numbers `0` through `4`, each on their own line.
//!
-//! [`take()`]: trait.Iterator.html#method.take
+//! [`take`]: trait.Iterator.html#method.take
#![stable(feature = "rust1", since = "1.0.0")]
mod sources;
mod traits;
-/// An double-ended iterator with the direction inverted.
+/// A double-ended iterator with the direction inverted.
///
-/// This `struct` is created by the [`rev()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`rev`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`rev()`]: trait.Iterator.html#method.rev
+/// [`rev`]: trait.Iterator.html#method.rev
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator that clones the elements of an underlying iterator.
///
-/// This `struct` is created by the [`cloned()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`cloned`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`cloned()`]: trait.Iterator.html#method.cloned
+/// [`cloned`]: trait.Iterator.html#method.cloned
/// [`Iterator`]: trait.Iterator.html
#[stable(feature = "iter_cloned", since = "1.1.0")]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator that repeats endlessly.
///
-/// This `struct` is created by the [`cycle()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`cycle`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`cycle()`]: trait.Iterator.html#method.cycle
+/// [`cycle`]: trait.Iterator.html#method.cycle
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator that strings two iterators together.
///
-/// This `struct` is created by the [`chain()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`chain`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`chain()`]: trait.Iterator.html#method.chain
+/// [`chain`]: trait.Iterator.html#method.chain
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator that iterates two other iterators simultaneously.
///
-/// This `struct` is created by the [`zip()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`zip`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`zip()`]: trait.Iterator.html#method.zip
+/// [`zip`]: trait.Iterator.html#method.zip
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator that maps the values of `iter` with `f`.
///
-/// This `struct` is created by the [`map()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`map`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`map()`]: trait.Iterator.html#method.map
+/// [`map`]: trait.Iterator.html#method.map
/// [`Iterator`]: trait.Iterator.html
///
/// # Notes about side effects
///
-/// The [`map()`] iterator implements [`DoubleEndedIterator`], meaning that
-/// you can also [`map()`] backwards:
+/// The [`map`] iterator implements [`DoubleEndedIterator`], meaning that
+/// you can also [`map`] backwards:
///
/// ```rust
/// let v: Vec<i32> = vec![1, 2, 3].into_iter().map(|x| x + 1).rev().collect();
/// An iterator that filters the elements of `iter` with `predicate`.
///
-/// This `struct` is created by the [`filter()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`filter`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`filter()`]: trait.Iterator.html#method.filter
+/// [`filter`]: trait.Iterator.html#method.filter
/// [`Iterator`]: trait.Iterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that uses `f` to both filter and map elements from `iter`.
///
-/// This `struct` is created by the [`filter_map()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`filter_map`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`filter_map()`]: trait.Iterator.html#method.filter_map
+/// [`filter_map`]: trait.Iterator.html#method.filter_map
/// [`Iterator`]: trait.Iterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that yields the current count and the element during iteration.
///
-/// This `struct` is created by the [`enumerate()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`enumerate`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`enumerate()`]: trait.Iterator.html#method.enumerate
+/// [`enumerate`]: trait.Iterator.html#method.enumerate
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator with a `peek()` that returns an optional reference to the next
/// element.
///
-/// This `struct` is created by the [`peekable()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`peekable`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`peekable()`]: trait.Iterator.html#method.peekable
+/// [`peekable`]: trait.Iterator.html#method.peekable
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
impl<I: Iterator> Peekable<I> {
/// Returns a reference to the next() value without advancing the iterator.
///
- /// Like [`next()`], if there is a value, it is wrapped in a `Some(T)`.
+ /// Like [`next`], if there is a value, it is wrapped in a `Some(T)`.
/// But if the iteration is over, `None` is returned.
///
- /// [`next()`]: trait.Iterator.html#tymethod.next
+ /// [`next`]: trait.Iterator.html#tymethod.next
///
/// Because `peek()` returns a reference, and many iterators iterate over
/// references, there can be a possibly confusing situation where the
/// An iterator that rejects elements while `predicate` is true.
///
-/// This `struct` is created by the [`skip_while()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`skip_while`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`skip_while()`]: trait.Iterator.html#method.skip_while
+/// [`skip_while`]: trait.Iterator.html#method.skip_while
/// [`Iterator`]: trait.Iterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that only accepts elements while `predicate` is true.
///
-/// This `struct` is created by the [`take_while()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`take_while`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`take_while()`]: trait.Iterator.html#method.take_while
+/// [`take_while`]: trait.Iterator.html#method.take_while
/// [`Iterator`]: trait.Iterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that skips over `n` elements of `iter`.
///
-/// This `struct` is created by the [`skip()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`skip`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`skip()`]: trait.Iterator.html#method.skip
+/// [`skip`]: trait.Iterator.html#method.skip
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator that only iterates over the first `n` iterations of `iter`.
///
-/// This `struct` is created by the [`take()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`take`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`take()`]: trait.Iterator.html#method.take
+/// [`take`]: trait.Iterator.html#method.take
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator to maintain state while iterating another iterator.
///
-/// This `struct` is created by the [`scan()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`scan`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`scan()`]: trait.Iterator.html#method.scan
+/// [`scan`]: trait.Iterator.html#method.scan
/// [`Iterator`]: trait.Iterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that maps each element to an iterator, and yields the elements
/// of the produced iterators.
///
-/// This `struct` is created by the [`flat_map()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`flat_map`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`flat_map()`]: trait.Iterator.html#method.flat_map
+/// [`flat_map`]: trait.Iterator.html#method.flat_map
/// [`Iterator`]: trait.Iterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that yields `None` forever after the underlying iterator
/// yields `None` once.
///
-/// This `struct` is created by the [`fuse()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`fuse`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`fuse()`]: trait.Iterator.html#method.fuse
+/// [`fuse`]: trait.Iterator.html#method.fuse
/// [`Iterator`]: trait.Iterator.html
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
/// An iterator that calls a function with a reference to each element before
/// yielding it.
///
-/// This `struct` is created by the [`inspect()`] method on [`Iterator`]. See its
+/// This `struct` is created by the [`inspect`] method on [`Iterator`]. See its
/// documentation for more.
///
-/// [`inspect()`]: trait.Iterator.html#method.inspect
+/// [`inspect`]: trait.Iterator.html#method.inspect
/// [`Iterator`]: trait.Iterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
/// An iterator that repeats an element endlessly.
///
-/// This `struct` is created by the [`repeat()`] function. See its documentation for more.
+/// This `struct` is created by the [`repeat`] function. See its documentation for more.
///
-/// [`repeat()`]: fn.repeat.html
+/// [`repeat`]: fn.repeat.html
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Repeat<A> {
/// over and over and 🔁.
///
/// Infinite iterators like `repeat()` are often used with adapters like
-/// [`take()`], in order to make them finite.
+/// [`take`], in order to make them finite.
///
-/// [`take()`]: trait.Iterator.html#method.take
+/// [`take`]: trait.Iterator.html#method.take
///
/// # Examples
///
/// assert_eq!(Some(4), fours.next());
/// ```
///
-/// Going finite with [`take()`]:
+/// Going finite with [`take`]:
///
/// ```
/// use std::iter;
/// An iterator that yields nothing.
///
-/// This `struct` is created by the [`empty()`] function. See its documentation for more.
+/// This `struct` is created by the [`empty`] function. See its documentation for more.
///
-/// [`empty()`]: fn.empty.html
+/// [`empty`]: fn.empty.html
#[stable(feature = "iter_empty", since = "1.2.0")]
pub struct Empty<T>(marker::PhantomData<T>);
/// An iterator that yields an element exactly once.
///
-/// This `struct` is created by the [`once()`] function. See its documentation for more.
+/// This `struct` is created by the [`once`] function. See its documentation for more.
///
-/// [`once()`]: fn.once.html
+/// [`once`]: fn.once.html
#[derive(Clone, Debug)]
#[stable(feature = "iter_once", since = "1.2.0")]
pub struct Once<T> {
/// Creates an iterator that yields an element exactly once.
///
-/// This is commonly used to adapt a single value into a [`chain()`] of other
+/// This is commonly used to adapt a single value into a [`chain`] of other
/// kinds of iteration. Maybe you have an iterator that covers almost
/// everything, but you need an extra special case. Maybe you have a function
/// which works on iterators, but you only need to process one value.
///
-/// [`chain()`]: trait.Iterator.html#method.chain
+/// [`chain`]: trait.Iterator.html#method.chain
///
/// # Examples
///
/// created from an iterator. This is common for types which describe a
/// collection of some kind.
///
-/// `FromIterator`'s [`from_iter()`] is rarely called explicitly, and is instead
-/// used through [`Iterator`]'s [`collect()`] method. See [`collect()`]'s
+/// `FromIterator`'s [`from_iter`] is rarely called explicitly, and is instead
+/// used through [`Iterator`]'s [`collect`] method. See [`collect`]'s
/// documentation for more examples.
///
-/// [`from_iter()`]: #tymethod.from_iter
+/// [`from_iter`]: #tymethod.from_iter
/// [`Iterator`]: trait.Iterator.html
-/// [`collect()`]: trait.Iterator.html#method.collect
+/// [`collect`]: trait.Iterator.html#method.collect
///
/// See also: [`IntoIterator`].
///
/// assert_eq!(v, vec![5, 5, 5, 5, 5]);
/// ```
///
-/// Using [`collect()`] to implicitly use `FromIterator`:
+/// Using [`collect`] to implicitly use `FromIterator`:
///
/// ```
/// let five_fives = std::iter::repeat(5).take(5);
/// backwards, a good start is to know where the end is.
///
/// When implementing an `ExactSizeIterator`, You must also implement
-/// [`Iterator`]. When doing so, the implementation of [`size_hint()`] *must*
+/// [`Iterator`]. When doing so, the implementation of [`size_hint`] *must*
/// return the exact size of the iterator.
///
/// [`Iterator`]: trait.Iterator.html
-/// [`size_hint()`]: trait.Iterator.html#method.size_hint
+/// [`size_hint`]: trait.Iterator.html#method.size_hint
///
-/// The [`len()`] method has a default implementation, so you usually shouldn't
+/// The [`len`] method has a default implementation, so you usually shouldn't
/// implement it. However, you may be able to provide a more performant
/// implementation than the default, so overriding it in this case makes sense.
///
-/// [`len()`]: #method.len
+/// [`len`]: #method.len
///
/// # Examples
///
/// implementation, you can do so. See the [trait-level] docs for an
/// example.
///
- /// This function has the same safety guarantees as the [`size_hint()`]
+ /// This function has the same safety guarantees as the [`size_hint`]
/// function.
///
/// [trait-level]: trait.ExactSizeIterator.html
- /// [`size_hint()`]: trait.Iterator.html#method.size_hint
+ /// [`size_hint`]: trait.Iterator.html#method.size_hint
///
/// # Examples
///
/// Trait to represent types that can be created by summing up an iterator.
///
-/// This trait is used to implement the [`sum()`] method on iterators. Types which
-/// implement the trait can be generated by the [`sum()`] method. Like
+/// This trait is used to implement the [`sum`] method on iterators. Types which
+/// implement the trait can be generated by the [`sum`] method. Like
/// [`FromIterator`] this trait should rarely be called directly and instead
-/// interacted with through [`Iterator::sum()`].
+/// interacted with through [`Iterator::sum`].
///
-/// [`sum()`]: ../../std/iter/trait.Sum.html#tymethod.sum
+/// [`sum`]: ../../std/iter/trait.Sum.html#tymethod.sum
/// [`FromIterator`]: ../../std/iter/trait.FromIterator.html
-/// [`Iterator::sum()`]: ../../std/iter/trait.Iterator.html#method.sum
+/// [`Iterator::sum`]: ../../std/iter/trait.Iterator.html#method.sum
#[stable(feature = "iter_arith_traits", since = "1.12.0")]
pub trait Sum<A = Self>: Sized {
/// Method which takes an iterator and generates `Self` from the elements by
/// Trait to represent types that can be created by multiplying elements of an
/// iterator.
///
-/// This trait is used to implement the [`product()`] method on iterators. Types
-/// which implement the trait can be generated by the [`product()`] method. Like
+/// This trait is used to implement the [`product`] method on iterators. Types
+/// which implement the trait can be generated by the [`product`] method. Like
/// [`FromIterator`] this trait should rarely be called directly and instead
-/// interacted with through [`Iterator::product()`].
+/// interacted with through [`Iterator::product`].
///
-/// [`product()`]: ../../std/iter/trait.Product.html#tymethod.product
+/// [`product`]: ../../std/iter/trait.Product.html#tymethod.product
/// [`FromIterator`]: ../../std/iter/trait.FromIterator.html
-/// [`Iterator::product()`]: ../../std/iter/trait.Iterator.html#method.product
+/// [`Iterator::product`]: ../../std/iter/trait.Iterator.html#method.product
#[stable(feature = "iter_arith_traits", since = "1.12.0")]
pub trait Product<A = Self>: Sized {
/// Method which takes an iterator and generates `Self` from the elements by
/// that behave this way because it allows for some significant optimizations.
///
/// Note: In general, you should not use `FusedIterator` in generic bounds if
-/// you need a fused iterator. Instead, you should just call [`Iterator::fuse()`]
+/// you need a fused iterator. Instead, you should just call [`Iterator::fuse`]
/// on the iterator. If the iterator is already fused, the additional [`Fuse`]
/// wrapper will be a no-op with no performance penalty.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
-/// [`Iterator::fuse()`]: ../../std/iter/trait.Iterator.html#method.fuse
+/// [`Iterator::fuse`]: ../../std/iter/trait.Iterator.html#method.fuse
/// [`Fuse`]: ../../std/iter/struct.Fuse.html
#[unstable(feature = "fused", issue = "35602")]
pub trait FusedIterator: Iterator {}
/// # Safety
///
/// This trait must only be implemented when the contract is upheld.
-/// Consumers of this trait must inspect [`.size_hint()`]’s upper bound.
+/// Consumers of this trait must inspect [`.size_hint`]’s upper bound.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [`usize::MAX`]: ../../std/usize/constant.MAX.html
-/// [`.size_hint()`]: ../../std/iter/trait.Iterator.html#method.size_hint
+/// [`.size_hint`]: ../../std/iter/trait.Iterator.html#method.size_hint
#[unstable(feature = "trusted_len", issue = "37572")]
pub unsafe trait TrustedLen : Iterator {}
/// On panic, this macro will print the values of the expressions with their
/// debug representations.
///
-/// Like [`assert!()`], this macro has a second version, where a custom
+/// Like [`assert!`], this macro has a second version, where a custom
/// panic message can be provided.
///
-/// [`assert!()`]: macro.assert.html
+/// [`assert!`]: macro.assert.html
///
/// # Examples
///
/// [`String`]'s buffer, leading to a double free.
///
/// Generalizing the latter case, any type implementing [`Drop`] can't be `Copy`, because it's
-/// managing some resource besides its own [`size_of::<T>()`] bytes.
+/// managing some resource besides its own [`size_of::<T>`] bytes.
///
/// If you try to implement `Copy` on a struct or enum containing non-`Copy` data, you will get
/// the error [E0204].
/// [`Vec<T>`]: ../../std/vec/struct.Vec.html
/// [`String`]: ../../std/string/struct.String.html
/// [`Drop`]: ../../std/ops/trait.Drop.html
-/// [`size_of::<T>()`]: ../../std/mem/fn.size_of.html
+/// [`size_of::<T>`]: ../../std/mem/fn.size_of.html
/// [`Clone`]: ../clone/trait.Clone.html
/// [`String`]: ../../std/string/struct.String.html
/// [`i32`]: ../../std/primitive.i32.html
/// the contained value.
///
/// This function will unsafely assume the pointer `src` is valid for
-/// [`size_of::<U>()`][size_of] bytes by transmuting `&T` to `&U` and then reading
+/// [`size_of::<U>`][size_of] bytes by transmuting `&T` to `&U` and then reading
/// the `&U`. It will also unsafely create a copy of the contained value instead of
/// moving out of `src`.
///
/// A classification of floating point numbers.
///
-/// This `enum` is used as the return type for [`f32::classify()`] and [`f64::classify()`]. See
+/// This `enum` is used as the return type for [`f32::classify`] and [`f64::classify`]. See
/// their documentation for more.
///
-/// [`f32::classify()`]: ../../std/primitive.f32.html#method.classify
-/// [`f64::classify()`]: ../../std/primitive.f64.html#method.classify
+/// [`f32::classify`]: ../../std/primitive.f32.html#method.classify
+/// [`f64::classify`]: ../../std/primitive.f64.html#method.classify
///
/// # Examples
///
/// An error which can be returned when parsing an integer.
///
/// This error is used as the error type for the `from_str_radix()` functions
-/// on the primitive integer types, such as [`i8::from_str_radix()`].
+/// on the primitive integer types, such as [`i8::from_str_radix`].
///
-/// [`i8::from_str_radix()`]: ../../std/primitive.i8.html#method.from_str_radix
+/// [`i8::from_str_radix`]: ../../std/primitive.i8.html#method.from_str_radix
#[derive(Debug, Clone, PartialEq, Eq)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ParseIntError {
//! contexts involving built-in types, this is usually not a problem.
//! However, using these operators in generic code, requires some
//! attention if values have to be reused as opposed to letting the operators
-//! consume them. One option is to occasionally use [`clone()`].
+//! consume them. One option is to occasionally use [`clone`].
//! Another option is to rely on the types involved providing additional
//! operator implementations for references. For example, for a user-defined
//! type `T` which is supposed to support addition, it is probably a good
//! [`FnOnce`]: trait.FnOnce.html
//! [`Add`]: trait.Add.html
//! [`Sub`]: trait.Sub.html
-//! [`clone()`]: ../clone/trait.Clone.html#tymethod.clone
+//! [`clone`]: ../clone/trait.Clone.html#tymethod.clone
#![stable(feature = "rust1", since = "1.0.0")]
/// A (half-open) range which is bounded at both ends: { x | start <= x < end }.
/// Use `start..end` (two dots) for its shorthand.
///
-/// See the [`contains()`](#method.contains) method for its characterization.
+/// See the [`contains`](#method.contains) method for its characterization.
///
/// # Examples
///
/// A range which is only bounded below: { x | start <= x }.
/// Use `start..` for its shorthand.
///
-/// See the [`contains()`](#method.contains) method for its characterization.
+/// See the [`contains`](#method.contains) method for its characterization.
///
/// Note: Currently, no overflow checking is done for the iterator
/// implementation; if you use an integer range and the integer overflows, it
/// A range which is only bounded above: { x | x < end }.
/// Use `..end` (two dots) for its shorthand.
///
-/// See the [`contains()`](#method.contains) method for its characterization.
+/// See the [`contains`](#method.contains) method for its characterization.
///
/// It cannot serve as an iterator because it doesn't have a starting point.
///
/// An inclusive range which is bounded at both ends: { x | start <= x <= end }.
/// Use `start...end` (three dots) for its shorthand.
///
-/// See the [`contains()`](#method.contains) method for its characterization.
+/// See the [`contains`](#method.contains) method for its characterization.
///
/// # Examples
///
/// An inclusive range which is only bounded above: { x | x <= end }.
/// Use `...end` (three dots) for its shorthand.
///
-/// See the [`contains()`](#method.contains) method for its characterization.
+/// See the [`contains`](#method.contains) method for its characterization.
///
/// It cannot serve as an iterator because it doesn't have a starting point.
///
/// allocations or resources, so care must be taken not to overwrite an object
/// that should be dropped.
///
-/// It does not immediately drop the contents of `src` either; it is rather
-/// *moved* into the memory location `dst` and will be dropped whenever that
-/// location goes out of scope.
+/// Additionally, it does not drop `src`. Semantically, `src` is moved into the
+/// location pointed to by `dst`.
///
/// This is appropriate for initializing uninitialized memory, or overwriting
/// memory that has previously been `read` from.
/// allocations or resources, so care must be taken not to overwrite an object
/// that should be dropped.
///
+/// Additionally, it does not drop `src`. Semantically, `src` is moved into the
+/// location pointed to by `dst`.
+///
/// This is appropriate for initializing uninitialized memory, or overwriting
/// memory that has previously been `read` from.
///
/// A trait to abstract the idea of creating a new instance of a type from a
/// string.
///
-/// `FromStr`'s [`from_str()`] method is often used implicitly, through
-/// [`str`]'s [`parse()`] method. See [`parse()`]'s documentation for examples.
+/// `FromStr`'s [`from_str`] method is often used implicitly, through
+/// [`str`]'s [`parse`] method. See [`parse`]'s documentation for examples.
///
-/// [`from_str()`]: #tymethod.from_str
+/// [`from_str`]: #tymethod.from_str
/// [`str`]: ../../std/primitive.str.html
-/// [`parse()`]: ../../std/primitive.str.html#method.parse
+/// [`parse`]: ../../std/primitive.str.html#method.parse
#[stable(feature = "rust1", since = "1.0.0")]
pub trait FromStr: Sized {
/// The associated error which can be returned from parsing.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Utf8Error {
valid_up_to: usize,
+ error_len: Option<u8>,
}
impl Utf8Error {
/// Returns the index in the given string up to which valid UTF-8 was
/// verified.
///
- /// It is the maximum index such that `from_utf8(input[..index])`
+ /// It is the maximum index such that `from_utf8(&input[..index])`
/// would return `Ok(_)`.
///
/// # Examples
/// ```
#[stable(feature = "utf8_error", since = "1.5.0")]
pub fn valid_up_to(&self) -> usize { self.valid_up_to }
+
+ /// Provide more information about the failure:
+ ///
+ /// * `None`: the end of the input was reached unexpectedly.
+ /// `self.valid_up_to()` is 1 to 3 bytes from the end of the input.
+ /// If a byte stream (such as a file or a network socket) is being decoded incrementally,
+ /// this could be a valid `char` whose UTF-8 byte sequence is spanning multiple chunks.
+ ///
+ /// * `Some(len)`: an unexpected byte was encountered.
+ /// The length provided is that of the invalid byte sequence
+ /// that starts at the index given by `valid_up_to()`.
+ /// Decoding should resume after that sequence
+ /// (after inserting a U+FFFD REPLACEMENT CHARACTER) in case of lossy decoding.
+ #[unstable(feature = "utf8_error_error_len", reason ="new", issue = "40494")]
+ pub fn error_len(&self) -> Option<usize> {
+ self.error_len.map(|len| len as usize)
+ }
}
/// Converts a slice of bytes to a string slice.
///
/// If you are sure that the byte slice is valid UTF-8, and you don't want to
/// incur the overhead of the validity check, there is an unsafe version of
-/// this function, [`from_utf8_unchecked()`][fromutf8u], which has the same
+/// this function, [`from_utf8_unchecked`][fromutf8u], which has the same
/// behavior but skips the check.
///
/// [fromutf8u]: fn.from_utf8_unchecked.html
///
/// If you need a `String` instead of a `&str`, consider
-/// [`String::from_utf8()`][string].
+/// [`String::from_utf8`][string].
///
/// [string]: ../../std/string/struct.String.html#method.from_utf8
///
/// Converts a slice of bytes to a string slice without checking
/// that the string contains valid UTF-8.
///
-/// See the safe version, [`from_utf8()`][fromutf8], for more information.
+/// See the safe version, [`from_utf8`][fromutf8], for more information.
///
/// [fromutf8]: fn.from_utf8.html
///
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Display for Utf8Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "invalid utf-8: invalid byte near index {}", self.valid_up_to)
+ if let Some(error_len) = self.error_len {
+ write!(f, "invalid utf-8 sequence of {} bytes from index {}",
+ error_len, self.valid_up_to)
+ } else {
+ write!(f, "incomplete utf-8 byte sequence from index {}", self.valid_up_to)
+ }
}
}
/// Iterator for the char (representing *Unicode Scalar Values*) of a string
///
-/// Created with the method [`chars()`].
+/// Created with the method [`chars`].
///
-/// [`chars()`]: ../../std/primitive.str.html#method.chars
+/// [`chars`]: ../../std/primitive.str.html#method.chars
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chars<'a> {
/// External iterator for a string's bytes.
/// Use with the `std::iter` module.
///
-/// Created with the method [`bytes()`].
+/// Created with the method [`bytes`].
///
-/// [`bytes()`]: ../../std/primitive.str.html#method.bytes
+/// [`bytes`]: ../../std/primitive.str.html#method.bytes
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone, Debug)]
pub struct Bytes<'a>(Cloned<slice::Iter<'a, u8>>);
generate_pattern_iterators! {
forward:
- /// Created with the method [`split()`].
+ /// Created with the method [`split`].
///
- /// [`split()`]: ../../std/primitive.str.html#method.split
+ /// [`split`]: ../../std/primitive.str.html#method.split
struct Split;
reverse:
- /// Created with the method [`rsplit()`].
+ /// Created with the method [`rsplit`].
///
- /// [`rsplit()`]: ../../std/primitive.str.html#method.rsplit
+ /// [`rsplit`]: ../../std/primitive.str.html#method.rsplit
struct RSplit;
stability:
#[stable(feature = "rust1", since = "1.0.0")]
generate_pattern_iterators! {
forward:
- /// Created with the method [`split_terminator()`].
+ /// Created with the method [`split_terminator`].
///
- /// [`split_terminator()`]: ../../std/primitive.str.html#method.split_terminator
+ /// [`split_terminator`]: ../../std/primitive.str.html#method.split_terminator
struct SplitTerminator;
reverse:
- /// Created with the method [`rsplit_terminator()`].
+ /// Created with the method [`rsplit_terminator`].
///
- /// [`rsplit_terminator()`]: ../../std/primitive.str.html#method.rsplit_terminator
+ /// [`rsplit_terminator`]: ../../std/primitive.str.html#method.rsplit_terminator
struct RSplitTerminator;
stability:
#[stable(feature = "rust1", since = "1.0.0")]
generate_pattern_iterators! {
forward:
- /// Created with the method [`splitn()`].
+ /// Created with the method [`splitn`].
///
- /// [`splitn()`]: ../../std/primitive.str.html#method.splitn
+ /// [`splitn`]: ../../std/primitive.str.html#method.splitn
struct SplitN;
reverse:
- /// Created with the method [`rsplitn()`].
+ /// Created with the method [`rsplitn`].
///
- /// [`rsplitn()`]: ../../std/primitive.str.html#method.rsplitn
+ /// [`rsplitn`]: ../../std/primitive.str.html#method.rsplitn
struct RSplitN;
stability:
#[stable(feature = "rust1", since = "1.0.0")]
generate_pattern_iterators! {
forward:
- /// Created with the method [`match_indices()`].
+ /// Created with the method [`match_indices`].
///
- /// [`match_indices()`]: ../../std/primitive.str.html#method.match_indices
+ /// [`match_indices`]: ../../std/primitive.str.html#method.match_indices
struct MatchIndices;
reverse:
- /// Created with the method [`rmatch_indices()`].
+ /// Created with the method [`rmatch_indices`].
///
- /// [`rmatch_indices()`]: ../../std/primitive.str.html#method.rmatch_indices
+ /// [`rmatch_indices`]: ../../std/primitive.str.html#method.rmatch_indices
struct RMatchIndices;
stability:
#[stable(feature = "str_match_indices", since = "1.5.0")]
generate_pattern_iterators! {
forward:
- /// Created with the method [`matches()`].
+ /// Created with the method [`matches`].
///
- /// [`matches()`]: ../../std/primitive.str.html#method.matches
+ /// [`matches`]: ../../std/primitive.str.html#method.matches
struct Matches;
reverse:
- /// Created with the method [`rmatches()`].
+ /// Created with the method [`rmatches`].
///
- /// [`rmatches()`]: ../../std/primitive.str.html#method.rmatches
+ /// [`rmatches`]: ../../std/primitive.str.html#method.rmatches
struct RMatches;
stability:
#[stable(feature = "str_matches", since = "1.2.0")]
delegate double ended;
}
-/// Created with the method [`lines()`].
+/// Created with the method [`lines`].
///
-/// [`lines()`]: ../../std/primitive.str.html#method.lines
+/// [`lines`]: ../../std/primitive.str.html#method.lines
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone, Debug)]
pub struct Lines<'a>(Map<SplitTerminator<'a, char>, LinesAnyMap>);
#[unstable(feature = "fused", issue = "35602")]
impl<'a> FusedIterator for Lines<'a> {}
-/// Created with the method [`lines_any()`].
+/// Created with the method [`lines_any`].
///
-/// [`lines_any()`]: ../../std/primitive.str.html#method.lines_any
+/// [`lines_any`]: ../../std/primitive.str.html#method.lines_any
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(since = "1.4.0", reason = "use lines()/Lines instead now")]
#[derive(Clone, Debug)]
while index < len {
let old_offset = index;
- macro_rules! err { () => {{
- return Err(Utf8Error {
- valid_up_to: old_offset
- })
- }}}
+ macro_rules! err {
+ ($error_len: expr) => {
+ return Err(Utf8Error {
+ valid_up_to: old_offset,
+ error_len: $error_len,
+ })
+ }
+ }
macro_rules! next { () => {{
index += 1;
// we needed data, but there was none: error!
if index >= len {
- err!()
+ err!(None)
}
v[index]
}}}
let first = v[index];
if first >= 128 {
let w = UTF8_CHAR_WIDTH[first as usize];
- let second = next!();
// 2-byte encoding is for codepoints \u{0080} to \u{07ff}
// first C2 80 last DF BF
// 3-byte encoding is for codepoints \u{0800} to \u{ffff}
// UTF8-4 = %xF0 %x90-BF 2( UTF8-tail ) / %xF1-F3 3( UTF8-tail ) /
// %xF4 %x80-8F 2( UTF8-tail )
match w {
- 2 => if second & !CONT_MASK != TAG_CONT_U8 {err!()},
+ 2 => if next!() & !CONT_MASK != TAG_CONT_U8 {
+ err!(Some(1))
+ },
3 => {
- match (first, second, next!() & !CONT_MASK) {
- (0xE0 , 0xA0 ... 0xBF, TAG_CONT_U8) |
- (0xE1 ... 0xEC, 0x80 ... 0xBF, TAG_CONT_U8) |
- (0xED , 0x80 ... 0x9F, TAG_CONT_U8) |
- (0xEE ... 0xEF, 0x80 ... 0xBF, TAG_CONT_U8) => {}
- _ => err!()
+ match (first, next!()) {
+ (0xE0 , 0xA0 ... 0xBF) |
+ (0xE1 ... 0xEC, 0x80 ... 0xBF) |
+ (0xED , 0x80 ... 0x9F) |
+ (0xEE ... 0xEF, 0x80 ... 0xBF) => {}
+ _ => err!(Some(1))
+ }
+ if next!() & !CONT_MASK != TAG_CONT_U8 {
+ err!(Some(2))
}
}
4 => {
- match (first, second, next!() & !CONT_MASK, next!() & !CONT_MASK) {
- (0xF0 , 0x90 ... 0xBF, TAG_CONT_U8, TAG_CONT_U8) |
- (0xF1 ... 0xF3, 0x80 ... 0xBF, TAG_CONT_U8, TAG_CONT_U8) |
- (0xF4 , 0x80 ... 0x8F, TAG_CONT_U8, TAG_CONT_U8) => {}
- _ => err!()
+ match (first, next!()) {
+ (0xF0 , 0x90 ... 0xBF) |
+ (0xF1 ... 0xF3, 0x80 ... 0xBF) |
+ (0xF4 , 0x80 ... 0x8F) => {}
+ _ => err!(Some(1))
+ }
+ if next!() & !CONT_MASK != TAG_CONT_U8 {
+ err!(Some(2))
+ }
+ if next!() & !CONT_MASK != TAG_CONT_U8 {
+ err!(Some(3))
}
}
- _ => err!()
+ _ => err!(Some(1))
}
index += 1;
} else {
//! This functionality is intended to be expanded over time as more surface
//! area for macro authors is stabilized.
//!
-//! See [the book](../../book/procedural-macros.html) for more.
+//! See [the book](../book/procedural-macros.html) for more.
#![crate_name = "proc_macro"]
#![stable(feature = "proc_macro_lib", since = "1.15.0")]
#![crate_type = "dylib"]
#![deny(warnings)]
#![deny(missing_docs)]
+#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+ html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+ html_root_url = "https://doc.rust-lang.org/nightly/",
+ html_playground_url = "https://play.rust-lang.org/",
+ issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
+ test(no_crate_inject, attr(deny(warnings))),
+ test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]
#![feature(rustc_private)]
#![feature(staged_api)]
tables: tables,
graph: graph,
fn_exit: fn_exit,
- loop_scopes: Vec::new()
+ loop_scopes: Vec::new(),
};
body_exit = cfg_builder.expr(&body.value, entry);
cfg_builder.add_contained_edge(body_exit, fn_exit);
- let CFGBuilder {graph, ..} = cfg_builder;
- CFG {graph: graph,
- entry: entry,
- exit: fn_exit}
+ let CFGBuilder { graph, .. } = cfg_builder;
+ CFG {
+ graph: graph,
+ entry: entry,
+ exit: fn_exit,
+ }
}
impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
self.add_ast_node(id, &[exit])
}
- hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) => {
+ hir::StmtExpr(ref expr, id) |
+ hir::StmtSemi(ref expr, id) => {
let exit = self.expr(&expr, pred);
self.add_ast_node(id, &[exit])
}
self.pat(&local.pat, init_exit)
}
- hir::DeclItem(_) => {
- pred
- }
+ hir::DeclItem(_) => pred,
}
}
PatKind::Path(_) |
PatKind::Lit(..) |
PatKind::Range(..) |
- PatKind::Wild => {
- self.add_ast_node(pat.id, &[pred])
- }
+ PatKind::Wild => self.add_ast_node(pat.id, &[pred]),
PatKind::Box(ref subpat) |
PatKind::Ref(ref subpat, _) |
}
PatKind::Struct(_, ref subpats, _) => {
- let pats_exit =
- self.pats_all(subpats.iter().map(|f| &f.node.pat), pred);
+ let pats_exit = self.pats_all(subpats.iter().map(|f| &f.node.pat), pred);
self.add_ast_node(pat.id, &[pats_exit])
}
let method_call = ty::MethodCall::expr(call_expr.id);
let fn_ty = match self.tables.method_map.get(&method_call) {
Some(method) => method.ty,
- None => self.tables.expr_ty_adjusted(func_or_rcvr)
+ None => self.tables.expr_ty_adjusted(func_or_rcvr),
};
let func_or_rcvr_exit = self.expr(func_or_rcvr, pred);
from_index: CFGIndex,
to_loop: LoopScope,
to_index: CFGIndex) {
- let mut data = CFGEdgeData {exiting_scopes: vec![] };
+ let mut data = CFGEdgeData { exiting_scopes: vec![] };
let mut scope = self.tcx.region_maps.node_extent(from_expr.id);
let target_scope = self.tcx.region_maps.node_extent(to_loop.loop_id);
while scope != target_scope {
}
span_bug!(expr.span, "no loop scope for id {}", loop_id);
}
- Err(err) => span_bug!(expr.span, "loop scope error: {}", err)
+ Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
}
}
}
DepTrackingMap {
phantom: PhantomData,
graph: graph,
- map: FxHashMap()
+ map: FxHashMap(),
}
}
use super::dep_node::{DepNode, WorkProductId};
use super::query::DepGraphQuery;
use super::raii;
+use super::safe::DepGraphSafe;
use super::thread::{DepGraphThreadData, DepMessage};
#[derive(Clone)]
op()
}
- pub fn with_task<OP,R>(&self, key: DepNode<DefId>, op: OP) -> R
- where OP: FnOnce() -> R
+ /// Starts a new dep-graph task. Dep-graph tasks are specified
+ /// using a free function (`task`) and **not** a closure -- this
+ /// is intentional because we want to exercise tight control over
+ /// what state they have access to. In particular, we want to
+ /// prevent implicit 'leaks' of tracked state into the task (which
+ /// could then be read without generating correct edges in the
+ /// dep-graph -- see the [README] for more details on the
+ /// dep-graph). To this end, the task function gets exactly two
+ /// pieces of state: the context `cx` and an argument `arg`. Both
+ /// of these bits of state must be of some type that implements
+ /// `DepGraphSafe` and hence does not leak.
+ ///
+ /// The choice of two arguments is not fundamental. One argument
+ /// would work just as well, since multiple values can be
+ /// collected using tuples. However, using two arguments works out
+ /// to be quite convenient, since it is common to need a context
+ /// (`cx`) and some argument (e.g., a `DefId` identifying what
+ /// item to process).
+ ///
+ /// For cases where you need some other number of arguments:
+ ///
+ /// - If you only need one argument, just use `()` for the `arg`
+ /// parameter.
+ /// - If you need 3+ arguments, use a tuple for the
+ /// `arg` parameter.
+ ///
+ /// [README]: README.md
+ pub fn with_task<C, A, R>(&self, key: DepNode<DefId>, cx: C, arg: A, task: fn(C, A) -> R) -> R
+ where C: DepGraphSafe, A: DepGraphSafe
{
let _task = self.in_task(key);
- op()
+ task(cx, arg)
}
pub fn read(&self, v: DepNode<DefId>) {
mod graph;
mod query;
mod raii;
+mod safe;
mod shadow;
mod thread;
mod visit;
pub use self::graph::DepGraph;
pub use self::graph::WorkProduct;
pub use self::query::DepGraphQuery;
+pub use self::safe::AssertDepGraphSafe;
+pub use self::safe::DepGraphSafe;
pub use self::visit::visit_all_bodies_in_krate;
pub use self::visit::visit_all_item_likes_in_krate;
pub use self::raii::DepTask;
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use hir::BodyId;
+use hir::def_id::DefId;
+use syntax::ast::NodeId;
+use ty::TyCtxt;
+
+/// The `DepGraphSafe` trait is used to specify what kinds of values
+/// are safe to "leak" into a task. The idea is that this should
+/// only be implemented for things like the tcx as well as various id
+/// types, which will create reads in the dep-graph whenever the trait
+/// loads anything that might depend on the input program.
+pub trait DepGraphSafe {
+}
+
+/// A `BodyId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for BodyId {
+}
+
+/// A `NodeId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for NodeId {
+}
+
+/// A `DefId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for DefId {
+}
+
+/// The type context itself can be used to access all kinds of tracked
+/// state, but those accesses should always generate read events.
+impl<'a, 'gcx, 'tcx> DepGraphSafe for TyCtxt<'a, 'gcx, 'tcx> {
+}
+
+/// Tuples make it easy to build up state.
+impl<A, B> DepGraphSafe for (A, B)
+ where A: DepGraphSafe, B: DepGraphSafe
+{
+}
+
+/// No data here! :)
+impl DepGraphSafe for () {
+}
+
+/// A convenient override that lets you pass arbitrary state into a
+/// task. Every use should be accompanied by a comment explaining why
+/// it makes sense (or how it could be refactored away in the future).
+pub struct AssertDepGraphSafe<T>(pub T);
+
+impl<T> DepGraphSafe for AssertDepGraphSafe<T> {
+}
struct TrackingVisitor<'visit, 'tcx: 'visit, F: 'visit, V: 'visit> {
tcx: TyCtxt<'visit, 'tcx, 'tcx>,
dep_node_fn: &'visit mut F,
- visitor: &'visit mut V
+ visitor: &'visit mut V,
}
impl<'visit, 'tcx, F, V> ItemLikeVisitor<'tcx> for TrackingVisitor<'visit, 'tcx, F, V>
let mut tracking_visitor = TrackingVisitor {
tcx: tcx,
dep_node_fn: &mut dep_node_fn,
- visitor: visitor
+ visitor: visitor,
};
krate.visit_all_item_likes(&mut tracking_visitor)
}
pub fn visit_all_bodies_in_krate<'a, 'tcx, C>(tcx: TyCtxt<'a, 'tcx, 'tcx>, callback: C)
- where C: Fn(/* body_owner */ DefId, /* body id */ hir::BodyId),
+ where C: Fn(/* body_owner */
+ DefId,
+ /* body id */
+ hir::BodyId)
{
let krate = tcx.hir.krate();
for &body_id in &krate.body_ids {
trait_items: BTreeMap<hir::TraitItemId, hir::TraitItem>,
impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem>,
bodies: BTreeMap<hir::BodyId, hir::Body>,
+ exported_macros: Vec<hir::MacroDef>,
trait_impls: BTreeMap<DefId, Vec<NodeId>>,
trait_default_impl: BTreeMap<DefId, NodeId>,
+ catch_scopes: Vec<NodeId>,
loop_scopes: Vec<NodeId>,
is_in_loop_condition: bool,
bodies: BTreeMap::new(),
trait_impls: BTreeMap::new(),
trait_default_impl: BTreeMap::new(),
+ exported_macros: Vec::new(),
+ catch_scopes: Vec::new(),
loop_scopes: Vec::new(),
is_in_loop_condition: false,
type_def_lifetime_params: DefIdMap(),
impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
fn visit_item(&mut self, item: &'lcx Item) {
- let hir_item = self.lctx.lower_item(item);
- self.lctx.items.insert(item.id, hir_item);
- visit::walk_item(self, item);
+ if let Some(hir_item) = self.lctx.lower_item(item) {
+ self.lctx.items.insert(item.id, hir_item);
+ visit::walk_item(self, item);
+ }
}
fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
let module = self.lower_mod(&c.module);
let attrs = self.lower_attrs(&c.attrs);
- let exported_macros = c.exported_macros.iter().map(|m| self.lower_macro_def(m)).collect();
let body_ids = body_ids(&self.bodies);
hir::Crate {
module: module,
attrs: attrs,
span: c.span,
- exported_macros: exported_macros,
+ exported_macros: hir::HirVec::from(self.exported_macros),
items: self.items,
trait_items: self.trait_items,
impl_items: self.impl_items,
span
}
+ fn with_catch_scope<T, F>(&mut self, catch_id: NodeId, f: F) -> T
+ where F: FnOnce(&mut LoweringContext) -> T
+ {
+ let len = self.catch_scopes.len();
+ self.catch_scopes.push(catch_id);
+
+ let result = f(self);
+ assert_eq!(len + 1, self.catch_scopes.len(),
+ "catch scopes should be added and removed in stack order");
+
+ self.catch_scopes.pop().unwrap();
+
+ result
+ }
+
fn with_loop_scope<T, F>(&mut self, loop_id: NodeId, f: F) -> T
where F: FnOnce(&mut LoweringContext) -> T
{
result
}
- fn with_new_loop_scopes<T, F>(&mut self, f: F) -> T
+ fn with_new_scopes<T, F>(&mut self, f: F) -> T
where F: FnOnce(&mut LoweringContext) -> T
{
let was_in_loop_condition = self.is_in_loop_condition;
self.is_in_loop_condition = false;
+ let catch_scopes = mem::replace(&mut self.catch_scopes, Vec::new());
let loop_scopes = mem::replace(&mut self.loop_scopes, Vec::new());
let result = f(self);
- mem::replace(&mut self.loop_scopes, loop_scopes);
+ self.catch_scopes = catch_scopes;
+ self.loop_scopes = loop_scopes;
self.is_in_loop_condition = was_in_loop_condition;
self.record_body(value, None))
}
ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
- self.with_new_loop_scopes(|this| {
+ self.with_new_scopes(|this| {
let body = this.lower_block(body);
let body = this.expr_block(body, ThinVec::new());
let body_id = this.record_body(body, Some(decl));
bounds,
items)
}
- ItemKind::Mac(_) => panic!("Shouldn't still be around"),
+ ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
}
}
}
}
- fn lower_macro_def(&mut self, m: &MacroDef) -> hir::MacroDef {
- hir::MacroDef {
- name: m.ident.name,
- attrs: self.lower_attrs(&m.attrs),
- id: m.id,
- span: m.span,
- body: m.body.clone().into(),
- }
- }
-
fn lower_item_id(&mut self, i: &Item) -> SmallVector<hir::ItemId> {
- if let ItemKind::Use(ref view_path) = i.node {
- if let ViewPathList(_, ref imports) = view_path.node {
- return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
- .map(|id| hir::ItemId { id: id }).collect();
+ match i.node {
+ ItemKind::Use(ref view_path) => {
+ if let ViewPathList(_, ref imports) = view_path.node {
+ return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
+ .map(|id| hir::ItemId { id: id }).collect();
+ }
}
+ ItemKind::MacroDef(..) => return SmallVector::new(),
+ _ => {}
}
SmallVector::one(hir::ItemId { id: i.id })
}
- pub fn lower_item(&mut self, i: &Item) -> hir::Item {
+ pub fn lower_item(&mut self, i: &Item) -> Option<hir::Item> {
let mut name = i.ident.name;
let attrs = self.lower_attrs(&i.attrs);
let mut vis = self.lower_visibility(&i.vis);
+ if let ItemKind::MacroDef(ref tts) = i.node {
+ if i.attrs.iter().any(|attr| attr.name() == "macro_export") {
+ self.exported_macros.push(hir::MacroDef {
+ name: name, attrs: attrs, id: i.id, span: i.span, body: tts.clone().into(),
+ });
+ }
+ return None;
+ }
+
let node = self.with_parent_def(i.id, |this| {
this.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node)
});
- hir::Item {
+ Some(hir::Item {
id: i.id,
name: name,
attrs: attrs,
node: node,
vis: vis,
span: i.span,
- }
+ })
}
fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem {
this.lower_opt_sp_ident(opt_ident),
hir::LoopSource::Loop))
}
+ ExprKind::Catch(ref body) => {
+ // FIXME(cramertj): Add catch to HIR
+ self.with_catch_scope(e.id, |this| hir::ExprBlock(this.lower_block(body)))
+ }
ExprKind::Match(ref expr, ref arms) => {
hir::ExprMatch(P(self.lower_expr(expr)),
arms.iter().map(|x| self.lower_arm(x)).collect(),
hir::MatchSource::Normal)
}
ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => {
- self.with_new_loop_scopes(|this| {
+ self.with_new_scopes(|this| {
this.with_parent_def(e.id, |this| {
let expr = this.lower_expr(body);
hir::ExprClosure(this.lower_capture_clause(capture_clause),
// Err(err) => #[allow(unreachable_code)]
// return Carrier::from_error(From::from(err)),
// }
+
+ // FIXME(cramertj): implement breaking to catch
+ if !self.catch_scopes.is_empty() {
+ bug!("`?` in catch scopes is unimplemented")
+ }
+
let unstable_span = self.allow_internal_unstable("?", e.span);
// Carrier::translate(<expr>)
ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_str()),
ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
DefPathData::ValueNs(i.ident.name.as_str()),
- ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder
+ ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.name.as_str()),
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false),
ItemKind::Use(ref view_path) => {
match view_path.node {
self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
}
- fn visit_macro_def(&mut self, macro_def: &'a MacroDef) {
- self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
- }
-
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt.node {
StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false),
The main case which fails today that I would like to support is:
-```text
+```rust
fn foo<T>(x: T, y: T) { ... }
fn bar() {
`X`, and thus inherits its UB/LB of `@mut int`. This leaves no
flexibility for `T` to later adjust to accommodate `@int`.
+Note: `@` and `@mut` are replaced with `Rc<T>` and `Rc<RefCell<T>>` in current Rust.
+
### What to do when not all bounds are present
In the prior discussion we assumed that A.ub was not top and B.lb was
"execute" by testing the value they are applied to and creating any
relevant bindings). So, for example:
- fn foo(x: isize, y: isize) { // -+
- // +------------+ // |
- // | +-----+ // |
- // | +-+ +-+ +-+ // |
- // | | | | | | | // |
- // v v v v v v v // |
- let z = x + y; // |
- ... // |
- } // -+
-
- fn bar() { ... }
+```rust
+fn foo(x: isize, y: isize) { // -+
+// +------------+ // |
+// | +-----+ // |
+// | +-+ +-+ +-+ // |
+// | | | | | | | // |
+// v v v v v v v // |
+ let z = x + y; // |
+ ... // |
+} // -+
+
+fn bar() { ... }
+```
In this example, there is a region for the fn body block as a whole,
and then a subregion for the declaration of the local variable.
particular when combined with `&mut` functions. For example, a call
like this one
- self.foo(self.bar())
+```rust
+self.foo(self.bar())
+```
where both `foo` and `bar` are `&mut self` functions will always yield
an error.
Here is a more involved example (which is safe) so we can see what's
going on:
- struct Foo { f: usize, g: usize }
- ...
- fn add(p: &mut usize, v: usize) {
- *p += v;
- }
- ...
- fn inc(p: &mut usize) -> usize {
- *p += 1; *p
- }
- fn weird() {
- let mut x: Box<Foo> = box Foo { ... };
- 'a: add(&mut (*x).f,
- 'b: inc(&mut (*x).f)) // (..)
- }
+```rust
+struct Foo { f: usize, g: usize }
+// ...
+fn add(p: &mut usize, v: usize) {
+ *p += v;
+}
+// ...
+fn inc(p: &mut usize) -> usize {
+ *p += 1; *p
+}
+fn weird() {
+ let mut x: Box<Foo> = box Foo { /* ... */ };
+ 'a: add(&mut (*x).f,
+ 'b: inc(&mut (*x).f)) // (..)
+}
+```
The important part is the line marked `(..)` which contains a call to
`add()`. The first argument is a mutable borrow of the field `f`. The
involved with `'a` in detail. We'll break apart all the steps involved
in a call expression:
- 'a: {
- 'a_arg1: let a_temp1: ... = add;
- 'a_arg2: let a_temp2: &'a mut usize = &'a mut (*x).f;
- 'a_arg3: let a_temp3: usize = {
- let b_temp1: ... = inc;
- let b_temp2: &'b = &'b mut (*x).f;
- 'b_call: b_temp1(b_temp2)
- };
- 'a_call: a_temp1(a_temp2, a_temp3) // (**)
- }
+```rust
+'a: {
+ 'a_arg1: let a_temp1: ... = add;
+ 'a_arg2: let a_temp2: &'a mut usize = &'a mut (*x).f;
+ 'a_arg3: let a_temp3: usize = {
+ let b_temp1: ... = inc;
+ let b_temp2: &'b = &'b mut (*x).f;
+ 'b_call: b_temp1(b_temp2)
+ };
+ 'a_call: a_temp1(a_temp2, a_temp3) // (**)
+}
+```
Here we see that the lifetime `'a` includes a number of substatements.
In particular, there is this lifetime I've called `'a_call` that
argument, it can still be *invalidated* by that evaluation. Consider
this similar but unsound example:
- struct Foo { f: usize, g: usize }
- ...
- fn add(p: &mut usize, v: usize) {
- *p += v;
- }
- ...
- fn consume(x: Box<Foo>) -> usize {
- x.f + x.g
- }
- fn weird() {
- let mut x: Box<Foo> = box Foo { ... };
- 'a: add(&mut (*x).f, consume(x)) // (..)
- }
+```rust
+struct Foo { f: usize, g: usize }
+// ...
+fn add(p: &mut usize, v: usize) {
+ *p += v;
+}
+// ...
+fn consume(x: Box<Foo>) -> usize {
+ x.f + x.g
+}
+fn weird() {
+    let mut x: Box<Foo> = box Foo { /* ... */ };
+ 'a: add(&mut (*x).f, consume(x)) // (..)
+}
+```
In this case, the second argument to `add` actually consumes `x`, thus
invalidating the first argument.
};
if output_template.is_empty() {
- bug!("empty string provided as RUST_REGION_GRAPH");
+ panic!("empty string provided as RUST_REGION_GRAPH");
}
if output_template.contains('%') {
let (span, msg) = self;
let mut diagnostic = Diagnostic::new(errors::Level::Warning, msg);
diagnostic.set_span(span);
- EarlyLint { id: id, diagnostic: diagnostic }
+ EarlyLint {
+ id: id,
+ diagnostic: diagnostic,
+ }
}
}
impl IntoEarlyLint for Diagnostic {
fn into_early_lint(self, id: LintId) -> EarlyLint {
- EarlyLint { id: id, diagnostic: self }
+ EarlyLint {
+ id: id,
+ diagnostic: self,
+ }
}
}
enum FindLintError {
NotFound,
- Removed
+ Removed,
}
impl LintStore {
self.tables = old_tables;
}
+ fn visit_body(&mut self, body: &'tcx hir::Body) {
+ run_lints!(self, check_body, late_passes, body);
+ hir_visit::walk_body(self, body);
+ run_lints!(self, check_body_post, late_passes, body);
+ }
+
fn visit_item(&mut self, it: &'tcx hir::Item) {
self.with_lint_attrs(&it.attrs, |cx| {
run_lints!(cx, check_item, late_passes, it);
NoLint,
// The lint is either renamed or removed. This is the warning
// message.
- Warning(String)
+ Warning(String),
}
/// Checks the name of a lint for its existence, and whether it was
// FIXME: eliminate the duplication with `Visitor`. But this also
// contains a few lint-specific methods with no equivalent in `Visitor`.
pub trait LateLintPass<'a, 'tcx>: LintPass {
+ fn check_body(&mut self, _: &LateContext, _: &'tcx hir::Body) { }
+ fn check_body_post(&mut self, _: &LateContext, _: &'tcx hir::Body) { }
fn check_name(&mut self, _: &LateContext, _: Span, _: ast::Name) { }
fn check_crate(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Crate) { }
fn check_crate_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Crate) { }
pub enum NativeLibraryKind {
NativeStatic, // native static library (.a archive)
NativeStaticNobundle, // native static library, which doesn't get bundled into .rlibs
- NativeFramework, // OSX-specific
+ NativeFramework, // macOS-specific
NativeUnknown, // default way to specify a dynamic library
}
}
pub enum LoadedMacro {
- MacroRules(ast::MacroDef),
+ MacroDef(ast::Item),
ProcMacro(Rc<SyntaxExtension>),
}
use hir::def_id::DefId;
use ty::subst::Substs;
use ty::{self, AdtDef, ClosureSubsts, Region, Ty};
+use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use util::ppaux;
use rustc_back::slice;
use hir::InlineAsm;
}
/// Lowered representation of a single function.
-// Do not implement clone for Mir, which can be accidently done and kind of expensive.
-#[derive(RustcEncodable, RustcDecodable, Debug)]
+#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Mir<'tcx> {
/// List of basic blocks. References to basic block use a newtyped index type `BasicBlock`
/// that indexes into this vector.
}
}
}
+
+
+/*
+ * TypeFoldable implementations for MIR types
+ */
+
+impl<'tcx> TypeFoldable<'tcx> for Mir<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ Mir {
+ basic_blocks: self.basic_blocks.fold_with(folder),
+ visibility_scopes: self.visibility_scopes.clone(),
+ promoted: self.promoted.fold_with(folder),
+ return_ty: self.return_ty.fold_with(folder),
+ local_decls: self.local_decls.fold_with(folder),
+ arg_count: self.arg_count,
+ upvar_decls: self.upvar_decls.clone(),
+ spread_arg: self.spread_arg,
+ span: self.span,
+ cache: cache::Cache::new()
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.basic_blocks.visit_with(visitor) ||
+ self.promoted.visit_with(visitor) ||
+ self.return_ty.visit_with(visitor) ||
+ self.local_decls.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for LocalDecl<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ LocalDecl {
+ ty: self.ty.fold_with(folder),
+ ..self.clone()
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.ty.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for BasicBlockData<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ BasicBlockData {
+ statements: self.statements.fold_with(folder),
+ terminator: self.terminator.fold_with(folder),
+ is_cleanup: self.is_cleanup
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.statements.visit_with(visitor) || self.terminator.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Statement<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ use mir::StatementKind::*;
+
+ let kind = match self.kind {
+ Assign(ref lval, ref rval) => Assign(lval.fold_with(folder), rval.fold_with(folder)),
+ SetDiscriminant { ref lvalue, variant_index } => SetDiscriminant {
+ lvalue: lvalue.fold_with(folder),
+ variant_index: variant_index
+ },
+ StorageLive(ref lval) => StorageLive(lval.fold_with(folder)),
+ StorageDead(ref lval) => StorageDead(lval.fold_with(folder)),
+ InlineAsm { ref asm, ref outputs, ref inputs } => InlineAsm {
+ asm: asm.clone(),
+ outputs: outputs.fold_with(folder),
+ inputs: inputs.fold_with(folder)
+ },
+ Nop => Nop,
+ };
+ Statement {
+ source_info: self.source_info,
+ kind: kind
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ use mir::StatementKind::*;
+
+ match self.kind {
+ Assign(ref lval, ref rval) => { lval.visit_with(visitor) || rval.visit_with(visitor) }
+ SetDiscriminant { ref lvalue, .. } |
+ StorageLive(ref lvalue) |
+ StorageDead(ref lvalue) => lvalue.visit_with(visitor),
+ InlineAsm { ref outputs, ref inputs, .. } =>
+ outputs.visit_with(visitor) || inputs.visit_with(visitor),
+ Nop => false,
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ use mir::TerminatorKind::*;
+
+ let kind = match self.kind {
+ Goto { target } => Goto { target: target },
+ SwitchInt { ref discr, switch_ty, ref values, ref targets } => SwitchInt {
+ discr: discr.fold_with(folder),
+ switch_ty: switch_ty.fold_with(folder),
+ values: values.clone(),
+ targets: targets.clone()
+ },
+ Drop { ref location, target, unwind } => Drop {
+ location: location.fold_with(folder),
+ target: target,
+ unwind: unwind
+ },
+ DropAndReplace { ref location, ref value, target, unwind } => DropAndReplace {
+ location: location.fold_with(folder),
+ value: value.fold_with(folder),
+ target: target,
+ unwind: unwind
+ },
+ Call { ref func, ref args, ref destination, cleanup } => {
+ let dest = destination.as_ref().map(|&(ref loc, dest)| {
+ (loc.fold_with(folder), dest)
+ });
+
+ Call {
+ func: func.fold_with(folder),
+ args: args.fold_with(folder),
+ destination: dest,
+ cleanup: cleanup
+ }
+ },
+ Assert { ref cond, expected, ref msg, target, cleanup } => {
+ let msg = if let AssertMessage::BoundsCheck { ref len, ref index } = *msg {
+ AssertMessage::BoundsCheck {
+ len: len.fold_with(folder),
+ index: index.fold_with(folder),
+ }
+ } else {
+ msg.clone()
+ };
+ Assert {
+ cond: cond.fold_with(folder),
+ expected: expected,
+ msg: msg,
+ target: target,
+ cleanup: cleanup
+ }
+ },
+ Resume => Resume,
+ Return => Return,
+ Unreachable => Unreachable,
+ };
+ Terminator {
+ source_info: self.source_info,
+ kind: kind
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ use mir::TerminatorKind::*;
+
+ match self.kind {
+ SwitchInt { ref discr, switch_ty, .. } =>
+ discr.visit_with(visitor) || switch_ty.visit_with(visitor),
+ Drop { ref location, ..} => location.visit_with(visitor),
+ DropAndReplace { ref location, ref value, ..} =>
+ location.visit_with(visitor) || value.visit_with(visitor),
+ Call { ref func, ref args, ref destination, .. } => {
+ let dest = if let Some((ref loc, _)) = *destination {
+ loc.visit_with(visitor)
+ } else { false };
+ dest || func.visit_with(visitor) || args.visit_with(visitor)
+ },
+ Assert { ref cond, ref msg, .. } => {
+ if cond.visit_with(visitor) {
+ if let AssertMessage::BoundsCheck { ref len, ref index } = *msg {
+ len.visit_with(visitor) || index.visit_with(visitor)
+ } else {
+ false
+ }
+ } else {
+ false
+ }
+ },
+ Goto { .. } |
+ Resume |
+ Return |
+ Unreachable => false
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Lvalue<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ match self {
+ &Lvalue::Projection(ref p) => Lvalue::Projection(p.fold_with(folder)),
+ _ => self.clone()
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ if let &Lvalue::Projection(ref p) = self {
+ p.visit_with(visitor)
+ } else {
+ false
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ use mir::Rvalue::*;
+ match *self {
+ Use(ref op) => Use(op.fold_with(folder)),
+ Repeat(ref op, len) => Repeat(op.fold_with(folder), len),
+ Ref(region, bk, ref lval) => Ref(region.fold_with(folder), bk, lval.fold_with(folder)),
+ Len(ref lval) => Len(lval.fold_with(folder)),
+ Cast(kind, ref op, ty) => Cast(kind, op.fold_with(folder), ty.fold_with(folder)),
+ BinaryOp(op, ref rhs, ref lhs) =>
+ BinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
+ CheckedBinaryOp(op, ref rhs, ref lhs) =>
+ CheckedBinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
+ UnaryOp(op, ref val) => UnaryOp(op, val.fold_with(folder)),
+ Discriminant(ref lval) => Discriminant(lval.fold_with(folder)),
+ Box(ty) => Box(ty.fold_with(folder)),
+ Aggregate(ref kind, ref fields) => {
+ let kind = match *kind {
+ AggregateKind::Array(ty) => AggregateKind::Array(ty.fold_with(folder)),
+ AggregateKind::Tuple => AggregateKind::Tuple,
+ AggregateKind::Adt(def, v, substs, n) =>
+ AggregateKind::Adt(def, v, substs.fold_with(folder), n),
+ AggregateKind::Closure(id, substs) =>
+ AggregateKind::Closure(id, substs.fold_with(folder))
+ };
+ Aggregate(kind, fields.fold_with(folder))
+ }
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ use mir::Rvalue::*;
+ match *self {
+ Use(ref op) => op.visit_with(visitor),
+ Repeat(ref op, _) => op.visit_with(visitor),
+ Ref(region, _, ref lval) => region.visit_with(visitor) || lval.visit_with(visitor),
+ Len(ref lval) => lval.visit_with(visitor),
+ Cast(_, ref op, ty) => op.visit_with(visitor) || ty.visit_with(visitor),
+ BinaryOp(_, ref rhs, ref lhs) |
+ CheckedBinaryOp(_, ref rhs, ref lhs) =>
+ rhs.visit_with(visitor) || lhs.visit_with(visitor),
+ UnaryOp(_, ref val) => val.visit_with(visitor),
+ Discriminant(ref lval) => lval.visit_with(visitor),
+ Box(ty) => ty.visit_with(visitor),
+ Aggregate(ref kind, ref fields) => {
+ (match *kind {
+ AggregateKind::Array(ty) => ty.visit_with(visitor),
+ AggregateKind::Tuple => false,
+ AggregateKind::Adt(_, _, substs, _) => substs.visit_with(visitor),
+ AggregateKind::Closure(_, substs) => substs.visit_with(visitor)
+ }) || fields.visit_with(visitor)
+ }
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Operand<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ match *self {
+ Operand::Consume(ref lval) => Operand::Consume(lval.fold_with(folder)),
+ Operand::Constant(ref c) => Operand::Constant(c.fold_with(folder)),
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ match *self {
+ Operand::Consume(ref lval) => lval.visit_with(visitor),
+ Operand::Constant(ref c) => c.visit_with(visitor)
+ }
+ }
+}
+
+impl<'tcx, B, V> TypeFoldable<'tcx> for Projection<'tcx, B, V>
+ where B: TypeFoldable<'tcx>, V: TypeFoldable<'tcx>
+{
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ use mir::ProjectionElem::*;
+
+ let base = self.base.fold_with(folder);
+ let elem = match self.elem {
+ Deref => Deref,
+ Field(f, ty) => Field(f, ty.fold_with(folder)),
+ Index(ref v) => Index(v.fold_with(folder)),
+ ref elem => elem.clone()
+ };
+
+ Projection {
+ base: base,
+ elem: elem
+ }
+ }
+
+ fn super_visit_with<Vs: TypeVisitor<'tcx>>(&self, visitor: &mut Vs) -> bool {
+ use mir::ProjectionElem::*;
+
+ self.base.visit_with(visitor) ||
+ match self.elem {
+ Field(_, ty) => ty.visit_with(visitor),
+ Index(ref v) => v.visit_with(visitor),
+ _ => false
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Constant<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ Constant {
+ span: self.span.clone(),
+ ty: self.ty.fold_with(folder),
+ literal: self.literal.fold_with(folder)
+ }
+ }
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.ty.visit_with(visitor) || self.literal.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for Literal<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ match *self {
+ Literal::Item { def_id, substs } => Literal::Item {
+ def_id: def_id,
+ substs: substs.fold_with(folder)
+ },
+ _ => self.clone()
+ }
+ }
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ match *self {
+ Literal::Item { substs, .. } => substs.visit_with(visitor),
+ _ => false
+ }
+ }
+}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
-pub enum SizeKind { Exact, Min }
+pub enum SizeKind {
+ Exact,
+ Min,
+}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct FieldInfo {
always_encode_mir: bool = (false, parse_bool, [TRACKED],
"encode MIR of all functions into the crate metadata"),
osx_rpath_install_name: bool = (false, parse_bool, [TRACKED],
- "pass `-install_name @rpath/...` to the OSX linker"),
+ "pass `-install_name @rpath/...` to the macOS linker"),
sanitizer: Option<Sanitizer> = (None, parse_sanitizer, [TRACKED],
"Use a sanitizer"),
}
ObjectSafetyViolation,
};
+use errors::DiagnosticBuilder;
use fmt_macros::{Parser, Piece, Position};
+use hir::{intravisit, Local, Pat};
+use hir::intravisit::{Visitor, NestedVisitorMap};
+use hir::map::NodeExpr;
use hir::def_id::DefId;
use infer::{self, InferCtxt};
use infer::type_variable::TypeVariableOrigin;
use rustc::lint::builtin::EXTRA_REQUIREMENT_IN_IMPL;
+use std::fmt;
+use syntax::ast;
use ty::{self, AdtKind, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable};
use ty::error::ExpectedFound;
use ty::fast_reject;
use ty::subst::Subst;
use util::nodemap::{FxHashMap, FxHashSet};
-use std::fmt;
-use syntax::ast;
-use hir::{intravisit, Local, Pat};
-use hir::intravisit::{Visitor, NestedVisitorMap};
use syntax_pos::{DUMMY_SP, Span};
-use errors::DiagnosticBuilder;
+
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct TraitErrorKey<'tcx> {
err.span_label(cause.span, &format!("cannot infer type for `{}`", name));
- let expr = self.tcx.hir.expect_expr(cause.body_id);
-
let mut local_visitor = FindLocalByTypeVisitor {
infcx: &self,
target_ty: &ty,
found_pattern: None,
};
- local_visitor.visit_expr(expr);
+ // #40294: cause.body_id can also be a fn declaration.
+ // Currently, if it's anything other than NodeExpr, we just ignore it
+ match self.tcx.hir.find(cause.body_id) {
+ Some(NodeExpr(expr)) => local_visitor.visit_expr(expr),
+ _ => ()
+ }
if let Some(pattern) = local_visitor.found_pattern {
let pattern_span = pattern.span;
let new_trait = tcx.mk_dynamic(
ty::Binder(tcx.mk_existential_predicates(iter)), r_b);
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, new_trait, target)
+ self.infcx.eq_types(false, &obligation.cause, new_trait, target)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
// [T; n] -> [T].
(&ty::TyArray(a, _), &ty::TySlice(b)) => {
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, a, b)
+ self.infcx.eq_types(false, &obligation.cause, a, b)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
}
});
let new_struct = tcx.mk_adt(def, tcx.mk_substs(params));
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, new_struct, target)
+ self.infcx.eq_types(false, &obligation.cause, new_struct, target)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
pub struct OverlapError {
pub with_impl: DefId,
pub trait_desc: String,
- pub self_desc: Option<String>
+ pub self_desc: Option<String>,
}
/// Given a subst for the requested impl, translate it to a subst
}
pub struct SpecializesCache {
- map: FxHashMap<(DefId, DefId), bool>
+ map: FxHashMap<(DefId, DefId), bool>,
}
impl SpecializesCache {
#[derive(Clone, Copy, Debug)]
pub struct ExpectedFound<T> {
pub expected: T,
- pub found: T
+ pub found: T,
}
// Data structures used in type unification
fn new(root_mode: RootMode) -> LocalPathBuffer {
LocalPathBuffer {
root_mode: root_mode,
- str: String::new()
+ str: String::new(),
}
}
fn into_string(self) -> String {
self.str
}
-
}
impl ItemPathBuffer for LocalPathBuffer {
pub struct CycleError<'a> {
span: Span,
- cycle: RefMut<'a, [(Span, Query)]>
+ cycle: RefMut<'a, [(Span, Query)]>,
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
queries::mir::get(self, DUMMY_SP, did).borrow()
}
+ /// Given the DefId of an item, returns its MIR, borrowed immutably.
+ /// Returns None if there is no MIR for the DefId
+ pub fn maybe_item_mir(self, did: DefId) -> Option<Ref<'gcx, Mir<'gcx>>> {
+ if did.is_local() && !self.maps.mir.borrow().contains_key(&did) {
+ return None;
+ }
+
+ if !did.is_local() && !self.sess.cstore.is_item_mir_available(did) {
+ return None;
+ }
+
+ Some(self.item_mir(did))
+ }
+
/// If `type_needs_drop` returns true, then `ty` is definitely
/// non-copy and *might* have a destructor attached; if it returns
/// false, then `ty` definitely has no destructor (i.e. no drop glue).
use ty::{self, Lift, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use rustc_data_structures::accumulate_vec::AccumulateVec;
+use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use std::rc::Rc;
use syntax::abi;
self.expected.visit_with(visitor) || self.found.visit_with(visitor)
}
}
+
+impl<'tcx, T: TypeFoldable<'tcx>, I: Idx> TypeFoldable<'tcx> for IndexVec<I, T> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ self.iter().map(|x| x.fold_with(folder)).collect()
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.iter().any(|t| t.visit_with(visitor))
+ }
+}
/// at least as big as the scope `fr.scope`".
pub struct FreeRegion {
pub scope: region::CodeExtent,
- pub bound_region: BoundRegion
+ pub bound_region: BoundRegion,
}
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash,
// Anonymous region for the implicit env pointer parameter
// to a closure
- BrEnv
+ BrEnv,
}
/// When a region changed from late-bound to early-bound when #32330
pub fn principal(&self) -> Option<ExistentialTraitRef<'tcx>> {
match self.get(0) {
Some(&ExistentialPredicate::Trait(tr)) => Some(tr),
- _ => None
+ _ => None,
}
}
ty::Binder(&self.0)
}
- pub fn map_bound_ref<F,U>(&self, f: F) -> Binder<U>
+ pub fn map_bound_ref<F, U>(&self, f: F) -> Binder<U>
where F: FnOnce(&T) -> U
{
self.as_ref().map_bound(f)
}
- pub fn map_bound<F,U>(self, f: F) -> Binder<U>
+ pub fn map_bound<F, U>(self, f: F) -> Binder<U>
where F: FnOnce(T) -> U
{
ty::Binder(f(self.0))
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct IntVid {
- pub index: u32
+ pub index: u32,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct FloatVid {
- pub index: u32
+ pub index: u32,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct RegionVid {
- pub index: u32
+ pub index: u32,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct SkolemizedRegionVid {
- pub index: u32
+ pub index: u32,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
/// `infer::freshen` for more details.
FreshTy(u32),
FreshIntTy(u32),
- FreshFloatTy(u32)
+ FreshFloatTy(u32),
}
/// A `ProjectionPredicate` for an `ExistentialTraitRef`.
pub struct ExistentialProjection<'tcx> {
pub trait_ref: ExistentialTraitRef<'tcx>,
pub item_name: Name,
- pub ty: Ty<'tcx>
+ pub ty: Ty<'tcx>,
}
pub type PolyExistentialProjection<'tcx> = Binder<ExistentialProjection<'tcx>>;
ty::ProjectionPredicate {
projection_ty: ty::ProjectionTy {
trait_ref: self.trait_ref.with_self_ty(tcx, self_ty),
- item_name: self.item_name
+ item_name: self.item_name,
},
- ty: self.ty
+ ty: self.ty,
}
}
}
match *self {
ty::ReEarlyBound(..) => true,
ty::ReLateBound(..) => true,
- _ => false
+ _ => false,
}
}
pub fn is_nil(&self) -> bool {
match self.sty {
TyTuple(ref tys, _) => tys.is_empty(),
- _ => false
+ _ => false,
}
}
pub fn is_ty_var(&self) -> bool {
match self.sty {
TyInfer(TyVar(_)) => true,
- _ => false
+ _ => false,
}
}
pub fn is_self(&self) -> bool {
match self.sty {
TyParam(ref p) => p.is_self(),
- _ => false
+ _ => false,
}
}
pub fn is_structural(&self) -> bool {
match self.sty {
TyAdt(..) | TyTuple(..) | TyArray(..) | TyClosure(..) => true,
- _ => self.is_slice() | self.is_trait()
+ _ => self.is_slice() | self.is_trait(),
}
}
pub fn is_simd(&self) -> bool {
match self.sty {
TyAdt(def, _) => def.repr.simd,
- _ => false
+ _ => false,
}
}
pub fn is_region_ptr(&self) -> bool {
match self.sty {
TyRef(..) => true,
- _ => false
+ _ => false,
}
}
pub fn is_unsafe_ptr(&self) -> bool {
match self.sty {
TyRawPtr(_) => return true,
- _ => return false
+ _ => return false,
}
}
pub fn is_trait(&self) -> bool {
match self.sty {
TyDynamic(..) => true,
- _ => false
+ _ => false,
}
}
TyInfer(FreshTy(_)) => true,
TyInfer(FreshIntTy(_)) => true,
TyInfer(FreshFloatTy(_)) => true,
- _ => false
+ _ => false,
}
}
pub fn is_char(&self) -> bool {
match self.sty {
TyChar => true,
- _ => false
+ _ => false,
}
}
pub fn is_signed(&self) -> bool {
match self.sty {
TyInt(_) => true,
- _ => false
+ _ => false,
}
}
match self.sty {
TyInt(ast::IntTy::Is) | TyUint(ast::UintTy::Us) => false,
TyInt(..) | TyUint(..) | TyFloat(..) => true,
- _ => false
+ _ => false,
}
}
},
TyRef(_, mt) => Some(mt),
TyRawPtr(mt) if explicit => Some(mt),
- _ => None
+ _ => None,
}
}
pub fn builtin_index(&self) -> Option<Ty<'tcx>> {
match self.sty {
TyArray(ty, _) | TySlice(ty) => Some(ty),
- _ => None
+ _ => None,
}
}
pub fn is_fn(&self) -> bool {
match self.sty {
TyFnDef(..) | TyFnPtr(_) => true,
- _ => false
+ _ => false,
}
}
TyDynamic(ref tt, ..) => tt.principal().map(|p| p.def_id()),
TyAdt(def, _) => Some(def.did),
TyClosure(id, _) => Some(id),
- _ => None
+ _ => None,
}
}
pub fn ty_adt_def(&self) -> Option<&'tcx AdtDef> {
match self.sty {
TyAdt(adt, _) => Some(adt),
- _ => None
+ _ => None,
}
}
use hir::map as hir_map;
use traits::{self, Reveal};
use ty::{self, Ty, TyCtxt, TypeAndMut, TypeFlags, TypeFoldable};
-use ty::{ParameterEnvironment};
+use ty::ParameterEnvironment;
use ty::fold::TypeVisitor;
use ty::layout::{Layout, LayoutError};
use ty::TypeVariants::*;
type Disr = ConstInt;
- pub trait IntTypeExt {
+pub trait IntTypeExt {
fn to_ty<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>;
fn disr_incr<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, val: Option<Disr>)
-> Option<Disr>;
fn assert_ty_matches(&self, val: Disr);
fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Disr;
- }
+}
macro_rules! typed_literal {
pub enum CopyImplementationError<'tcx> {
InfrigingField(&'tcx ty::FieldDef),
NotAnAdt,
- HasDestructor
+ HasDestructor,
}
/// Describes whether a type is representable. For types that are not
tcx.infer_ctxt(self.clone(), Reveal::UserFacing).enter(|infcx| {
let (adt, substs) = match self_type.sty {
ty::TyAdt(adt, substs) => (adt, substs),
- _ => return Err(CopyImplementationError::NotAnAdt)
+ _ => return Err(CopyImplementationError::NotAnAdt),
};
let field_implements_copy = |field: &ty::FieldDef| {
let cause = traits::ObligationCause::dummy();
match traits::fully_normalize(&infcx, cause, &field.ty(tcx, substs)) {
Ok(ty) => !infcx.type_moves_by_default(ty, span),
- Err(..) => false
+ Err(..) => false,
}
};
}
}
}
- _ => ()
+ _ => (),
}
false
}
adt.variants[0].fields.get(i).map(|f| f.ty(self, substs))
}
(&TyTuple(ref v, _), None) => v.get(i).cloned(),
- _ => None
+ _ => None,
}
}
pub fn struct_tail(self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
while let TyAdt(def, substs) = ty.sty {
if !def.is_struct() {
- break
+ break;
}
match def.struct_variant().fields.last() {
Some(f) => ty = f.ty(self, substs),
- None => break
+ None => break,
}
}
ty
let (mut a, mut b) = (source, target);
while let (&TyAdt(a_def, a_substs), &TyAdt(b_def, b_substs)) = (&a.sty, &b.sty) {
if a_def != b_def || !a_def.is_struct() {
- break
+ break;
}
match a_def.struct_variant().fields.last() {
Some(f) => {
a = f.ty(self, a_substs);
b = f.ty(self, b_substs);
}
- _ => break
+ _ => break,
}
}
(a, b)
let dtor_did = match dtor_did {
Some(dtor) => dtor,
- None => return None
+ None => return None,
};
// RFC 1238: if the destructor method is tagged with the
}
tcx.layout_depth.set(depth+1);
- let layout = Layout::compute_uncached(self, infcx)?;
+ let layout = Layout::compute_uncached(self, infcx);
+ tcx.layout_depth.set(depth);
+ let layout = layout?;
if can_cache {
tcx.layout_cache.borrow_mut().insert(self, layout);
}
- tcx.layout_depth.set(depth);
Ok(layout)
}
substs_a.types().zip(substs_b.types()).all(|(a, b)| same_type(a, b))
}
- _ => {
- a == b
- }
+ _ => a == b,
}
}
if val == 0 {
groups.push(format!("{}", group));
- break
+ break;
} else {
groups.push(format!("{:03}", group));
}
type HANDLE = *mut u8;
use libc::size_t;
use std::mem;
- #[repr(C)] #[allow(non_snake_case)]
+ #[repr(C)]
+ #[allow(non_snake_case)]
struct PROCESS_MEMORY_COUNTERS {
cb: DWORD,
PageFaultCount: DWORD,
}
pub struct Indenter {
- _cannot_construct_outside_of_this_module: ()
+ _cannot_construct_outside_of_this_module: (),
}
impl Drop for Indenter {
match fs::create_dir(path) {
Ok(()) => return Ok(()),
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return Ok(()),
- Err(ref e) if e.kind() == io::ErrorKind::NotFound => {}
+ Err(ref e) if e.kind() == io::ErrorKind::NotFound => (),
Err(e) => return Err(e),
}
match path.parent() {
use target::TargetOptions;
pub fn opts() -> TargetOptions {
- // ELF TLS is only available in OSX 10.7+. If you try to compile for 10.6
+ // ELF TLS is only available in macOS 10.7+. If you try to compile for 10.6
// either the linker will complain if it is used or the binary will end up
- // segfaulting at runtime when run on 10.6. Rust by default supports OSX
+ // segfaulting at runtime when run on 10.6. Rust by default supports macOS
// 10.7+, but there is a standard environment variable,
// MACOSX_DEPLOYMENT_TARGET, which is used to signal targeting older
- // versions of OSX. For example compiling on 10.10 with
+ // versions of macOS. For example compiling on 10.10 with
// MACOSX_DEPLOYMENT_TARGET set to 10.6 will cause the linker to generate
// warnings about the usage of ELF TLS.
//
}).unwrap_or((10, 7));
TargetOptions {
- // OSX has -dead_strip, which doesn't rely on function_sections
+ // macOS has -dead_strip, which doesn't rely on function_sections
function_sections: false,
dynamic_linking: true,
executables: true,
/// Whether the target toolchain is like OpenBSD's.
/// Only useful for compiling against OpenBSD, for configuring abi when returning a struct.
pub is_like_openbsd: bool,
- /// Whether the target toolchain is like OSX's. Only useful for compiling against iOS/OS X, in
- /// particular running dsymutil and some other stuff like `-dead_strip`. Defaults to false.
+ /// Whether the target toolchain is like macOS's. Only useful for compiling against iOS/macOS,
+ /// in particular running dsymutil and some other stuff like `-dead_strip`. Defaults to false.
pub is_like_osx: bool,
/// Whether the target toolchain is like Solaris's.
/// Only useful for compiling against Illumos/Solaris,
pub type LoanDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, LoanDataFlowOperator>;
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- tcx.dep_graph.with_task(DepNode::BorrowCheckKrate, || {
+ tcx.dep_graph.with_task(DepNode::BorrowCheckKrate, tcx, (), check_crate_task);
+
+ fn check_crate_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
tcx.visit_all_bodies_in_krate(|body_owner_def_id, body_id| {
- tcx.dep_graph.with_task(DepNode::BorrowCheck(body_owner_def_id), || {
- borrowck_fn(tcx, body_id);
- });
+ tcx.dep_graph.with_task(DepNode::BorrowCheck(body_owner_def_id),
+ tcx,
+ body_id,
+ borrowck_fn);
});
- });
+ }
}
/// Collection of conclusions determined via borrow checker analyses.
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::collections::range::RangeArgument;
use std::fmt::Debug;
use std::iter::{self, FromIterator};
use std::slice;
self.raw.iter_mut().enumerate().map(IntoIdx { _marker: PhantomData })
}
+ #[inline]
+ pub fn drain<'a, R: RangeArgument<usize>>(
+ &'a mut self, range: R) -> impl Iterator<Item=T> + 'a {
+ self.raw.drain(range)
+ }
+
+ #[inline]
+ pub fn drain_enumerated<'a, R: RangeArgument<usize>>(
+ &'a mut self, range: R) -> impl Iterator<Item=(I, T)> + 'a {
+ self.raw.drain(range).enumerate().map(IntoIdx { _marker: PhantomData })
+ }
+
#[inline]
pub fn last(&self) -> Option<I> {
self.len().checked_sub(1).map(I::new)
pub fn truncate(&mut self, a: usize) {
self.raw.truncate(a)
}
+
+ #[inline]
+ pub fn get(&self, index: I) -> Option<&T> {
+ self.raw.get(index.index())
+ }
+
+ #[inline]
+ pub fn get_mut(&mut self, index: I) -> Option<&mut T> {
+ self.raw.get_mut(index.index())
+ }
}
impl<I: Idx, T> Index<I> for IndexVec<I, T> {
#![feature(associated_consts)]
#![feature(unsize)]
#![feature(i128_type)]
+#![feature(conservative_impl_trait)]
#![cfg_attr(unix, feature(libc))]
#![cfg_attr(test, feature(test))]
use serialize::json;
use std::env;
-use std::mem;
use std::ffi::{OsString, OsStr};
use std::fs;
use std::io::{self, Write};
};
write_out_deps(sess, &outputs, &crate_name);
+ if sess.opts.output_types.contains_key(&OutputType::DepInfo) &&
+ sess.opts.output_types.keys().count() == 1 {
+ return Ok(())
+ }
let arena = DroplessArena::new();
let arenas = GlobalArenas::new();
let whitelisted_legacy_custom_derives = registry.take_whitelisted_custom_derives();
let Registry { syntax_exts, early_lint_passes, late_lint_passes, lint_groups,
- llvm_passes, attributes, mir_passes, .. } = registry;
+ llvm_passes, attributes, .. } = registry;
sess.track_errors(|| {
let mut ls = sess.lint_store.borrow_mut();
}
*sess.plugin_llvm_passes.borrow_mut() = llvm_passes;
- sess.mir_passes.borrow_mut().extend(mir_passes);
*sess.plugin_attributes.borrow_mut() = attributes.clone();
})?;
krate
});
- krate.exported_macros = mem::replace(&mut resolver.exported_macros, Vec::new());
-
krate = time(time_passes, "maybe building test harness", || {
syntax::test::modify_for_testing(&sess.parse_sess,
&mut resolver,
passes.push_pass(box mir::transform::simplify::SimplifyCfg::new("elaborate-drops"));
// No lifetime analysis based on borrowing can be done from here on out.
+ passes.push_pass(box mir::transform::inline::Inline);
passes.push_pass(box mir::transform::instcombine::InstCombine::new());
passes.push_pass(box mir::transform::deaggregator::Deaggregator);
passes.push_pass(box mir::transform::copy_prop::CopyPropagation);
Allow <foo>
-D <foo> Deny <foo>
-F <foo> Forbid <foo> \
- (deny, and deny all overrides)
+ (deny <foo> and all attempts to override)
");
const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
"sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
"ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
- "sse4a\0", "rdrnd\0", "rdseed\0"];
+ "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
/// Add `target_feature = "..."` cfgs for a variety of platform
/// specific features (SSE, NEON etc.).
let mut annotations_position = vec![];
let mut line_len = 0;
let mut p = 0;
- let mut ann_iter = annotations.iter().peekable();
- while let Some(annotation) = ann_iter.next() {
- let peek = ann_iter.peek();
- if let Some(next) = peek {
- if overlaps(next, annotation) && !annotation.is_line() && !next.is_line()
+ for (i, annotation) in annotations.iter().enumerate() {
+ for (j, next) in annotations.iter().enumerate() {
+ if overlaps(next, annotation, 0) // This label overlaps with another one and both
+ && !annotation.is_line() // take space (they have text and are not
+ && !next.is_line() // multiline lines).
&& annotation.has_label()
+ && j > i
+ && p == 0 // We're currently on the first line, move the label one line down
{
// This annotation needs a new line in the output.
p += 1;
+ break;
}
}
annotations_position.push((p, annotation));
- if let Some(next) = peek {
- let l = if let Some(ref label) = next.label {
- label.len() + 2
- } else {
- 0
- };
- if (overlaps(next, annotation) // Do not allow two labels to be in the same line
- || next.end_col + l > annotation.start_col) // if they overlap including
- // padding, to avoid situations like:
- //
- // fn foo(x: u32) {
- // -------^------
- // | |
- // fn_spanx_span
- //
- && !annotation.is_line() // Do not add a new line if this annotation or the
- && !next.is_line() // next are vertical line placeholders.
- && annotation.has_label() // Both labels must have some text, otherwise
- && next.has_label() // they are not overlapping.
- {
- p += 1;
+ for (j, next) in annotations.iter().enumerate() {
+ if j > i {
+ let l = if let Some(ref label) = next.label {
+ label.len() + 2
+ } else {
+ 0
+ };
+ if overlaps(next, annotation, l) // Do not allow two labels to be in the same
+ // line if they overlap including padding, to
+ // avoid situations like:
+ //
+ // fn foo(x: u32) {
+ // -------^------
+ // | |
+ // fn_spanx_span
+ //
+ && !annotation.is_line() // Do not add a new line if this annotation
+ && !next.is_line() // or the next are vertical line placeholders.
+ && annotation.has_label() // Both labels must have some text, otherwise
+ && next.has_label() // they are not overlapping.
+ {
+ p += 1;
+ break;
+ }
}
}
if line_len < p {
(b_start..b_end + extra).contains(a_start) ||
(a_start..a_end + extra).contains(b_start)
}
-fn overlaps(a1: &Annotation, a2: &Annotation) -> bool {
- num_overlap(a1.start_col, a1.end_col, a2.start_col, a2.end_col, false)
+fn overlaps(a1: &Annotation, a2: &Annotation, padding: usize) -> bool {
+ num_overlap(a1.start_col, a1.end_col + padding, a2.start_col, a2.end_col, false)
}
fn emit_to_destination(rendered_buffer: &Vec<Vec<StyledString>>,
hi_opt: Option<&Loc>) {
let (lo, hi_opt) = (lo.col.to_usize(), hi_opt.map(|hi| hi.col.to_usize()));
if let Some(line) = line_opt {
- if line.len() > lo {
+ if let Some(lo) = line.char_indices().map(|(i, _)| i).nth(lo) {
+ let hi_opt = hi_opt.and_then(|hi| line.char_indices().map(|(i, _)| i).nth(hi));
buf.push_str(match hi_opt {
Some(hi) => &line[lo..hi],
None => &line[lo..],
clean_work_products.insert(wp.clone());
}
- tcx.dep_graph.with_task(n, || ()); // create the node with no inputs
+ tcx.dep_graph.with_task(n, (), (), create_node);
+
+ fn create_node((): (), (): ()) {
+ // just create the node with no inputs
+ }
}
}
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedUnsafe {
fn check_expr(&mut self, cx: &LateContext, e: &hir::Expr) {
+ /// Return the NodeId for an enclosing scope that is also `unsafe`
+ fn is_enclosed(cx: &LateContext, id: ast::NodeId) -> Option<(String, ast::NodeId)> {
+ let parent_id = cx.tcx.hir.get_parent_node(id);
+ if parent_id != id {
+ if cx.tcx.used_unsafe.borrow().contains(&parent_id) {
+ Some(("block".to_string(), parent_id))
+ } else if let Some(hir::map::NodeItem(&hir::Item {
+ node: hir::ItemFn(_, hir::Unsafety::Unsafe, _, _, _, _),
+ ..
+ })) = cx.tcx.hir.find(parent_id) {
+ Some(("fn".to_string(), parent_id))
+ } else {
+ is_enclosed(cx, parent_id)
+ }
+ } else {
+ None
+ }
+ }
if let hir::ExprBlock(ref blk) = e.node {
// Don't warn about generated blocks, that'll just pollute the output.
if blk.rules == hir::UnsafeBlock(hir::UserProvided) &&
!cx.tcx.used_unsafe.borrow().contains(&blk.id) {
- cx.span_lint(UNUSED_UNSAFE, blk.span, "unnecessary `unsafe` block");
+
+ let mut db = cx.struct_span_lint(UNUSED_UNSAFE, blk.span,
+ "unnecessary `unsafe` block");
+
+ db.span_label(blk.span, &"unnecessary `unsafe` block");
+ if let Some((kind, id)) = is_enclosed(cx, blk.id) {
+ db.span_note(cx.tcx.hir.span(id),
+ &format!("because it's nested under this `unsafe` {}", kind));
+ }
+ db.emit();
}
}
}
}
let is_osx = sess.target.target.options.is_like_osx;
if lib.kind == cstore::NativeFramework && !is_osx {
- let msg = "native frameworks are only available on OSX targets";
+ let msg = "native frameworks are only available on macOS targets";
match span {
Some(span) => span_err!(sess, span, E0455, "{}", msg),
None => sess.err(msg),
sess.imported_macro_spans.borrow_mut()
.insert(local_span, (name.to_string(), data.get_span(id.index, sess)));
- LoadedMacro::MacroRules(ast::MacroDef {
+ LoadedMacro::MacroDef(ast::Item {
ident: ast::Ident::with_empty_ctxt(name),
id: ast::DUMMY_NODE_ID,
span: local_span,
attrs: attrs,
- body: body.into(),
+ node: ast::ItemKind::MacroDef(body.into()),
+ vis: ast::Visibility::Inherited,
})
}
"##,
E0455: r##"
-Linking with `kind=framework` is only supported when targeting OS X,
+Linking with `kind=framework` is only supported when targeting macOS,
as frameworks are specific to that operating system.
Erroneous code example:
//
// And here we run into yet another obscure archive bug: in which metadata
// loaded from archives may have trailing garbage bytes. Awhile back one of
-// our tests was failing sporadically on the OSX 64-bit builders (both nopt
+// our tests was failing sporadically on the macOS 64-bit builders (both nopt
// and opt) by having ebml generate an out-of-bounds panic when looking at
// metadata.
//
lib.display()));
continue;
}
+
+ // Ok so at this point we've determined that `(lib, kind)` above is
+ // a candidate crate to load, and that `slot` is either none (this
+ // is the first crate of its kind) or if some the previous path has
+ // the exact same hash (e.g. it's the exact same crate).
+ //
+ // In principle these two candidate crates are exactly the same so
+ // we can choose either of them to link. As a stupidly gross hack,
+        // however, we favor the crate in the sysroot.
+ //
+ // You can find more info in rust-lang/rust#39518 and various linked
+ // issues, but the general gist is that during testing libstd the
+        // compiler has two candidates to choose from: one in the sysroot
+ // and one in the deps folder. These two crates are the exact same
+ // crate but if the compiler chooses the one in the deps folder
+ // it'll cause spurious errors on Windows.
+ //
+ // As a result, we favor the sysroot crate here. Note that the
+ // candidates are all canonicalized, so we canonicalize the sysroot
+ // as well.
+ if let Some((ref prev, _)) = ret {
+ let sysroot = self.sess.sysroot();
+ let sysroot = sysroot.canonicalize()
+ .unwrap_or(sysroot.to_path_buf());
+ if prev.starts_with(&sysroot) {
+ continue
+ }
+ }
*slot = Some((hash, metadata));
ret = Some((lib, kind));
}
(https://github.com/rust-lang/rust/issues/39283)");
}
- if temp_lifetime.is_some() {
+ if !expr_ty.is_never() && temp_lifetime.is_some() {
this.cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageLive(temp.clone())
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! MIR-based callgraph.
+//!
+//! This only considers direct calls
+
+use rustc::hir::def_id::DefId;
+use rustc_data_structures::graph;
+
+use rustc::mir::*;
+use rustc::mir::visit::*;
+
+use rustc::ty;
+
+use rustc::util::nodemap::DefIdMap;
+
+pub struct CallGraph {
+ node_map: DefIdMap<graph::NodeIndex>,
+ graph: graph::Graph<DefId, ()>
+}
+
+impl CallGraph {
+ // FIXME: allow for construction of a callgraph that inspects
+ // cross-crate MIRs if available.
+ pub fn build<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>) -> CallGraph {
+ let def_ids = tcx.maps.mir.borrow().keys();
+
+ let mut callgraph = CallGraph {
+ node_map: DefIdMap(),
+ graph: graph::Graph::new()
+ };
+
+ for def_id in def_ids {
+ if !def_id.is_local() { continue; }
+
+ let idx = callgraph.add_node(def_id);
+
+ let mut call_visitor = CallVisitor {
+ caller: idx,
+ graph: &mut callgraph
+ };
+
+ let mir = tcx.item_mir(def_id);
+ call_visitor.visit_mir(&mir);
+ }
+
+ callgraph
+ }
+
+ // Iterate over the strongly-connected components of the graph
+ pub fn scc_iter(&self) -> SCCIterator {
+ SCCIterator::new(&self.graph)
+ }
+
+ // Get the def_id for the given graph node
+ pub fn def_id(&self, node: graph::NodeIndex) -> DefId {
+ *self.graph.node_data(node)
+ }
+
+ fn add_node(&mut self, id: DefId) -> graph::NodeIndex {
+ let graph = &mut self.graph;
+ *self.node_map.entry(id).or_insert_with(|| {
+ graph.add_node(id)
+ })
+ }
+}
+
+struct CallVisitor<'a> {
+ caller: graph::NodeIndex,
+ graph: &'a mut CallGraph
+}
+
+impl<'a, 'tcx> Visitor<'tcx> for CallVisitor<'a> {
+ fn visit_terminator_kind(&mut self, _block: BasicBlock,
+ kind: &TerminatorKind<'tcx>, _loc: Location) {
+ if let TerminatorKind::Call {
+ func: Operand::Constant(ref f)
+ , .. } = *kind {
+ if let ty::TyFnDef(def_id, _, _) = f.ty.sty {
+ let callee = self.graph.add_node(def_id);
+ self.graph.graph.add_edge(self.caller, callee, ());
+ }
+ }
+ }
+}
+
+struct StackElement<'g> {
+ node: graph::NodeIndex,
+ lowlink: usize,
+ children: graph::AdjacentTargets<'g, DefId, ()>
+}
+
+/**
+ * Iterator over strongly-connected-components using Tarjan's algorithm[1]
+ *
+ * [1]: https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
+ */
+pub struct SCCIterator<'g> {
+ graph: &'g graph::Graph<DefId, ()>,
+ index: usize,
+ node_indices: Vec<Option<usize>>,
+ scc_stack: Vec<graph::NodeIndex>,
+ current_scc: Vec<graph::NodeIndex>,
+ visit_stack: Vec<StackElement<'g>>,
+}
+
+impl<'g> SCCIterator<'g> {
+ pub fn new(graph: &'g graph::Graph<DefId, ()>) -> SCCIterator<'g> {
+ if graph.len_nodes() == 0 {
+ return SCCIterator {
+ graph: graph,
+ index: 0,
+ node_indices: Vec::new(),
+ scc_stack: Vec::new(),
+ current_scc: Vec::new(),
+ visit_stack: Vec::new()
+ };
+ }
+
+ let first = graph::NodeIndex(0);
+
+ SCCIterator::with_entry(graph, first)
+ }
+
+ pub fn with_entry(graph: &'g graph::Graph<DefId, ()>,
+ entry: graph::NodeIndex) -> SCCIterator<'g> {
+ let mut iter = SCCIterator {
+ graph: graph,
+ index: 0,
+ node_indices: Vec::with_capacity(graph.len_nodes()),
+ scc_stack: Vec::new(),
+ current_scc: Vec::new(),
+ visit_stack: Vec::new()
+ };
+
+ iter.visit_one(entry);
+
+ iter
+ }
+
+ fn get_next(&mut self) {
+ self.current_scc.clear();
+
+ while !self.visit_stack.is_empty() {
+ self.visit_children();
+
+ let node = self.visit_stack.pop().unwrap();
+
+ if let Some(last) = self.visit_stack.last_mut() {
+ if last.lowlink > node.lowlink {
+ last.lowlink = node.lowlink;
+ }
+ }
+
+ debug!("TarjanSCC: Popped node {:?} : lowlink = {:?}; index = {:?}",
+ node.node, node.lowlink, self.node_index(node.node).unwrap());
+
+ if node.lowlink != self.node_index(node.node).unwrap() {
+ continue;
+ }
+
+ loop {
+ let n = self.scc_stack.pop().unwrap();
+ self.current_scc.push(n);
+ self.set_node_index(n, !0);
+ if n == node.node { return; }
+ }
+ }
+ }
+
+ fn visit_one(&mut self, node: graph::NodeIndex) {
+ self.index += 1;
+ let idx = self.index;
+ self.set_node_index(node, idx);
+ self.scc_stack.push(node);
+ self.visit_stack.push(StackElement {
+ node: node,
+ lowlink: self.index,
+ children: self.graph.successor_nodes(node)
+ });
+ debug!("TarjanSCC: Node {:?} : index = {:?}", node, idx);
+ }
+
+ fn visit_children(&mut self) {
+ while let Some(child) = self.visit_stack.last_mut().unwrap().children.next() {
+ if let Some(child_num) = self.node_index(child) {
+ let cur = self.visit_stack.last_mut().unwrap();
+ if cur.lowlink > child_num {
+ cur.lowlink = child_num;
+ }
+ } else {
+ self.visit_one(child);
+ }
+ }
+ }
+
+ fn node_index(&self, node: graph::NodeIndex) -> Option<usize> {
+ self.node_indices.get(node.node_id()).and_then(|&idx| idx)
+ }
+
+ fn set_node_index(&mut self, node: graph::NodeIndex, idx: usize) {
+ let i = node.node_id();
+ if i >= self.node_indices.len() {
+ self.node_indices.resize(i + 1, None);
+ }
+ self.node_indices[i] = Some(idx);
+ }
+}
+
+impl<'g> Iterator for SCCIterator<'g> {
+ type Item = Vec<graph::NodeIndex>;
+
+ fn next(&mut self) -> Option<Vec<graph::NodeIndex>> {
+ self.get_next();
+
+ if self.current_scc.is_empty() {
+ // Try a new root for the next SCC, if the node_indices
+            // map doesn't contain all nodes, use the smallest one
+ // with no entry, otherwise find the first empty node.
+ //
+ // FIXME: This should probably use a set of precomputed
+ // roots instead
+ if self.node_indices.len() < self.graph.len_nodes() {
+ let idx = graph::NodeIndex(self.node_indices.len());
+ self.visit_one(idx);
+ } else {
+ for idx in 0..self.node_indices.len() {
+ if self.node_indices[idx].is_none() {
+ let idx = graph::NodeIndex(idx);
+ self.visit_one(idx);
+ break;
+ }
+ }
+ }
+ self.get_next();
+ }
+
+ if self.current_scc.is_empty() {
+ None
+ } else {
+ Some(self.current_scc.clone())
+ }
+ }
+}
pub mod diagnostics;
pub mod build;
+pub mod callgraph;
pub mod def_use;
pub mod graphviz;
mod hair;
pub fn provide(providers: &mut Providers) {
mir_map::provide(providers);
transform::qualify_consts::provide(providers);
-}
+}
\ No newline at end of file
use std::mem;
pub fn build_mir_for_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- tcx.dep_graph.with_task(DepNode::MirKrate, || {
+ tcx.dep_graph.with_task(DepNode::MirKrate, tcx, (), build_mir_for_crate_task);
+
+ fn build_mir_for_crate_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
tcx.item_mir(body_owner_def_id);
});
- });
+ }
}
pub fn provide(providers: &mut Providers) {
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Inlining pass for MIR functions
+
+use rustc::hir::def_id::DefId;
+
+use rustc_data_structures::bitvec::BitVector;
+use rustc_data_structures::indexed_vec::{Idx, IndexVec};
+use rustc_data_structures::graph;
+
+use rustc::dep_graph::DepNode;
+use rustc::mir::*;
+use rustc::mir::transform::{MirMapPass, MirPassHook, MirSource, Pass};
+use rustc::mir::visit::*;
+use rustc::traits;
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::subst::{Subst,Substs};
+use rustc::util::nodemap::{DefIdSet};
+
+use super::simplify::{remove_dead_blocks, CfgSimplifier};
+
+use syntax::{attr};
+use syntax::abi::Abi;
+
+use callgraph;
+
+const DEFAULT_THRESHOLD: usize = 50;
+const HINT_THRESHOLD: usize = 100;
+
+const INSTR_COST: usize = 5;
+const CALL_PENALTY: usize = 25;
+
+const UNKNOWN_SIZE_COST: usize = 10;
+
+pub struct Inline;
+
+impl<'tcx> MirMapPass<'tcx> for Inline {
+ fn run_pass<'a>(
+ &mut self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ hooks: &mut [Box<for<'s> MirPassHook<'s>>]) {
+
+ if tcx.sess.opts.debugging_opts.mir_opt_level < 2 { return; }
+
+ let _ignore = tcx.dep_graph.in_ignore();
+
+ let callgraph = callgraph::CallGraph::build(tcx);
+
+ let mut inliner = Inliner {
+ tcx: tcx,
+ };
+
+ let def_ids = tcx.maps.mir.borrow().keys();
+ for &def_id in &def_ids {
+ if !def_id.is_local() { continue; }
+
+ let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+ let mut mir = if let Some(mir) = tcx.maps.mir.borrow().get(&def_id) {
+ mir.borrow_mut()
+ } else {
+ continue;
+ };
+
+ tcx.dep_graph.write(DepNode::Mir(def_id));
+
+ let id = tcx.hir.as_local_node_id(def_id).unwrap();
+ let src = MirSource::from_node(tcx, id);
+
+ for hook in &mut *hooks {
+ hook.on_mir_pass(tcx, src, &mut mir, self, false);
+ }
+ }
+
+ for scc in callgraph.scc_iter() {
+ inliner.inline_scc(&callgraph, &scc);
+ }
+
+ for def_id in def_ids {
+ if !def_id.is_local() { continue; }
+
+ let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id));
+ let mut mir = tcx.maps.mir.borrow()[&def_id].borrow_mut();
+ tcx.dep_graph.write(DepNode::Mir(def_id));
+
+ let id = tcx.hir.as_local_node_id(def_id).unwrap();
+ let src = MirSource::from_node(tcx, id);
+
+ for hook in &mut *hooks {
+ hook.on_mir_pass(tcx, src, &mut mir, self, true);
+ }
+ }
+ }
+}
+
+impl<'tcx> Pass for Inline { }
+
+struct Inliner<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+}
+
+#[derive(Copy, Clone)]
+struct CallSite<'tcx> {
+ caller: DefId,
+ callee: DefId,
+ substs: &'tcx Substs<'tcx>,
+ bb: BasicBlock,
+ location: SourceInfo,
+}
+
+impl<'a, 'tcx> Inliner<'a, 'tcx> {
+ fn inline_scc(&mut self, callgraph: &callgraph::CallGraph, scc: &[graph::NodeIndex]) -> bool {
+ let mut callsites = Vec::new();
+ let mut in_scc = DefIdSet();
+
+ let mut inlined_into = DefIdSet();
+
+ for &node in scc {
+ let def_id = callgraph.def_id(node);
+
+ // Don't inspect functions from other crates
+ let id = if let Some(id) = self.tcx.hir.as_local_node_id(def_id) {
+ id
+ } else {
+ continue;
+ };
+ let src = MirSource::from_node(self.tcx, id);
+ if let MirSource::Fn(_) = src {
+ if let Some(mir) = self.tcx.maybe_item_mir(def_id) {
+ for (bb, bb_data) in mir.basic_blocks().iter_enumerated() {
+ // Don't inline calls that are in cleanup blocks.
+ if bb_data.is_cleanup { continue; }
+
+ // Only consider direct calls to functions
+ let terminator = bb_data.terminator();
+ if let TerminatorKind::Call {
+ func: Operand::Constant(ref f), .. } = terminator.kind {
+ if let ty::TyFnDef(callee_def_id, substs, _) = f.ty.sty {
+ callsites.push(CallSite {
+ caller: def_id,
+ callee: callee_def_id,
+ substs: substs,
+ bb: bb,
+ location: terminator.source_info
+ });
+ }
+ }
+ }
+
+ in_scc.insert(def_id);
+ }
+ }
+ }
+
+        // Move callsites that are in the SCC to the end so
+ // they're inlined after calls to outside the SCC
+ let mut first_call_in_scc = callsites.len();
+
+ let mut i = 0;
+ while i < first_call_in_scc {
+ let f = callsites[i].caller;
+ if in_scc.contains(&f) {
+ first_call_in_scc -= 1;
+ callsites.swap(i, first_call_in_scc);
+ } else {
+ i += 1;
+ }
+ }
+
+ let mut local_change;
+ let mut changed = false;
+
+ loop {
+ local_change = false;
+ let mut csi = 0;
+ while csi < callsites.len() {
+ let callsite = callsites[csi];
+ csi += 1;
+
+ let _task = self.tcx.dep_graph.in_task(DepNode::Mir(callsite.caller));
+ self.tcx.dep_graph.write(DepNode::Mir(callsite.caller));
+
+ let callee_mir = {
+ if let Some(callee_mir) = self.tcx.maybe_item_mir(callsite.callee) {
+ if !self.should_inline(callsite, &callee_mir) {
+ continue;
+ }
+
+ callee_mir.subst(self.tcx, callsite.substs)
+ } else {
+ continue;
+ }
+
+ };
+
+ let mut caller_mir = {
+ let map = self.tcx.maps.mir.borrow();
+ let mir = map.get(&callsite.caller).unwrap();
+ mir.borrow_mut()
+ };
+
+ let start = caller_mir.basic_blocks().len();
+
+ if !self.inline_call(callsite, &mut caller_mir, callee_mir) {
+ continue;
+ }
+
+ inlined_into.insert(callsite.caller);
+
+ // Add callsites from inlined function
+ for (bb, bb_data) in caller_mir.basic_blocks().iter_enumerated().skip(start) {
+ // Only consider direct calls to functions
+ let terminator = bb_data.terminator();
+ if let TerminatorKind::Call {
+ func: Operand::Constant(ref f), .. } = terminator.kind {
+ if let ty::TyFnDef(callee_def_id, substs, _) = f.ty.sty {
+ // Don't inline the same function multiple times.
+ if callsite.callee != callee_def_id {
+ callsites.push(CallSite {
+ caller: callsite.caller,
+ callee: callee_def_id,
+ substs: substs,
+ bb: bb,
+ location: terminator.source_info
+ });
+ }
+ }
+ }
+ }
+
+ csi -= 1;
+ if scc.len() == 1 {
+ callsites.swap_remove(csi);
+ } else {
+ callsites.remove(csi);
+ }
+
+ local_change = true;
+ changed = true;
+ }
+
+ if !local_change {
+ break;
+ }
+ }
+
+ // Simplify functions we inlined into.
+ for def_id in inlined_into {
+ let _task = self.tcx.dep_graph.in_task(DepNode::Mir(def_id));
+ self.tcx.dep_graph.write(DepNode::Mir(def_id));
+
+ let mut caller_mir = {
+ let map = self.tcx.maps.mir.borrow();
+ let mir = map.get(&def_id).unwrap();
+ mir.borrow_mut()
+ };
+
+ debug!("Running simplify cfg on {:?}", def_id);
+ CfgSimplifier::new(&mut caller_mir).simplify();
+ remove_dead_blocks(&mut caller_mir);
+ }
+ changed
+ }
+
+ fn should_inline(&self, callsite: CallSite<'tcx>,
+ callee_mir: &'a Mir<'tcx>) -> bool {
+
+ let tcx = self.tcx;
+
+ // Don't inline closures that have captures
+ // FIXME: Handle closures better
+ if callee_mir.upvar_decls.len() > 0 {
+ return false;
+ }
+
+
+ let attrs = tcx.get_attrs(callsite.callee);
+ let hint = attr::find_inline_attr(None, &attrs[..]);
+
+ let hinted = match hint {
+ // Just treat inline(always) as a hint for now,
+ // there are cases that prevent inlining that we
+ // need to check for first.
+ attr::InlineAttr::Always => true,
+ attr::InlineAttr::Never => return false,
+ attr::InlineAttr::Hint => true,
+ attr::InlineAttr::None => false,
+ };
+
+ // Only inline local functions if they would be eligible for cross-crate
+ // inlining. This is to ensure that the final crate doesn't have MIR that
+        // references unexported symbols
+ if callsite.callee.is_local() {
+ if callsite.substs.types().count() == 0 && !hinted {
+ return false;
+ }
+ }
+
+ let mut threshold = if hinted {
+ HINT_THRESHOLD
+ } else {
+ DEFAULT_THRESHOLD
+ };
+
+ // Significantly lower the threshold for inlining cold functions
+ if attr::contains_name(&attrs[..], "cold") {
+ threshold /= 5;
+ }
+
+        // Give a bonus to functions with a small number of blocks,
+ // We normally have two or three blocks for even
+ // very small functions.
+ if callee_mir.basic_blocks().len() <= 3 {
+ threshold += threshold / 4;
+ }
+
+ // FIXME: Give a bonus to functions with only a single caller
+
+ let id = tcx.hir.as_local_node_id(callsite.caller).expect("Caller not local");
+ let param_env = ty::ParameterEnvironment::for_item(tcx, id);
+
+ let mut first_block = true;
+ let mut cost = 0;
+
+ // Traverse the MIR manually so we can account for the effects of
+ // inlining on the CFG.
+ let mut work_list = vec![START_BLOCK];
+ let mut visited = BitVector::new(callee_mir.basic_blocks().len());
+ while let Some(bb) = work_list.pop() {
+ if !visited.insert(bb.index()) { continue; }
+ let blk = &callee_mir.basic_blocks()[bb];
+
+ for stmt in &blk.statements {
+ // Don't count StorageLive/StorageDead in the inlining cost.
+ match stmt.kind {
+ StatementKind::StorageLive(_) |
+ StatementKind::StorageDead(_) |
+ StatementKind::Nop => {}
+ _ => cost += INSTR_COST
+ }
+ }
+ let term = blk.terminator();
+ let mut is_drop = false;
+ match term.kind {
+ TerminatorKind::Drop { ref location, target, unwind } |
+ TerminatorKind::DropAndReplace { ref location, target, unwind, .. } => {
+ is_drop = true;
+ work_list.push(target);
+ // If the location doesn't actually need dropping, treat it like
+ // a regular goto.
+ let ty = location.ty(&callee_mir, tcx).subst(tcx, callsite.substs);
+ let ty = ty.to_ty(tcx);
+ if tcx.type_needs_drop_given_env(ty, ¶m_env) {
+ cost += CALL_PENALTY;
+ if let Some(unwind) = unwind {
+ work_list.push(unwind);
+ }
+ } else {
+ cost += INSTR_COST;
+ }
+ }
+
+ TerminatorKind::Unreachable |
+ TerminatorKind::Call { destination: None, .. } if first_block => {
+ // If the function always diverges, don't inline
+ // unless the cost is zero
+ threshold = 0;
+ }
+
+ TerminatorKind::Call {func: Operand::Constant(ref f), .. } => {
+ if let ty::TyFnDef(.., f) = f.ty.sty {
+ // Don't give intrinsics the extra penalty for calls
+ if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic {
+ cost += INSTR_COST;
+ } else {
+ cost += CALL_PENALTY;
+ }
+ }
+ }
+ TerminatorKind::Assert { .. } => cost += CALL_PENALTY,
+ _ => cost += INSTR_COST
+ }
+
+ if !is_drop {
+ for &succ in &term.successors()[..] {
+ work_list.push(succ);
+ }
+ }
+
+ first_block = false;
+ }
+
+ // Count up the cost of local variables and temps, if we know the size
+ // use that, otherwise we use a moderately-large dummy cost.
+
+ let ptr_size = tcx.data_layout.pointer_size.bytes();
+
+ for v in callee_mir.vars_and_temps_iter() {
+ let v = &callee_mir.local_decls[v];
+ let ty = v.ty.subst(tcx, callsite.substs);
+ // Cost of the var is the size in machine-words, if we know
+ // it.
+ if let Some(size) = type_size_of(tcx, param_env.clone(), ty) {
+ cost += (size / ptr_size) as usize;
+ } else {
+ cost += UNKNOWN_SIZE_COST;
+ }
+ }
+
+ debug!("Inline cost for {:?} is {}", callsite.callee, cost);
+
+ if let attr::InlineAttr::Always = hint {
+ true
+ } else {
+ cost <= threshold
+ }
+ }
+
+
+ fn inline_call(&self, callsite: CallSite<'tcx>,
+ caller_mir: &mut Mir<'tcx>, mut callee_mir: Mir<'tcx>) -> bool {
+
+ // Don't inline a function into itself
+ if callsite.caller == callsite.callee { return false; }
+
+ let _task = self.tcx.dep_graph.in_task(DepNode::Mir(callsite.caller));
+
+
+ let terminator = caller_mir[callsite.bb].terminator.take().unwrap();
+ match terminator.kind {
+ // FIXME: Handle inlining of diverging calls
+ TerminatorKind::Call { args, destination: Some(destination), cleanup, .. } => {
+
+ debug!("Inlined {:?} into {:?}", callsite.callee, callsite.caller);
+
+ let is_box_free = Some(callsite.callee) == self.tcx.lang_items.box_free_fn();
+
+ let mut local_map = IndexVec::with_capacity(callee_mir.local_decls.len());
+ let mut scope_map = IndexVec::with_capacity(callee_mir.visibility_scopes.len());
+ let mut promoted_map = IndexVec::with_capacity(callee_mir.promoted.len());
+
+ for mut scope in callee_mir.visibility_scopes.iter().cloned() {
+ if scope.parent_scope.is_none() {
+ scope.parent_scope = Some(callsite.location.scope);
+ scope.span = callee_mir.span;
+ }
+
+ scope.span = callsite.location.span;
+
+ let idx = caller_mir.visibility_scopes.push(scope);
+ scope_map.push(idx);
+ }
+
+ for loc in callee_mir.vars_and_temps_iter() {
+ let mut local = callee_mir.local_decls[loc].clone();
+
+ if let Some(ref mut source_info) = local.source_info {
+ source_info.scope = scope_map[source_info.scope];
+
+ source_info.span = callsite.location.span;
+ }
+
+ let idx = caller_mir.local_decls.push(local);
+ local_map.push(idx);
+ }
+
+ for p in callee_mir.promoted.iter().cloned() {
+ let idx = caller_mir.promoted.push(p);
+ promoted_map.push(idx);
+ }
+
+ // If the call is something like `a[*i] = f(i)`, where
+ // `i : &mut usize`, then just duplicating the `a[*i]`
+ // Lvalue could result in two different locations if `f`
+ // writes to `i`. To prevent this we need to create a temporary
+ // borrow of the lvalue and pass the destination as `*temp` instead.
+ fn dest_needs_borrow(lval: &Lvalue) -> bool {
+ match *lval {
+ Lvalue::Projection(ref p) => {
+ match p.elem {
+ ProjectionElem::Deref |
+ ProjectionElem::Index(_) => true,
+ _ => dest_needs_borrow(&p.base)
+ }
+ }
+ // Static variables need a borrow because the callee
+ // might modify the same static.
+ Lvalue::Static(_) => true,
+ _ => false
+ }
+ }
+
+ let dest = if dest_needs_borrow(&destination.0) {
+ debug!("Creating temp for return destination");
+ let dest = Rvalue::Ref(
+ self.tcx.mk_region(ty::ReErased),
+ BorrowKind::Mut,
+ destination.0);
+
+ let ty = dest.ty(caller_mir, self.tcx);
+
+ let temp = LocalDecl::new_temp(ty);
+
+ let tmp = caller_mir.local_decls.push(temp);
+ let tmp = Lvalue::Local(tmp);
+
+ let stmt = Statement {
+ source_info: callsite.location,
+ kind: StatementKind::Assign(tmp.clone(), dest)
+ };
+ caller_mir[callsite.bb]
+ .statements.push(stmt);
+ tmp.deref()
+ } else {
+ destination.0
+ };
+
+ let return_block = destination.1;
+
+ let args : Vec<_> = if is_box_free {
+ assert!(args.len() == 1);
+ // box_free takes a Box, but is defined with a *mut T, inlining
+ // needs to generate the cast.
+ // FIXME: we should probably just generate correct MIR in the first place...
+
+ let arg = if let Operand::Consume(ref lval) = args[0] {
+ lval.clone()
+ } else {
+ bug!("Constant arg to \"box_free\"");
+ };
+
+ let ptr_ty = args[0].ty(caller_mir, self.tcx);
+ vec![self.cast_box_free_arg(arg, ptr_ty, &callsite, caller_mir)]
+ } else {
+ // Copy the arguments if needed.
+ self.make_call_args(args, &callsite, caller_mir)
+ };
+
+ let bb_len = caller_mir.basic_blocks().len();
+ let mut integrator = Integrator {
+ block_idx: bb_len,
+ args: &args,
+ local_map: local_map,
+ scope_map: scope_map,
+ promoted_map: promoted_map,
+ _callsite: callsite,
+ destination: dest,
+ return_block: return_block,
+ cleanup_block: cleanup,
+ in_cleanup_block: false
+ };
+
+
+ for (bb, mut block) in callee_mir.basic_blocks_mut().drain_enumerated(..) {
+ integrator.visit_basic_block_data(bb, &mut block);
+ caller_mir.basic_blocks_mut().push(block);
+ }
+
+ let terminator = Terminator {
+ source_info: callsite.location,
+ kind: TerminatorKind::Goto { target: BasicBlock::new(bb_len) }
+ };
+
+ caller_mir[callsite.bb].terminator = Some(terminator);
+
+ true
+ }
+ kind => {
+ caller_mir[callsite.bb].terminator = Some(Terminator {
+ source_info: terminator.source_info,
+ kind: kind
+ });
+ false
+ }
+ }
+ }
+
+ fn cast_box_free_arg(&self, arg: Lvalue<'tcx>, ptr_ty: Ty<'tcx>,
+ callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Operand<'tcx> {
+ let arg = Rvalue::Ref(
+ self.tcx.mk_region(ty::ReErased),
+ BorrowKind::Mut,
+ arg.deref());
+
+ let ty = arg.ty(caller_mir, self.tcx);
+ let ref_tmp = LocalDecl::new_temp(ty);
+ let ref_tmp = caller_mir.local_decls.push(ref_tmp);
+ let ref_tmp = Lvalue::Local(ref_tmp);
+
+ let ref_stmt = Statement {
+ source_info: callsite.location,
+ kind: StatementKind::Assign(ref_tmp.clone(), arg)
+ };
+
+ caller_mir[callsite.bb]
+ .statements.push(ref_stmt);
+
+ let pointee_ty = match ptr_ty.sty {
+ ty::TyRawPtr(tm) | ty::TyRef(_, tm) => tm.ty,
+ _ if ptr_ty.is_box() => ptr_ty.boxed_ty(),
+ _ => bug!("Invalid type `{:?}` for call to box_free", ptr_ty)
+ };
+ let ptr_ty = self.tcx.mk_mut_ptr(pointee_ty);
+
+ let raw_ptr = Rvalue::Cast(CastKind::Misc, Operand::Consume(ref_tmp), ptr_ty);
+
+ let cast_tmp = LocalDecl::new_temp(ptr_ty);
+ let cast_tmp = caller_mir.local_decls.push(cast_tmp);
+ let cast_tmp = Lvalue::Local(cast_tmp);
+
+ let cast_stmt = Statement {
+ source_info: callsite.location,
+ kind: StatementKind::Assign(cast_tmp.clone(), raw_ptr)
+ };
+
+ caller_mir[callsite.bb]
+ .statements.push(cast_stmt);
+
+ Operand::Consume(cast_tmp)
+ }
+
+ fn make_call_args(&self, args: Vec<Operand<'tcx>>,
+ callsite: &CallSite<'tcx>, caller_mir: &mut Mir<'tcx>) -> Vec<Operand<'tcx>> {
+ let tcx = self.tcx;
+ // FIXME: Analysis of the usage of the arguments to avoid
+ // unnecessary temporaries.
+ args.into_iter().map(|a| {
+ if let Operand::Consume(Lvalue::Local(local)) = a {
+ if caller_mir.local_kind(local) == LocalKind::Temp {
+ // Reuse the operand if it's a temporary already
+ return a;
+ }
+ }
+
+ debug!("Creating temp for argument");
+ // Otherwise, create a temporary for the arg
+ let arg = Rvalue::Use(a);
+
+ let ty = arg.ty(caller_mir, tcx);
+
+ let arg_tmp = LocalDecl::new_temp(ty);
+ let arg_tmp = caller_mir.local_decls.push(arg_tmp);
+ let arg_tmp = Lvalue::Local(arg_tmp);
+
+ let stmt = Statement {
+ source_info: callsite.location,
+ kind: StatementKind::Assign(arg_tmp.clone(), arg)
+ };
+ caller_mir[callsite.bb].statements.push(stmt);
+ Operand::Consume(arg_tmp)
+ }).collect()
+ }
+}
+
+fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParameterEnvironment<'tcx>,
+ ty: Ty<'tcx>) -> Option<u64> {
+ tcx.infer_ctxt(param_env, traits::Reveal::All).enter(|infcx| {
+ ty.layout(&infcx).ok().map(|layout| {
+ layout.size(&tcx.data_layout).bytes()
+ })
+ })
+}
+
+/**
+ * Integrator.
+ *
+ * Integrates blocks from the callee function into the calling function.
+ * Updates block indices, references to locals and other control flow
+ * stuff.
+ */
+struct Integrator<'a, 'tcx: 'a> {
+ block_idx: usize,
+ args: &'a [Operand<'tcx>],
+ local_map: IndexVec<Local, Local>,
+ scope_map: IndexVec<VisibilityScope, VisibilityScope>,
+ promoted_map: IndexVec<Promoted, Promoted>,
+ _callsite: CallSite<'tcx>,
+ destination: Lvalue<'tcx>,
+ return_block: BasicBlock,
+ cleanup_block: Option<BasicBlock>,
+ in_cleanup_block: bool,
+}
+
+impl<'a, 'tcx> Integrator<'a, 'tcx> {
+ fn update_target(&self, tgt: BasicBlock) -> BasicBlock {
+ let new = BasicBlock::new(tgt.index() + self.block_idx);
+ debug!("Updating target `{:?}`, new: `{:?}`", tgt, new);
+ new
+ }
+
+ fn update_local(&self, local: Local) -> Option<Local> {
+ let idx = local.index();
+ if idx < (self.args.len() + 1) {
+ return None;
+ }
+ let idx = idx - (self.args.len() + 1);
+ let local = Local::new(idx);
+ self.local_map.get(local).cloned()
+ }
+
+ fn arg_index(&self, arg: Local) -> Option<usize> {
+ let idx = arg.index();
+ if idx > 0 && idx <= self.args.len() {
+ Some(idx - 1)
+ } else {
+ None
+ }
+ }
+}
+
+impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> {
+ fn visit_lvalue(&mut self,
+ lvalue: &mut Lvalue<'tcx>,
+ _ctxt: LvalueContext<'tcx>,
+ _location: Location) {
+ if let Lvalue::Local(ref mut local) = *lvalue {
+ if let Some(l) = self.update_local(*local) {
+ // Temp or Var; update the local reference
+ *local = l;
+ return;
+ }
+ }
+ if let Lvalue::Local(local) = *lvalue {
+ if local == RETURN_POINTER {
+ // Return pointer; update the lvalue itself
+ *lvalue = self.destination.clone();
+ } else if local.index() < (self.args.len() + 1) {
+ // Argument, once again update the lvalue itself
+ let idx = local.index() - 1;
+ if let Operand::Consume(ref lval) = self.args[idx] {
+ *lvalue = lval.clone();
+ } else {
+ bug!("Arg operand `{:?}` is not an Lvalue use.", idx)
+ }
+ }
+ } else {
+ self.super_lvalue(lvalue, _ctxt, _location)
+ }
+ }
+
+ fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
+ if let Operand::Consume(Lvalue::Local(arg)) = *operand {
+ if let Some(idx) = self.arg_index(arg) {
+ let new_arg = self.args[idx].clone();
+ *operand = new_arg;
+ return;
+ }
+ }
+ self.super_operand(operand, location);
+ }
+
+ fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
+ self.in_cleanup_block = data.is_cleanup;
+ self.super_basic_block_data(block, data);
+ self.in_cleanup_block = false;
+ }
+
+ fn visit_terminator_kind(&mut self, block: BasicBlock,
+ kind: &mut TerminatorKind<'tcx>, loc: Location) {
+ self.super_terminator_kind(block, kind, loc);
+
+ match *kind {
+ TerminatorKind::Goto { ref mut target} => {
+ *target = self.update_target(*target);
+ }
+ TerminatorKind::SwitchInt { ref mut targets, .. } => {
+ for tgt in targets {
+ *tgt = self.update_target(*tgt);
+ }
+ }
+ TerminatorKind::Drop { ref mut target, ref mut unwind, .. } |
+ TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
+ *target = self.update_target(*target);
+ if let Some(tgt) = *unwind {
+ *unwind = Some(self.update_target(tgt));
+ } else if !self.in_cleanup_block {
+ // Unless this drop is in a cleanup block, add an unwind edge to
+ // the original call's cleanup block
+ *unwind = self.cleanup_block;
+ }
+ }
+ TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
+ if let Some((_, ref mut tgt)) = *destination {
+ *tgt = self.update_target(*tgt);
+ }
+ if let Some(tgt) = *cleanup {
+ *cleanup = Some(self.update_target(tgt));
+ } else if !self.in_cleanup_block {
+ // Unless this call is in a cleanup block, add an unwind edge to
+ // the original call's cleanup block
+ *cleanup = self.cleanup_block;
+ }
+ }
+ TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
+ *target = self.update_target(*target);
+ if let Some(tgt) = *cleanup {
+ *cleanup = Some(self.update_target(tgt));
+ } else if !self.in_cleanup_block {
+ // Unless this assert is in a cleanup block, add an unwind edge to
+ // the original call's cleanup block
+ *cleanup = self.cleanup_block;
+ }
+ }
+ TerminatorKind::Return => {
+ *kind = TerminatorKind::Goto { target: self.return_block };
+ }
+ TerminatorKind::Resume => {
+ if let Some(tgt) = self.cleanup_block {
+ *kind = TerminatorKind::Goto { target: tgt }
+ }
+ }
+ TerminatorKind::Unreachable => { }
+ }
+ }
+
+ fn visit_visibility_scope(&mut self, scope: &mut VisibilityScope) {
+ *scope = self.scope_map[*scope];
+ }
+
+ fn visit_literal(&mut self, literal: &mut Literal<'tcx>, loc: Location) {
+ if let Literal::Promoted { ref mut index } = *literal {
+ if let Some(p) = self.promoted_map.get(*index).cloned() {
+ *index = p;
+ }
+ } else {
+ self.super_literal(literal, loc);
+ }
+ }
+}
pub mod deaggregator;
pub mod instcombine;
pub mod copy_prop;
+pub mod inline;
}
impl<'a, 'tcx: 'a> CfgSimplifier<'a, 'tcx> {
- fn new(mir: &'a mut Mir<'tcx>) -> Self {
+ pub fn new(mir: &'a mut Mir<'tcx>) -> Self {
let mut pred_count = IndexVec::from_elem(0u32, mir.basic_blocks());
// we can't use mir.predecessors() here because that counts
}
}
- fn simplify(mut self) {
+ pub fn simplify(mut self) {
loop {
let mut changed = false;
if !changed { break }
}
+
+ self.strip_nops()
}
// Collapse a goto chain starting from `start`
terminator.kind = TerminatorKind::Goto { target: first_succ };
true
}
+
+ fn strip_nops(&mut self) {
+ for blk in self.basic_blocks.iter_mut() {
+ blk.statements.retain(|stmt| if let StatementKind::Nop = stmt.kind {
+ false
+ } else {
+ true
+ })
+ }
+ }
}
-fn remove_dead_blocks(mir: &mut Mir) {
+pub fn remove_dead_blocks(mir: &mut Mir) {
let mut seen = BitVector::new(mir.basic_blocks().len());
for (bb, _) in traversal::preorder(mir) {
seen.insert(bb.index());
fn visit_attribute(&mut self, attr: &'v ast::Attribute) {
self.record("Attribute", Id::None, attr);
}
-
- fn visit_macro_def(&mut self, macro_def: &'v ast::MacroDef) {
- self.record("MacroDef", Id::None, macro_def);
- ast_visit::walk_macro_def(self, macro_def)
- }
}
use rustc::lint::{EarlyLintPassObject, LateLintPassObject, LintId, Lint};
use rustc::session::Session;
-use rustc::mir::transform::MirMapPass;
-
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
use syntax::ext::base::MacroExpanderFn;
use syntax::symbol::Symbol;
#[doc(hidden)]
pub late_lint_passes: Vec<LateLintPassObject>,
- #[doc(hidden)]
- pub mir_passes: Vec<Box<for<'pcx> MirMapPass<'pcx>>>,
-
#[doc(hidden)]
pub lint_groups: HashMap<&'static str, Vec<LintId>>,
lint_groups: HashMap::new(),
llvm_passes: vec![],
attributes: vec![],
- mir_passes: Vec::new(),
whitelisted_custom_derives: Vec::new(),
}
}
self.lint_groups.insert(name, to.into_iter().map(|x| LintId::of(x)).collect());
}
- /// Register a MIR pass
- pub fn register_mir_pass(&mut self, pass: Box<for<'pcx> MirMapPass<'pcx>>) {
- self.mir_passes.push(pass);
- }
-
/// Register an LLVM pass.
///
/// Registration with LLVM itself is handled through static C++ objects with
use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple};
use syntax::ext::base::SyntaxExtension;
use syntax::ext::base::Determinacy::Undetermined;
-use syntax::ext::expand::mark_tts;
use syntax::ext::hygiene::Mark;
use syntax::ext::tt::macro_rules;
use syntax::parse::token;
self.define(parent, ident, TypeNS, (module, vis, sp, expansion));
self.current_module = module;
}
- ItemKind::Mac(_) => panic!("unexpanded macro in resolve!"),
+ ItemKind::MacroDef(..) | ItemKind::Mac(_) => unreachable!(),
}
}
})
}
+ pub fn macro_def_scope(&mut self, expansion: Mark) -> Module<'a> {
+ let def_id = self.macro_defs[&expansion];
+ if let Some(id) = self.definitions.as_local_node_id(def_id) {
+ self.local_macro_def_scopes[&id]
+ } else {
+ let module_def_id = ty::DefIdTree::parent(&*self, def_id).unwrap();
+ self.get_extern_crate_root(module_def_id.krate)
+ }
+ }
+
pub fn get_macro(&mut self, def: Def) -> Rc<SyntaxExtension> {
let def_id = match def {
Def::Macro(def_id, ..) => def_id,
return ext.clone();
}
- let mut macro_rules = match self.session.cstore.load_macro(def_id, &self.session) {
- LoadedMacro::MacroRules(macro_rules) => macro_rules,
+ let macro_def = match self.session.cstore.load_macro(def_id, &self.session) {
+ LoadedMacro::MacroDef(macro_def) => macro_def,
LoadedMacro::ProcMacro(ext) => return ext,
};
- let mark = Mark::fresh();
- let invocation = self.arenas.alloc_invocation_data(InvocationData {
- module: Cell::new(self.get_extern_crate_root(def_id.krate)),
- def_index: CRATE_DEF_INDEX,
- const_expr: false,
- legacy_scope: Cell::new(LegacyScope::Empty),
- expansion: Cell::new(LegacyScope::Empty),
- });
- self.invocations.insert(mark, invocation);
- macro_rules.body = mark_tts(macro_rules.stream(), mark).into();
- let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, &macro_rules));
+ let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, &macro_def));
self.macro_map.insert(def_id, ext.clone());
ext
}
fn visit_item(&mut self, item: &'a Item) {
let macro_use = match item.node {
- ItemKind::Mac(ref mac) => {
- if mac.node.path.segments.is_empty() {
- self.legacy_scope = LegacyScope::Expansion(self.visit_invoc(item.id));
- } else {
- self.resolver.define_macro(item, &mut self.legacy_scope);
- }
+ ItemKind::MacroDef(..) => {
+ self.resolver.define_macro(item, &mut self.legacy_scope);
+ return
+ }
+ ItemKind::Mac(..) => {
+ self.legacy_scope = LegacyScope::Expansion(self.visit_invoc(item.id));
return
}
ItemKind::Mod(..) => self.resolver.contains_macro_use(&item.attrs),
// Trait paths in bounds or impls.
Trait,
// Expression paths `path`, with optional parent context.
- Expr(Option<&'a ExprKind>),
+ Expr(Option<&'a Expr>),
// Paths in path patterns `Path`.
Pat,
// Paths in struct expressions and patterns `Path { .. }`.
ValueNS => "method or associated constant",
MacroNS => bug!("associated macro"),
},
- PathSource::Expr(parent) => match parent {
+ PathSource::Expr(parent) => match parent.map(|p| &p.node) {
// "function" here means "anything callable" rather than `Def::Fn`,
// this is not precise but usually more helpful than just "value".
Some(&ExprKind::Call(..)) => "function",
// We passed through a module.
ModuleRibKind(Module<'a>),
- // We passed through a `macro_rules!` statement with the given expansion
- MacroDefinition(Mark),
+ // We passed through a `macro_rules!` statement
+ MacroDefinition(DefId),
// All bindings in this rib are type parameters that can't be used
// from the default of a type parameter because they're not declared
}
}
- fn get_macro(&self, resolver: &mut Resolver<'a>) -> Rc<SyntaxExtension> {
+ fn def_ignoring_ambiguity(&self) -> Def {
match self.kind {
- NameBindingKind::Import { binding, .. } => binding.get_macro(resolver),
- NameBindingKind::Ambiguity { b1, .. } => b1.get_macro(resolver),
- _ => resolver.get_macro(self.def()),
+ NameBindingKind::Import { binding, .. } => binding.def_ignoring_ambiguity(),
+ NameBindingKind::Ambiguity { b1, .. } => b1.def_ignoring_ambiguity(),
+ _ => self.def(),
}
}
+ fn get_macro(&self, resolver: &mut Resolver<'a>) -> Rc<SyntaxExtension> {
+ resolver.get_macro(self.def_ignoring_ambiguity())
+ }
+
// We sometimes need to treat variants as `pub` for backwards compatibility
fn pseudo_vis(&self) -> ty::Visibility {
if self.is_variant() { ty::Visibility::Public } else { self.vis }
pub definitions: Definitions,
- // Maps the node id of a statement to the expansions of the `macro_rules!`s
- // immediately above the statement (if appropriate).
- macros_at_scope: FxHashMap<NodeId, Vec<Mark>>,
-
graph_root: Module<'a>,
prelude: Option<Module<'a>>,
dummy_binding: &'a NameBinding<'a>,
use_extern_macros: bool, // true if `#![feature(use_extern_macros)]`
- pub exported_macros: Vec<ast::MacroDef>,
crate_loader: &'a mut CrateLoader,
macro_names: FxHashSet<Name>,
builtin_macros: FxHashMap<Name, &'a NameBinding<'a>>,
lexical_macro_resolutions: Vec<(Name, &'a Cell<LegacyScope<'a>>)>,
macro_map: FxHashMap<DefId, Rc<SyntaxExtension>>,
+ macro_defs: FxHashMap<Mark, DefId>,
+ local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>,
macro_exports: Vec<Export>,
pub whitelisted_legacy_custom_derives: Vec<Name>,
pub found_unresolved_macro: bool,
let features = session.features.borrow();
+ let mut macro_defs = FxHashMap();
+ macro_defs.insert(Mark::root(), root_def_id);
+
Resolver {
session: session,
definitions: definitions,
- macros_at_scope: FxHashMap(),
// The outermost module has def ID 0; this is not reflected in the
// AST.
// `#![feature(proc_macro)]` implies `#[feature(extern_macros)]`
use_extern_macros: features.use_extern_macros || features.proc_macro,
- exported_macros: Vec::new(),
crate_loader: crate_loader,
macro_names: FxHashSet(),
builtin_macros: FxHashMap(),
macro_map: FxHashMap(),
macro_exports: Vec::new(),
invocations: invocations,
+ macro_defs: macro_defs,
+ local_macro_def_scopes: FxHashMap(),
name_already_seen: FxHashMap(),
whitelisted_legacy_custom_derives: Vec::new(),
proc_macro_enabled: features.proc_macro,
}
}
- if let MacroDefinition(mac) = self.ribs[ns][i].kind {
+ if let MacroDefinition(def) = self.ribs[ns][i].kind {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
- let (source_ctxt, source_macro) = ident.ctxt.source();
- if source_macro == mac {
- ident.ctxt = source_ctxt;
+ let ctxt_data = ident.ctxt.data();
+ if def == self.macro_defs[&ctxt_data.outer_mark] {
+ ident.ctxt = ctxt_data.prev_ctxt;
}
}
}
None
}
- fn resolve_crate_var(&mut self, mut crate_var_ctxt: SyntaxContext) -> Module<'a> {
- while crate_var_ctxt.source().0 != SyntaxContext::empty() {
- crate_var_ctxt = crate_var_ctxt.source().0;
+ fn resolve_crate_var(&mut self, crate_var_ctxt: SyntaxContext) -> Module<'a> {
+ let mut ctxt_data = crate_var_ctxt.data();
+ while ctxt_data.prev_ctxt != SyntaxContext::empty() {
+ ctxt_data = ctxt_data.prev_ctxt.data();
}
- let module = self.invocations[&crate_var_ctxt.source().1].module.get();
+ let module = self.macro_def_scope(ctxt_data.outer_mark);
if module.is_local() { self.graph_root } else { module }
}
NormalRibKind => {
// Continue
}
- MacroDefinition(mac) => {
+ MacroDefinition(def) => {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
- let (source_ctxt, source_macro) = ident.ctxt.source();
- if source_macro == mac {
- ident.ctxt = source_ctxt;
+ let ctxt_data = ident.ctxt.data();
+ if def == self.macro_defs[&ctxt_data.outer_mark] {
+ ident.ctxt = ctxt_data.prev_ctxt;
}
}
_ => {
}
}
- ItemKind::ExternCrate(_) => {
+ ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) => {
// do nothing, these are just around to be encoded
}
// Descend into the block.
for stmt in &block.stmts {
- if let Some(marks) = self.macros_at_scope.remove(&stmt.id) {
- num_macro_definition_ribs += marks.len() as u32;
- for mark in marks {
- self.ribs[ValueNS].push(Rib::new(MacroDefinition(mark)));
- self.label_ribs.push(Rib::new(MacroDefinition(mark)));
+ if let ast::StmtKind::Item(ref item) = stmt.node {
+ if let ast::ItemKind::MacroDef(..) = item.node {
+ num_macro_definition_ribs += 1;
+ let def = self.definitions.local_def_id(item.id);
+ self.ribs[ValueNS].push(Rib::new(MacroDefinition(def)));
+ self.label_ribs.push(Rib::new(MacroDefinition(def)));
}
}
source: PathSource)
-> PathResolution {
let segments = &path.segments.iter().map(|seg| seg.identifier).collect::<Vec<_>>();
- self.smart_resolve_path_fragment(id, qself, segments, path.span, source)
+ let ident_span = path.segments.last().map_or(path.span, |seg| seg.span);
+ self.smart_resolve_path_fragment(id, qself, segments, path.span, ident_span, source)
}
fn smart_resolve_path_fragment(&mut self,
qself: Option<&QSelf>,
path: &[Ident],
span: Span,
+ ident_span: Span,
source: PathSource)
-> PathResolution {
let ns = source.namespace();
let expected = source.descr_expected();
let path_str = names_to_string(path);
let code = source.error_code(def.is_some());
- let (base_msg, fallback_label) = if let Some(def) = def {
+ let (base_msg, fallback_label, base_span) = if let Some(def) = def {
(format!("expected {}, found {} `{}`", expected, def.kind_name(), path_str),
- format!("not a {}", expected))
+ format!("not a {}", expected), span)
} else {
let item_str = path[path.len() - 1];
let (mod_prefix, mod_str) = if path.len() == 1 {
(mod_prefix, format!("`{}`", names_to_string(mod_path)))
};
(format!("cannot find {} `{}` in {}{}", expected, item_str, mod_prefix, mod_str),
- format!("not found in {}", mod_str))
+ format!("not found in {}", mod_str), ident_span)
};
- let mut err = this.session.struct_span_err_with_code(span, &base_msg, code);
+ let mut err = this.session.struct_span_err_with_code(base_span, &base_msg, code);
// Emit special messages for unresolved `Self` and `self`.
if is_self_type(path, ns) {
err.span_label(span, &format!("type aliases cannot be used for traits"));
return err;
}
- (Def::Mod(..), PathSource::Expr(Some(parent))) => match *parent {
+ (Def::Mod(..), PathSource::Expr(Some(parent))) => match parent.node {
ExprKind::Field(_, ident) => {
- err.span_label(span, &format!("did you mean `{}::{}`?",
- path_str, ident.node));
+ err.span_label(parent.span, &format!("did you mean `{}::{}`?",
+ path_str, ident.node));
return err;
}
ExprKind::MethodCall(ident, ..) => {
- err.span_label(span, &format!("did you mean `{}::{}(...)`?",
- path_str, ident.node));
+ err.span_label(parent.span, &format!("did you mean `{}::{}(...)`?",
+ path_str, ident.node));
return err;
}
_ => {}
// Try Levenshtein if nothing else worked.
if let Some(candidate) = this.lookup_typo_candidate(path, ns, is_expected) {
- err.span_label(span, &format!("did you mean `{}`?", candidate));
+ err.span_label(ident_span, &format!("did you mean `{}`?", candidate));
return err;
}
// Fallback label.
- err.span_label(span, &fallback_label);
+ err.span_label(base_span, &fallback_label);
err
};
let report_errors = |this: &mut Self, def: Option<Def>| {
// Make sure `A::B` in `<T as A>::B::C` is a trait item.
let ns = if qself.position + 1 == path.len() { ns } else { TypeNS };
let res = self.smart_resolve_path_fragment(id, None, &path[..qself.position + 1],
- span, PathSource::TraitItem(ns));
+ span, span, PathSource::TraitItem(ns));
return Some(PathResolution::with_unresolved_segments(
res.base_def(), res.unresolved_segments() + path.len() - qself.position - 1
));
path: &[Ident],
ns: Namespace,
filter_fn: FilterFn)
- -> Option<String>
+ -> Option<Symbol>
where FilterFn: Fn(Def) -> bool
{
let add_module_candidates = |module: Module, names: &mut Vec<Name>| {
};
let mut names = Vec::new();
- let prefix_str = if path.len() == 1 {
+ if path.len() == 1 {
// Search in lexical scope.
// Walk backwards up the ribs in scope and collect candidates.
for rib in self.ribs[ns].iter().rev() {
names.push(*name);
}
}
- String::new()
} else {
// Search in module.
let mod_path = &path[..path.len() - 1];
if let PathResult::Module(module) = self.resolve_path(mod_path, Some(TypeNS), None) {
add_module_candidates(module, &mut names);
}
- names_to_string(mod_path) + "::"
- };
+ }
let name = path[path.len() - 1].name;
// Make sure error reporting is deterministic.
names.sort_by_key(|name| name.as_str());
match find_best_match_for_name(names.iter(), &name.as_str(), None) {
- Some(found) if found != name => Some(format!("{}{}", prefix_str, found)),
+ Some(found) if found != name => Some(found),
_ => None,
}
}
self.with_resolved_label(label, id, |this| this.visit_block(block));
}
- fn resolve_expr(&mut self, expr: &Expr, parent: Option<&ExprKind>) {
+ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
// First, record candidate traits for this expression if it could
// result in the invocation of a method call.
// Equivalent to `visit::walk_expr` + passing some context to children.
ExprKind::Field(ref subexpression, _) => {
- self.resolve_expr(subexpression, Some(&expr.node));
+ self.resolve_expr(subexpression, Some(expr));
}
ExprKind::MethodCall(_, ref types, ref arguments) => {
let mut arguments = arguments.iter();
- self.resolve_expr(arguments.next().unwrap(), Some(&expr.node));
+ self.resolve_expr(arguments.next().unwrap(), Some(expr));
for argument in arguments {
self.resolve_expr(argument, None);
}
});
}
ExprKind::Call(ref callee, ref arguments) => {
- self.resolve_expr(callee, Some(&expr.node));
+ self.resolve_expr(callee, Some(expr));
for argument in arguments {
self.resolve_expr(argument, None);
}
if ident.name == lookup_name && ns == namespace {
if filter_fn(name_binding.def()) {
// create the path
- let span = name_binding.span;
let mut segms = path_segments.clone();
- segms.push(ident.into());
+ segms.push(ast::PathSegment::from_ident(ident, name_binding.span));
let path = Path {
- span: span,
+ span: name_binding.span,
segments: segms,
};
// the entity is accessible in the following cases:
if let Some(module) = name_binding.module() {
// form the path
let mut path_segments = path_segments.clone();
- path_segments.push(ident.into());
+ path_segments.push(ast::PathSegment::from_ident(ident, name_binding.span));
if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
// add the module to the lookup
use rustc::hir::def::{Def, Export};
use rustc::hir::map::{self, DefCollector};
use rustc::ty;
-use std::cell::Cell;
-use std::rc::Rc;
use syntax::ast::{self, Name, Ident};
-use syntax::attr;
+use syntax::attr::{self, HasAttrs};
use syntax::errors::DiagnosticBuilder;
-use syntax::ext::base::{self, Determinacy, MultiModifier, MultiDecorator};
-use syntax::ext::base::{Resolver as SyntaxResolver, SyntaxExtension};
-use syntax::ext::base::MacroKind;
-use syntax::ext::expand::{Expansion, mark_tts};
+use syntax::ext::base::{self, Annotatable, Determinacy, MultiModifier, MultiDecorator};
+use syntax::ext::base::{MacroKind, SyntaxExtension, Resolver as SyntaxResolver};
+use syntax::ext::expand::{Expansion, ExpansionKind, Invocation, InvocationKind, find_attr_invoc};
use syntax::ext::hygiene::Mark;
+use syntax::ext::placeholders::placeholder;
use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{self, emit_feature_err, GateIssue};
use syntax::fold::{self, Folder};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
use syntax::util::lev_distance::find_best_match_for_name;
-use syntax::visit::Visitor;
use syntax_pos::{Span, DUMMY_SP};
+use std::cell::Cell;
+use std::mem;
+use std::rc::Rc;
+
#[derive(Clone)]
pub struct InvocationData<'a> {
pub module: Cell<Module<'a>>,
pub struct LegacyBinding<'a> {
pub parent: Cell<LegacyScope<'a>>,
pub name: ast::Name,
- ext: Rc<SyntaxExtension>,
+ def_id: DefId,
pub span: Span,
}
path.segments[0].identifier.name = keywords::CrateRoot.name();
let module = self.0.resolve_crate_var(ident.ctxt);
if !module.is_local() {
+ let span = path.segments[0].span;
path.segments.insert(1, match module.kind {
- ModuleKind::Def(_, name) => ast::Ident::with_empty_ctxt(name).into(),
+ ModuleKind::Def(_, name) => ast::PathSegment::from_ident(
+ ast::Ident::with_empty_ctxt(name), span
+ ),
_ => unreachable!(),
})
}
invocation.expansion.set(visitor.legacy_scope);
}
- fn add_ext(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
+ fn add_builtin(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
let def_id = DefId {
krate: BUILTIN_MACROS_CRATE,
index: DefIndex::new(self.macro_map.len()),
self.builtin_macros.insert(ident.name, binding);
}
- fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>) {
- self.macros_at_scope.insert(id, macros);
- }
-
fn resolve_imports(&mut self) {
ImportResolver { resolver: self }.resolve_imports()
}
None
}
- fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind,
- force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
+ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
+ let def = match invoc.kind {
+ InvocationKind::Attr { attr: None, .. } => return Ok(None),
+ _ => match self.resolve_invoc_to_def(invoc, scope, force) {
+ Ok(def) => def,
+ Err(determinacy) => return Err(determinacy),
+ },
+ };
+ self.macro_defs.insert(invoc.expansion_data.mark, def.def_id());
+ Ok(Some(self.get_macro(def)))
+ }
+
+ fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Rc<SyntaxExtension>, Determinacy> {
+ self.resolve_macro_to_def(scope, path, kind, force).map(|def| self.get_macro(def))
+ }
+}
+
+impl<'a> Resolver<'a> {
+ fn resolve_invoc_to_def(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Def, Determinacy> {
+ let (attr, traits, item) = match invoc.kind {
+ InvocationKind::Attr { ref mut attr, ref traits, ref mut item } => (attr, traits, item),
+ InvocationKind::Bang { ref mac, .. } => {
+ return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force);
+ }
+ InvocationKind::Derive { name, span, .. } => {
+ let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
+ return self.resolve_macro_to_def(scope, &path, MacroKind::Derive, force);
+ }
+ };
+
+ let (attr_name, path) = {
+ let attr = attr.as_ref().unwrap();
+ (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name())))
+ };
+
+ let mut determined = true;
+ match self.resolve_macro_to_def(scope, &path, MacroKind::Attr, force) {
+ Ok(def) => return Ok(def),
+ Err(Determinacy::Undetermined) => determined = false,
+ Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
+ Err(Determinacy::Determined) => {}
+ }
+
+ for &(name, span) in traits {
+ let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
+ match self.resolve_macro(scope, &path, MacroKind::Derive, force) {
+ Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
+ if inert_attrs.contains(&attr_name) {
+ // FIXME(jseyfried) Avoid `mem::replace` here.
+ let dummy_item = placeholder(ExpansionKind::Items, ast::DUMMY_NODE_ID)
+ .make_items().pop().unwrap();
+ let dummy_item = Annotatable::Item(dummy_item);
+ *item = mem::replace(item, dummy_item).map_attrs(|mut attrs| {
+ let inert_attr = attr.take().unwrap();
+ attr::mark_known(&inert_attr);
+ if self.proc_macro_enabled {
+ *attr = find_attr_invoc(&mut attrs);
+ }
+ attrs.push(inert_attr);
+ attrs
+ });
+ }
+ return Err(Determinacy::Undetermined);
+ },
+ Err(Determinacy::Undetermined) => determined = false,
+ Err(Determinacy::Determined) => {}
+ }
+ }
+
+ Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined })
+ }
+
+ fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Def, Determinacy> {
let ast::Path { ref segments, span } = *path;
if segments.iter().any(|segment| segment.parameters.is_some()) {
let kind =
return Err(Determinacy::Determined);
}
- let ext = match self.resolve_path(&path, Some(MacroNS), None) {
+ let def = match self.resolve_path(&path, Some(MacroNS), None) {
PathResult::NonModule(path_res) => match path_res.base_def() {
Def::Err => Err(Determinacy::Determined),
- def @ _ => Ok(self.get_macro(def)),
+ def @ _ => Ok(def),
},
PathResult::Module(..) => unreachable!(),
PathResult::Indeterminate if !force => return Err(Determinacy::Undetermined),
};
self.current_module.macro_resolutions.borrow_mut()
.push((path.into_boxed_slice(), span));
- return ext;
+ return def;
}
let name = path[0].name;
let result = match self.resolve_legacy_scope(&invocation.legacy_scope, name, false) {
- Some(MacroBinding::Legacy(binding)) => Ok(binding.ext.clone()),
- Some(MacroBinding::Modern(binding)) => Ok(binding.get_macro(self)),
+ Some(MacroBinding::Legacy(binding)) => Ok(Def::Macro(binding.def_id, MacroKind::Bang)),
+ Some(MacroBinding::Modern(binding)) => Ok(binding.def_ignoring_ambiguity()),
None => match self.resolve_lexical_macro_path_segment(path[0], MacroNS, None) {
- Ok(binding) => Ok(binding.get_macro(self)),
+ Ok(binding) => Ok(binding.def_ignoring_ambiguity()),
Err(Determinacy::Undetermined) if !force =>
return Err(Determinacy::Undetermined),
Err(_) => {
result
}
-}
-impl<'a> Resolver<'a> {
// Resolve the initial segment of a non-global macro path (e.g. `foo` in `foo::bar!();`)
pub fn resolve_lexical_macro_path_segment(&mut self,
ident: Ident,
};
let ident = Ident::from_str(name);
self.lookup_typo_candidate(&vec![ident], MacroNS, is_macro)
- .as_ref().map(|s| Symbol::intern(s))
});
if let Some(suggestion) = suggestion {
}
pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<'a>) {
- let tts = match item.node {
- ast::ItemKind::Mac(ref mac) => mac.node.stream(),
- _ => unreachable!(),
- };
-
- if item.ident.name == "macro_rules" {
+ self.local_macro_def_scopes.insert(item.id, self.current_module);
+ let ident = item.ident;
+ if ident.name == "macro_rules" {
self.session.span_err(item.span, "user-defined macros may not be named `macro_rules`");
}
- let mark = Mark::from_placeholder_id(item.id);
- let invocation = self.invocations[&mark];
- invocation.module.set(self.current_module);
-
- let mut def = ast::MacroDef {
- ident: item.ident,
- attrs: item.attrs.clone(),
- id: ast::DUMMY_NODE_ID,
- span: item.span,
- body: mark_tts(tts, mark).into(),
- };
-
+ let def_id = self.definitions.local_def_id(item.id);
+ let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, item));
+ self.macro_map.insert(def_id, ext);
*legacy_scope = LegacyScope::Binding(self.arenas.alloc_legacy_binding(LegacyBinding {
- parent: Cell::new(*legacy_scope),
- name: def.ident.name,
- ext: Rc::new(macro_rules::compile(&self.session.parse_sess, &def)),
- span: def.span,
+ parent: Cell::new(*legacy_scope), name: ident.name, def_id: def_id, span: item.span,
}));
- self.macro_names.insert(def.ident.name);
+ self.macro_names.insert(ident.name);
- if attr::contains_name(&def.attrs, "macro_export") {
- def.id = self.next_node_id();
- DefCollector::new(&mut self.definitions).with_parent(CRATE_DEF_INDEX, |collector| {
- collector.visit_macro_def(&def)
- });
- self.macro_exports.push(Export {
- name: def.ident.name,
- def: Def::Macro(self.definitions.local_def_id(def.id), MacroKind::Bang),
- });
- self.exported_macros.push(def);
+ if attr::contains_name(&item.attrs, "macro_export") {
+ let def = Def::Macro(def_id, MacroKind::Bang);
+ self.macro_exports.push(Export { name: ident.name, def: def });
}
}
use rustc::hir;
use rustc::hir::def_id::{CrateNum, DefId};
-use syntax::ast::{self, NodeId};
+use syntax::ast::{self, Attribute, NodeId};
use syntax_pos::Span;
pub struct CrateData {
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data for extern crates.
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data about a function call.
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data for modules.
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data for a reference to a module.
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
/// Data for a typedef.
pub parent: Option<DefId>,
pub docs: String,
pub sig: Option<Signature>,
+ pub attributes: Vec<Attribute>,
}
/// Data for a reference to a type or trait.
pub visibility: Visibility,
pub docs: String,
pub sig: Option<Signature>,
+ pub attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
scope: scope
}.lower(self.tcx));
}
+ // With macros 2.0, we can legitimately get a ref to a macro, but
+ // we don't handle it properly for now (FIXME).
+ Def::Macro(..) => {}
Def::Local(..) |
Def::Upvar(..) |
Def::SelfTy(..) |
Def::AssociatedTy(..) |
Def::AssociatedConst(..) |
Def::PrimTy(_) |
- Def::Macro(..) |
Def::Err => {
span_bug!(span,
"process_def_kind for unexpected item: {:?}",
visibility: Visibility::Inherited,
docs: String::new(),
sig: None,
+ attributes: vec![],
}.lower(self.tcx));
}
}
visibility: vis,
docs: docs_for_attrs(attrs),
sig: method_data.sig,
+ attributes: attrs.to_vec(),
}.lower(self.tcx));
}
parent: None,
docs: String::new(),
sig: None,
+ attributes: vec![],
}.lower(self.tcx));
}
}
visibility: vis,
docs: docs_for_attrs(attrs),
sig: None,
+ attributes: attrs.to_vec(),
}.lower(self.tcx));
}
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: self.save_ctxt.sig_base(item),
+ attributes: item.attrs.clone(),
}.lower(self.tcx));
}
parent: Some(make_def_id(item.id, &self.tcx.hir)),
docs: docs_for_attrs(&variant.node.attrs),
sig: sig,
+ attributes: variant.node.attrs.clone(),
}.lower(self.tcx));
}
}
parent: Some(make_def_id(item.id, &self.tcx.hir)),
docs: docs_for_attrs(&variant.node.attrs),
sig: sig,
+ attributes: variant.node.attrs.clone(),
}.lower(self.tcx));
}
}
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: self.save_ctxt.sig_base(item),
+ attributes: item.attrs.clone(),
}.lower(self.tcx));
}
visibility: Visibility::Inherited,
docs: String::new(),
sig: None,
+ attributes: vec![],
}.lower(self.tcx));
}
}
parent: None,
docs: docs_for_attrs(&item.attrs),
sig: Some(self.save_ctxt.sig_base(item)),
+ attributes: item.attrs.clone(),
}.lower(self.tcx));
}
visibility: Visibility::Inherited,
docs: String::new(),
sig: None,
+ attributes: vec![],
}.lower(self.tcx));
}
}
use rustc::hir::def_id::{CrateNum, DefId, DefIndex};
use rustc::hir::map::Map;
use rustc::ty::TyCtxt;
-use syntax::ast::NodeId;
+use syntax::ast::{self, NodeId};
use syntax::codemap::CodeMap;
+use syntax::print::pprust;
+use syntax::symbol::Symbol;
use syntax_pos::Span;
use data::{self, Visibility, SigElement};
}
}
+/// Represent an arbitrary attribute on a code element
+#[derive(Clone, Debug, RustcEncodable)]
+pub struct Attribute {
+ value: String,
+ span: SpanData,
+}
+
+impl Lower for Vec<ast::Attribute> {
+ type Target = Vec<Attribute>;
+
+ fn lower(self, tcx: TyCtxt) -> Vec<Attribute> {
+ let doc = Symbol::intern("doc");
+ self.into_iter()
+ // Only retain real attributes. Doc comments are lowered separately.
+ .filter(|attr| attr.name() != doc)
+ .map(|mut attr| {
+ // Remove the surrounding '#[..]' or '#![..]' of the pretty printed
+ // attribute. First normalize all inner attribute (#![..]) to outer
+ // ones (#[..]), then remove the two leading and the one trailing character.
+ attr.style = ast::AttrStyle::Outer;
+ let value = pprust::attribute_to_string(&attr);
+ // This str slicing works correctly, because the leading and trailing characters
+ // are in the ASCII range and thus exactly one byte each.
+ let value = value[2..value.len()-1].to_string();
+
+ Attribute {
+ value: value,
+ span: SpanData::from_span(attr.span, tcx.sess.codemap()),
+ }
+ }).collect()
+ }
+}
+
#[derive(Debug, RustcEncodable)]
pub struct CratePreludeData {
pub crate_name: String,
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::EnumData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::FunctionData {
parent: self.parent,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::MethodData {
parent: self.parent,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::ModData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::StructData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::StructVariantData {
parent: self.parent,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub visibility: Visibility,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::TraitData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Signature,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::TupleVariantData {
parent: self.parent,
docs: self.docs,
sig: self.sig.lower(tcx),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub parent: Option<DefId>,
pub docs: String,
pub sig: Option<Signature>,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::TypeDefData {
parent: self.parent,
docs: self.docs,
sig: self.sig.map(|s| s.lower(tcx)),
+ attributes: self.attributes.lower(tcx),
}
}
}
pub visibility: Visibility,
pub docs: String,
pub sig: Option<Signature>,
+ pub attributes: Vec<Attribute>,
}
impl Lower for data::VariableData {
visibility: self.visibility,
docs: self.docs,
sig: self.sig.map(|s| s.lower(tcx)),
+ attributes: self.attributes.lower(tcx),
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
};
if def.span.file_name != def.value {
// If the module is an out-of-line defintion, then we'll make the
decl_id: Option<Id>,
docs: String,
sig: Option<JsonSignature>,
+ attributes: Vec<Attribute>,
}
#[derive(Debug, RustcEncodable)]
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: data.decl_id.map(|id| From::from(id)),
docs: data.docs,
sig: Some(From::from(data.sig)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: None,
+ attributes: vec![],
}
}
}
decl_id: None,
docs: String::new(),
sig: data.sig.map(|s| From::from(s)),
+ attributes: data.attributes,
}
}
}
decl_id: None,
docs: data.docs,
sig: None,
+ attributes: data.attributes,
}
}
}
parent: None,
docs: docs_for_attrs(&item.attrs),
sig: self.sig_base(item),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Static(ref typ, mt, ref expr) => {
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: Some(self.sig_base(item)),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Const(ref typ, ref expr) => {
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: Some(self.sig_base(item)),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Mod(ref m) => {
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: self.sig_base(item),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Enum(ref def, _) => {
visibility: From::from(&item.vis),
docs: docs_for_attrs(&item.attrs),
sig: self.sig_base(item),
+ attributes: item.attrs.clone(),
}))
}
ast::ItemKind::Impl(.., ref trait_ref, ref typ, _) => {
visibility: From::from(&field.vis),
docs: docs_for_attrs(&field.attrs),
sig: Some(sig),
+ attributes: field.attrs.clone(),
})
} else {
None
name: ast::Name, span: Span) -> Option<FunctionData> {
// The qualname for a method is the trait name or name of the struct in an impl in
// which the method is declared in, followed by the method's name.
- let (qualname, parent_scope, decl_id, vis, docs) =
+ let (qualname, parent_scope, decl_id, vis, docs, attributes) =
match self.tcx.impl_of_method(self.tcx.hir.local_def_id(id)) {
Some(impl_id) => match self.tcx.hir.get_if_local(impl_id) {
Some(Node::NodeItem(item)) => {
(result, trait_id, decl_id,
From::from(&item.vis),
- docs_for_attrs(&item.attrs))
+ docs_for_attrs(&item.attrs),
+ item.attrs.to_vec())
}
_ => {
span_bug!(span,
(format!("::{}", self.tcx.item_path_str(def_id)),
Some(def_id), None,
From::from(&item.vis),
- docs_for_attrs(&item.attrs))
+ docs_for_attrs(&item.attrs),
+ item.attrs.to_vec())
}
r => {
span_bug!(span,
}
}
None => {
- span_bug!(span, "Could not find container for method {}", id);
+ debug!("Could not find container for method {} at {:?}", id, span);
+ // This is not necessarily a bug, if there was a compilation error, the tables
+ // we need might not exist.
+ return None;
}
},
};
parent: parent_scope,
docs: docs,
sig: sig,
+ attributes: attributes,
})
}
}
// After adding all files to the archive, we need to update the
- // symbol table of the archive. This currently dies on OSX (see
+ // symbol table of the archive. This currently dies on macOS (see
// #11162), and isn't necessary there anyway
if !sess.target.target.options.is_like_osx {
ab.update_symbols();
sess.abort_if_errors();
// Invoke the system linker
+ //
+ // Note that there's a terribly awful hack that really shouldn't be present
+ // in any compiler. Here an environment variable is supported to
+ // automatically retry the linker invocation if the linker looks like it
+ // segfaulted.
+ //
+ // Gee that seems odd, normally segfaults are things we want to know about!
+ // Unfortunately though in rust-lang/rust#38878 we're experiencing the
+ // linker segfaulting on Travis quite a bit which is causing quite a bit of
+ // pain to land PRs when they spuriously fail due to a segfault.
+ //
+ // The issue #38878 has some more debugging information on it as well, but
+ // this unfortunately looks like it's just a race condition in macOS's linker
+ // with some thread pool working in the background. It seems that no one
+ // currently knows a fix for this so in the meantime we're left with this...
info!("{:?}", &cmd);
- let prog = time(sess.time_passes(), "running linker", || cmd.output());
+ let retry_on_segfault = env::var("RUSTC_RETRY_LINKER_ON_SEGFAULT").is_ok();
+ let mut prog;
+ let mut i = 0;
+ loop {
+ i += 1;
+ prog = time(sess.time_passes(), "running linker", || cmd.output());
+ if !retry_on_segfault || i > 3 {
+ break
+ }
+ let output = match prog {
+ Ok(ref output) => output,
+ Err(_) => break,
+ };
+ if output.status.success() {
+ break
+ }
+ let mut out = output.stderr.clone();
+ out.extend(&output.stdout);
+ let out = String::from_utf8_lossy(&out);
+ let msg = "clang: error: unable to execute command: \
+ Segmentation fault: 11";
+ if !out.contains(msg) {
+ break
+ }
+
+ sess.struct_warn("looks like the linker segfaulted when we tried to \
+ call it, automatically retrying again")
+ .note(&format!("{:?}", cmd))
+ .note(&out)
+ .emit();
+ }
+
match prog {
Ok(prog) => {
fn escape_string(s: &[u8]) -> String {
}
- // On OSX, debuggers need this utility to get run to do some munging of
+ // On macOS, debuggers need this utility to get run to do some munging of
// the symbols
if sess.target.target.options.is_like_osx && sess.opts.debuginfo != NoDebugInfo {
match Command::new("dsymutil").arg(out_filename).output() {
.arg("-l").arg(lib)
.arg("-Wl,--no-whole-archive");
} else {
- // -force_load is the OSX equivalent of --whole-archive, but it
+ // -force_load is the macOS equivalent of --whole-archive, but it
// involves passing the full path to the library to link.
let mut v = OsString::from("-Wl,-force_load,");
v.push(&archive::find_library(lib, search_path, &self.sess));
// Follow C++ namespace-mangling style, see
// http://en.wikipedia.org/wiki/Name_mangling for more info.
//
- // It turns out that on OSX you can actually have arbitrary symbols in
+ // It turns out that on macOS you can actually have arbitrary symbols in
// function names (at least when given to LLVM), but this is not possible
// when using unix's linker. Perhaps one day when we just use a linker from LLVM
// we won't need to do this name mangling. The problem with name mangling is
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::adjustment::CustomCoerceUnsized;
-use rustc::dep_graph::{DepNode, WorkProduct};
+use rustc::dep_graph::{AssertDepGraphSafe, DepNode, WorkProduct};
use rustc::hir::map as hir_map;
use rustc::util::common::time;
use session::config::{self, NoDebugInfo};
// Instantiate translation items without filling out definitions yet...
for ccx in crate_context_list.iter_need_trans() {
- let cgu = ccx.codegen_unit();
- let trans_items = cgu.items_in_deterministic_order(tcx, &symbol_map);
-
- tcx.dep_graph.with_task(cgu.work_product_dep_node(), || {
+ let dep_node = ccx.codegen_unit().work_product_dep_node();
+ tcx.dep_graph.with_task(dep_node,
+ ccx,
+ AssertDepGraphSafe(symbol_map.clone()),
+ trans_decl_task);
+
+ fn trans_decl_task<'a, 'tcx>(ccx: CrateContext<'a, 'tcx>,
+ symbol_map: AssertDepGraphSafe<Rc<SymbolMap<'tcx>>>) {
+ // FIXME(#40304): Instead of this, the symbol-map should be an
+ // on-demand thing that we compute.
+ let AssertDepGraphSafe(symbol_map) = symbol_map;
+ let cgu = ccx.codegen_unit();
+ let trans_items = cgu.items_in_deterministic_order(ccx.tcx(), &symbol_map);
for (trans_item, linkage) in trans_items {
trans_item.predefine(&ccx, linkage);
}
- });
+ }
}
// ... and now that we have everything pre-defined, fill out those definitions.
for ccx in crate_context_list.iter_need_trans() {
- let cgu = ccx.codegen_unit();
- let trans_items = cgu.items_in_deterministic_order(tcx, &symbol_map);
- tcx.dep_graph.with_task(cgu.work_product_dep_node(), || {
+ let dep_node = ccx.codegen_unit().work_product_dep_node();
+ tcx.dep_graph.with_task(dep_node,
+ ccx,
+ AssertDepGraphSafe(symbol_map.clone()),
+ trans_def_task);
+
+ fn trans_def_task<'a, 'tcx>(ccx: CrateContext<'a, 'tcx>,
+ symbol_map: AssertDepGraphSafe<Rc<SymbolMap<'tcx>>>) {
+ // FIXME(#40304): Instead of this, the symbol-map should be an
+ // on-demand thing that we compute.
+ let AssertDepGraphSafe(symbol_map) = symbol_map;
+ let cgu = ccx.codegen_unit();
+ let trans_items = cgu.items_in_deterministic_order(ccx.tcx(), &symbol_map);
for (trans_item, _) in trans_items {
trans_item.define(&ccx);
}
if ccx.sess().opts.debuginfo != NoDebugInfo {
debuginfo::finalize(&ccx);
}
- });
+ }
}
symbol_names_test::report_symbol_names(&shared_ccx);
use llvm;
use llvm::{ContextRef, ModuleRef, ValueRef};
-use rustc::dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig, WorkProduct};
+use rustc::dep_graph::{DepGraph, DepGraphSafe, DepNode, DepTrackingMap,
+ DepTrackingMapConfig, WorkProduct};
use middle::cstore::LinkMeta;
use rustc::hir;
use rustc::hir::def::ExportMap;
index: usize,
}
+impl<'a, 'tcx> DepGraphSafe for CrateContext<'a, 'tcx> {
+}
+
pub struct CrateContextIterator<'a, 'tcx: 'a> {
shared: &'a SharedCrateContext<'a, 'tcx>,
local_ccxs: &'a [LocalCrateContext<'tcx>],
llvm::LLVMRustDIBuilderFinalize(DIB(cx));
llvm::LLVMRustDIBuilderDispose(DIB(cx));
// Debuginfo generation in LLVM by default uses a higher
- // version of dwarf than OS X currently understands. We can
+ // version of dwarf than macOS currently understands. We can
// instruct LLVM to emit an older version of dwarf, however,
- // for OS X to understand. For more info see #11352
+ // for macOS to understand. For more info see #11352
// This can be overridden using --llvm-opts -dwarf-version,N.
// Android has the same issue (#22398)
if cx.sess().target.target.options.is_like_osx ||
self.trans_lvalue(bcx, dest)
};
if fn_ret_ty.is_indirect() {
- llargs.push(dest.llval);
- ReturnDest::Nothing
+ match dest.alignment {
+ Alignment::AbiAligned => {
+ llargs.push(dest.llval);
+ ReturnDest::Nothing
+ },
+ Alignment::Packed => {
+ // Currently, MIR code generation does not create calls
+ // that store directly to fields of packed structs (in
+ // fact, the calls it creates write only to temps),
+ //
+ // If someone changes that, please update this code path
+ // to create a temporary.
+ span_bug!(self.mir.span, "can't directly store to unaligned value");
+ }
+ }
} else {
ReturnDest::Store(dest.llval)
}
bcx.store(base::from_immediate(bcx, s), lldest, align);
}
OperandValue::Pair(a, b) => {
+ let f_align = match *bcx.ccx.layout_of(operand.ty) {
+ Layout::Univariant { ref variant, .. } if variant.packed => {
+ Some(1)
+ }
+ _ => align
+ };
+
let a = base::from_immediate(bcx, a);
let b = base::from_immediate(bcx, b);
- bcx.store(a, bcx.struct_gep(lldest, 0), align);
- bcx.store(b, bcx.struct_gep(lldest, 1), align);
+ bcx.store(a, bcx.struct_gep(lldest, 0), f_align);
+ bcx.store(b, bcx.struct_gep(lldest, 1), f_align);
}
}
}
use super::FnCtxt;
+use rustc::infer::InferOk;
use rustc::traits;
use rustc::ty::{self, Ty, TraitRef};
use rustc::ty::{ToPredicate, TypeFoldable};
pub fn finalize<'b, I>(self, pref: LvaluePreference, exprs: I)
where I: IntoIterator<Item = &'b hir::Expr>
+ {
+ let fcx = self.fcx;
+ fcx.register_infer_ok_obligations(self.finalize_as_infer_ok(pref, exprs));
+ }
+
+ pub fn finalize_as_infer_ok<'b, I>(self, pref: LvaluePreference, exprs: I)
+ -> InferOk<'tcx, ()>
+ where I: IntoIterator<Item = &'b hir::Expr>
{
let methods: Vec<_> = self.steps
.iter()
}
}
- for obligation in self.obligations {
- self.fcx.register_predicate(obligation);
+ InferOk {
+ value: (),
+ obligations: self.obligations
}
}
}
use rustc::hir;
use rustc::hir::def_id::DefId;
-use rustc::infer::{Coercion, InferOk, TypeTrace};
+use rustc::infer::{Coercion, InferResult, InferOk, TypeTrace};
+use rustc::infer::type_variable::TypeVariableOrigin;
use rustc::traits::{self, ObligationCause, ObligationCauseCode};
use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
use rustc::ty::{self, LvaluePreference, TypeAndMut,
use rustc::ty::subst::Subst;
use syntax::abi;
use syntax::feature_gate;
-use util::common::indent;
-use std::cell::RefCell;
use std::collections::VecDeque;
use std::ops::Deref;
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
cause: ObligationCause<'tcx>,
use_lub: bool,
- unsizing_obligations: RefCell<Vec<traits::PredicateObligation<'tcx>>>,
}
impl<'a, 'gcx, 'tcx> Deref for Coerce<'a, 'gcx, 'tcx> {
}
}
-type CoerceResult<'tcx> = RelateResult<'tcx, (Ty<'tcx>, Adjust<'tcx>)>;
+type CoerceResult<'tcx> = InferResult<'tcx, Adjustment<'tcx>>;
fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability,
to_mutbl: hir::Mutability)
}
}
+fn identity<'tcx>() -> Adjust<'tcx> {
+ Adjust::DerefRef {
+ autoderefs: 0,
+ autoref: None,
+ unsize: false,
+ }
+}
+
+fn success<'tcx>(kind: Adjust<'tcx>,
+ target: Ty<'tcx>,
+ obligations: traits::PredicateObligations<'tcx>)
+ -> CoerceResult<'tcx> {
+ Ok(InferOk {
+ value: Adjustment {
+ kind,
+ target
+ },
+ obligations
+ })
+}
+
impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
fn new(fcx: &'f FnCtxt<'f, 'gcx, 'tcx>, cause: ObligationCause<'tcx>) -> Self {
Coerce {
fcx: fcx,
cause: cause,
use_lub: false,
- unsizing_obligations: RefCell::new(vec![]),
}
}
- fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+ fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
self.commit_if_ok(|_| {
let trace = TypeTrace::types(&self.cause, false, a, b);
if self.use_lub {
self.lub(false, trace, &a, &b)
- .map(|ok| self.register_infer_ok_obligations(ok))
} else {
self.sub(false, trace, &a, &b)
- .map(|InferOk { value, obligations }| {
- self.fcx.register_predicates(obligations);
- value
- })
}
})
}
- /// Unify two types (using sub or lub) and produce a noop coercion.
- fn unify_and_identity(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
- self.unify(&a, &b).and_then(|ty| self.identity(ty))
- }
-
- /// Synthesize an identity adjustment.
- fn identity(&self, ty: Ty<'tcx>) -> CoerceResult<'tcx> {
- Ok((ty, Adjust::DerefRef {
- autoderefs: 0,
- autoref: None,
- unsize: false,
- }))
+ /// Unify two types (using sub or lub) and produce a specific coercion.
+ fn unify_and(&self, a: Ty<'tcx>, b: Ty<'tcx>, kind: Adjust<'tcx>)
+ -> CoerceResult<'tcx> {
+ self.unify(&a, &b).and_then(|InferOk { value: ty, obligations }| {
+ success(kind, ty, obligations)
+ })
}
fn coerce<'a, E, I>(&self, exprs: &E, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx>
// Just ignore error types.
if a.references_error() || b.references_error() {
- return self.identity(b);
+ return success(identity(), b, vec![]);
}
if a.is_never() {
- return Ok((b, Adjust::NeverToAny));
+ return success(Adjust::NeverToAny, b, vec![]);
}
// Consider coercing the subtype to a DST
}
_ => {
// Otherwise, just use unification rules.
- self.unify_and_identity(a, b)
+ self.unify_and(a, b, identity())
}
}
}
coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?;
(r_a, mt_a)
}
- _ => return self.unify_and_identity(a, b),
+ _ => return self.unify_and(a, b, identity()),
};
let span = self.cause.span;
let mut first_error = None;
let mut r_borrow_var = None;
let mut autoderef = self.autoderef(span, a);
- let mut success = None;
+ let mut found = None;
for (referent_ty, autoderefs) in autoderef.by_ref() {
if autoderefs == 0 {
mutbl: mt_b.mutbl, // [1] above
});
match self.unify(derefd_ty_a, b) {
- Ok(ty) => {
- success = Some((ty, autoderefs));
+ Ok(ok) => {
+ found = Some((ok, autoderefs));
break;
}
Err(err) => {
// (e.g., in example above, the failure from relating `Vec<T>`
// to the target type), since that should be the least
// confusing.
- let (ty, autoderefs) = match success {
+ let (InferOk { value: ty, mut obligations }, autoderefs) = match found {
Some(d) => d,
None => {
let err = first_error.expect("coerce_borrowed_pointer had no error");
}
};
- // This commits the obligations to the fulfillcx. After this succeeds,
- // this snapshot can't be rolled back.
- autoderef.finalize(LvaluePreference::from_mutbl(mt_b.mutbl), exprs());
-
- // Now apply the autoref. We have to extract the region out of
- // the final ref type we got.
if ty == a && mt_a.mutbl == hir::MutImmutable && autoderefs == 1 {
// As a special case, if we would produce `&'a *x`, that's
// a total no-op. We end up with the type `&'a T` just as
// `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
// which is a borrow.
assert_eq!(mt_b.mutbl, hir::MutImmutable); // can only coerce &T -> &U
- return self.identity(ty);
+ return success(identity(), ty, obligations);
}
+
+ // Now apply the autoref. We have to extract the region out of
+ // the final ref type we got.
let r_borrow = match ty.sty {
ty::TyRef(r_borrow, _) => r_borrow,
_ => span_bug!(span, "expected a ref type, got {:?}", ty),
ty,
autoderefs,
autoref);
- Ok((ty, Adjust::DerefRef {
+
+ let pref = LvaluePreference::from_mutbl(mt_b.mutbl);
+ obligations.extend(autoderef.finalize_as_infer_ok(pref, exprs()).obligations);
+
+ success(Adjust::DerefRef {
autoderefs: autoderefs,
autoref: autoref,
unsize: false,
- }))
+ }, ty, obligations)
}
}
_ => (source, None),
};
- let source = source.adjust_for_autoref(self.tcx, reborrow);
+ let coerce_source = source.adjust_for_autoref(self.tcx, reborrow);
+
+ let adjust = Adjust::DerefRef {
+ autoderefs: if reborrow.is_some() { 1 } else { 0 },
+ autoref: reborrow,
+ unsize: true,
+ };
+
+ // Setup either a subtyping or a LUB relationship between
+ // the `CoerceUnsized` target type and the expected type.
+ // We only have the latter, so we use an inference variable
+ // for the former and let type inference do the rest.
+ let origin = TypeVariableOrigin::MiscVariable(self.cause.span);
+ let coerce_target = self.next_ty_var(origin);
+ let mut coercion = self.unify_and(coerce_target, target, adjust)?;
let mut selcx = traits::SelectionContext::new(self);
// Use a FIFO queue for this custom fulfillment procedure.
let mut queue = VecDeque::new();
- let mut leftover_predicates = vec![];
// Create an obligation for `Source: CoerceUnsized<Target>`.
let cause = ObligationCause::misc(self.cause.span, self.body_id);
queue.push_back(self.tcx
- .predicate_for_trait_def(cause, coerce_unsized_did, 0, source, &[target]));
+ .predicate_for_trait_def(cause, coerce_unsized_did, 0,
+ coerce_source, &[coerce_target]));
// Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid
// emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where
let trait_ref = match obligation.predicate {
ty::Predicate::Trait(ref tr) if traits.contains(&tr.def_id()) => tr.clone(),
_ => {
- leftover_predicates.push(obligation);
+ coercion.obligations.push(obligation);
continue;
}
};
}
}
- *self.unsizing_obligations.borrow_mut() = leftover_predicates;
-
- let adjustment = Adjust::DerefRef {
- autoderefs: if reborrow.is_some() { 1 } else { 0 },
- autoref: reborrow,
- unsize: true,
- };
- debug!("Success, coerced with {:?}", adjustment);
- Ok((target, adjustment))
+ Ok(coercion)
}
fn coerce_from_safe_fn(&self,
a: Ty<'tcx>,
fn_ty_a: ty::PolyFnSig<'tcx>,
- b: Ty<'tcx>)
+ b: Ty<'tcx>,
+ to_unsafe: Adjust<'tcx>,
+ normal: Adjust<'tcx>)
-> CoerceResult<'tcx> {
if let ty::TyFnPtr(fn_ty_b) = b.sty {
match (fn_ty_a.unsafety(), fn_ty_b.unsafety()) {
(hir::Unsafety::Normal, hir::Unsafety::Unsafe) => {
let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a);
- return self.unify_and_identity(unsafe_a, b)
- .map(|(ty, _)| (ty, Adjust::UnsafeFnPointer));
+ return self.unify_and(unsafe_a, b, to_unsafe);
}
_ => {}
}
}
- self.unify_and_identity(a, b)
+ self.unify_and(a, b, normal)
}
fn coerce_from_fn_pointer(&self,
let b = self.shallow_resolve(b);
debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b);
- self.coerce_from_safe_fn(a, fn_ty_a, b)
+ self.coerce_from_safe_fn(a, fn_ty_a, b,
+ Adjust::UnsafeFnPointer, identity())
}
fn coerce_from_fn_item(&self,
match b.sty {
ty::TyFnPtr(_) => {
let a_fn_pointer = self.tcx.mk_fn_ptr(fn_ty_a);
- self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b)
- .map(|(ty, _)| (ty, Adjust::ReifyFnPointer))
+ self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b,
+ Adjust::ReifyFnPointer, Adjust::ReifyFnPointer)
}
- _ => self.unify_and_identity(a, b),
+ _ => self.unify_and(a, b, identity()),
}
}
self.cause.span,
feature_gate::GateIssue::Language,
feature_gate::CLOSURE_TO_FN_COERCION);
- return self.unify_and_identity(a, b);
+ return self.unify_and(a, b, identity());
}
// We coerce the closure, which has fn type
// `extern "rust-call" fn((arg0,arg1,...)) -> _`
let pointer_ty = self.tcx.mk_fn_ptr(converted_sig);
debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})",
a, b, pointer_ty);
- self.unify_and_identity(pointer_ty, b)
- .map(|(ty, _)| (ty, Adjust::ClosureFnPointer))
+ self.unify_and(pointer_ty, b, Adjust::ClosureFnPointer)
}
- _ => self.unify_and_identity(a, b),
+ _ => self.unify_and(a, b, identity()),
}
}
ty::TyRef(_, mt) => (true, mt),
ty::TyRawPtr(mt) => (false, mt),
_ => {
- return self.unify_and_identity(a, b);
+ return self.unify_and(a, b, identity());
}
};
mutbl: mutbl_b,
ty: mt_a.ty,
});
- let (ty, noop) = self.unify_and_identity(a_unsafe, b)?;
coerce_mutbls(mt_a.mutbl, mutbl_b)?;
-
// Although references and unsafe ptrs have the same
// representation, we still register an Adjust::DerefRef so that
// regionck knows that the region for `a` must be valid here.
- Ok((ty,
- if is_ref {
- Adjust::DerefRef {
- autoderefs: 1,
- autoref: Some(AutoBorrow::RawPtr(mutbl_b)),
- unsize: false,
- }
- } else if mt_a.mutbl != mutbl_b {
- Adjust::MutToConstPointer
- } else {
- noop
- }))
- }
-}
-
-fn apply<'a, 'b, 'gcx, 'tcx, E, I>(coerce: &mut Coerce<'a, 'gcx, 'tcx>,
- exprs: &E,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> RelateResult<'tcx, Adjustment<'tcx>>
- where E: Fn() -> I,
- I: IntoIterator<Item = &'b hir::Expr>
-{
-
- let (ty, adjust) = indent(|| coerce.coerce(exprs, a, b))?;
-
- let fcx = coerce.fcx;
- if let Adjust::DerefRef { unsize: true, .. } = adjust {
- let mut obligations = coerce.unsizing_obligations.borrow_mut();
- for obligation in obligations.drain(..) {
- fcx.register_predicate(obligation);
- }
+ self.unify_and(a_unsafe, b, if is_ref {
+ Adjust::DerefRef {
+ autoderefs: 1,
+ autoref: Some(AutoBorrow::RawPtr(mutbl_b)),
+ unsize: false,
+ }
+ } else if mt_a.mutbl != mutbl_b {
+ Adjust::MutToConstPointer
+ } else {
+ identity()
+ })
}
-
- Ok(Adjustment {
- kind: adjust,
- target: ty
- })
}
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target);
let cause = self.cause(expr.span, ObligationCauseCode::ExprAssignable);
- let mut coerce = Coerce::new(self, cause);
+ let coerce = Coerce::new(self, cause);
self.commit_if_ok(|_| {
- let adjustment = apply(&mut coerce, &|| Some(expr), source, target)?;
+ let ok = coerce.coerce(&|| Some(expr), source, target)?;
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
debug!("Success, coerced with {:?}", adjustment);
match self.tables.borrow().adjustments.get(&expr.id) {
// but only if the new expression has no coercion already applied to it.
let mut first_error = None;
if !self.tables.borrow().adjustments.contains_key(&new.id) {
- let result = self.commit_if_ok(|_| apply(&mut coerce, &|| Some(new), new_ty, prev_ty));
+ let result = self.commit_if_ok(|_| coerce.coerce(&|| Some(new), new_ty, prev_ty));
match result {
- Ok(adjustment) => {
+ Ok(ok) => {
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
self.write_adjustment(new.id, adjustment);
}
}
}
- match self.commit_if_ok(|_| apply(&mut coerce, &exprs, prev_ty, new_ty)) {
+ match self.commit_if_ok(|_| coerce.coerce(&exprs, prev_ty, new_ty)) {
Err(_) => {
// Avoid giving strange errors on failed attempts.
if let Some(e) = first_error {
})
}
}
- Ok(adjustment) => {
+ Ok(ok) => {
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
let mut tables = self.tables.borrow_mut();
for expr in exprs() {
}
pub fn check_item_bodies<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CompileResult {
- tcx.sess.track_errors(|| {
- tcx.dep_graph.with_task(DepNode::TypeckBodiesKrate, || {
- tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
- tcx.item_tables(body_owner_def_id);
- });
+ return tcx.sess.track_errors(|| {
+ tcx.dep_graph.with_task(DepNode::TypeckBodiesKrate, tcx, (), check_item_bodies_task);
+ });
+
+ fn check_item_bodies_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
+ tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
+ tcx.item_tables(body_owner_def_id);
});
- })
+ }
}
pub fn provide(providers: &mut Providers) {
/// 4. This is added by the code in `visit_expr` when we write to `item_types`.
/// 5. This is added by the code in `convert_item` when we write to `item_types`;
/// note that this write occurs inside the `CollectItemSig` task.
- /// 6. Added by explicit `read` below
- fn with_collect_item_sig<OP>(&self, id: ast::NodeId, op: OP)
- where OP: FnOnce()
- {
+ /// 6. Added by reads from within `op`.
+ fn with_collect_item_sig(&self, id: ast::NodeId, op: fn(TyCtxt<'a, 'tcx, 'tcx>, ast::NodeId)) {
let def_id = self.tcx.hir.local_def_id(id);
- self.tcx.dep_graph.with_task(DepNode::CollectItemSig(def_id), || {
- self.tcx.hir.read(id);
- op();
- });
+ self.tcx.dep_graph.with_task(DepNode::CollectItemSig(def_id), self.tcx, id, op);
}
}
}
fn visit_item(&mut self, item: &'tcx hir::Item) {
- self.with_collect_item_sig(item.id, || convert_item(self.tcx, item));
+ self.with_collect_item_sig(item.id, convert_item);
intravisit::walk_item(self, item);
}
}
fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
- self.with_collect_item_sig(trait_item.id, || {
- convert_trait_item(self.tcx, trait_item)
- });
+ self.with_collect_item_sig(trait_item.id, convert_trait_item);
intravisit::walk_trait_item(self, trait_item);
}
fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
- self.with_collect_item_sig(impl_item.id, || {
- convert_impl_item(self.tcx, impl_item)
- });
+ self.with_collect_item_sig(impl_item.id, convert_impl_item);
intravisit::walk_impl_item(self, impl_item);
}
}
}
}
-fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, it: &hir::Item) {
+fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_id: ast::NodeId) {
+ let it = tcx.hir.expect_item(item_id);
debug!("convert: item {} with id {}", it.name, it.id);
- let def_id = tcx.hir.local_def_id(it.id);
+ let def_id = tcx.hir.local_def_id(item_id);
match it.node {
// These don't define types.
hir::ItemExternCrate(_) | hir::ItemUse(..) | hir::ItemMod(_) => {
}
}
-fn convert_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_item: &hir::TraitItem) {
+fn convert_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_item_id: ast::NodeId) {
+ let trait_item = tcx.hir.expect_trait_item(trait_item_id);
let def_id = tcx.hir.local_def_id(trait_item.id);
tcx.item_generics(def_id);
tcx.item_predicates(def_id);
}
-fn convert_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_item: &hir::ImplItem) {
- let def_id = tcx.hir.local_def_id(impl_item.id);
+fn convert_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_item_id: ast::NodeId) {
+ let def_id = tcx.hir.local_def_id(impl_item_id);
tcx.item_generics(def_id);
tcx.item_type(def_id);
tcx.item_predicates(def_id);
/// Used when rendering a `ResolvedPath` structure. This invokes the `path`
/// rendering function with the necessary arguments for linking to a local path.
fn resolved_path(w: &mut fmt::Formatter, did: DefId, path: &clean::Path,
- print_all: bool, use_absolute: bool) -> fmt::Result {
+ print_all: bool, use_absolute: bool, is_not_debug: bool) -> fmt::Result {
let last = path.segments.last().unwrap();
let rel_root = match &*path.segments[0].name {
"self" => Some("./".to_string()),
} else {
root.push_str(&seg.name);
root.push_str("/");
- write!(w, "<a class=\"mod\"
- href=\"{}index.html\">{}</a>::",
- root,
- seg.name)?;
+ if is_not_debug {
+ write!(w, "<a class=\"mod\"
+ href=\"{}index.html\">{}</a>::",
+ root,
+ seg.name)?;
+ } else {
+ write!(w, "{}::", seg.name)?;
+ }
}
}
}
}
}
if w.alternate() {
- write!(w, "{:#}{:#}", HRef::new(did, &last.name), last.params)?;
+ if is_not_debug {
+ write!(w, "{:#}{:#}", HRef::new(did, &last.name), last.params)?;
+ } else {
+ write!(w, "{:?}{:?}", HRef::new(did, &last.name), last.params)?;
+ }
} else {
- let path = if use_absolute {
- match href(did) {
- Some((_, _, fqp)) => format!("{}::{}",
- fqp[..fqp.len()-1].join("::"),
- HRef::new(did, fqp.last().unwrap())),
- None => format!("{}", HRef::new(did, &last.name)),
- }
+ if is_not_debug {
+ let path = if use_absolute {
+ match href(did) {
+ Some((_, _, fqp)) => format!("{}::{}",
+ fqp[..fqp.len()-1].join("::"),
+ HRef::new(did, fqp.last().unwrap())),
+ None => format!("{}", HRef::new(did, &last.name)),
+ }
+ } else {
+ format!("{}", HRef::new(did, &last.name))
+ };
+ write!(w, "{}{}", path, last.params)?;
} else {
- format!("{}", HRef::new(did, &last.name))
- };
- write!(w, "{}{}", path, last.params)?;
+ let path = if use_absolute {
+ match href(did) {
+ Some((_, _, fqp)) => format!("{:?}::{:?}",
+ fqp[..fqp.len()-1].join("::"),
+ HRef::new(did, fqp.last().unwrap())),
+ None => format!("{:?}", HRef::new(did, &last.name)),
+ }
+ } else {
+ format!("{:?}", HRef::new(did, &last.name))
+ };
+ write!(w, "{}{:?}", path, last.params)?;
+ }
}
Ok(())
}
}
}
+impl<'a> fmt::Debug for HRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.text)
+ }
+}
+
fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool,
is_not_debug: bool) -> fmt::Result {
match *t {
}
clean::ResolvedPath{ did, ref typarams, ref path, is_generic } => {
// Paths like T::Output and Self::Output should be rendered with all segments
- resolved_path(f, did, path, is_generic, use_absolute)?;
+ resolved_path(f, did, path, is_generic, use_absolute, is_not_debug)?;
tybounds(f, typarams)
}
clean::Infer => write!(f, "_"),
write!(f, "{}::", self_type)?;
}
let path = clean::Path::singleton(name.clone());
- resolved_path(f, did, &path, true, use_absolute)?;
+ resolved_path(f, did, &path, true, use_absolute, is_not_debug)?;
// FIXME: `typarams` are not rendered, and this seems bad?
drop(typarams);
impl fmt::Display for clean::ImportSource {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.did {
- Some(did) => resolved_path(f, did, &self.path, true, false),
+ Some(did) => resolved_path(f, did, &self.path, true, false, true),
_ => {
for (i, seg) in self.path.segments.iter().enumerate() {
if i > 0 {
// going on). If they're in different crates then the crate defining
// the trait will be interested in our implementation.
if imp.def_id.krate == did.krate { continue }
- write!(implementors, r#""{}","#, imp.impl_).unwrap();
+ write!(implementors, "{},", as_json(&imp.impl_.to_string())).unwrap();
}
implementors.push_str("];");
position: absolute;
left: 0;
top: 0;
- min-height: 100vh;
+ min-height: 100%;
}
.sidebar .current {
.content .method .where,
.content .fn .where,
.content .where.fmt-newline {
- display: block;
+ display: block;
}
/* Bit of whitespace to indent it */
.content .method .where::before,
.content .fn .where::before,
.content .where.fmt-newline::before {
- content: ' ';
+ content: ' ';
}
.content .methods > div { margin-left: 40px; }
}
#help > div {
flex: 0 0 auto;
- background: #e9e9e9;
box-shadow: 0 0 6px rgba(0,0,0,.2);
width: 550px;
height: 330px;
- border: 1px solid #bfbfbf;
+ border: 1px solid;
}
#help dt {
float: left;
border-radius: 4px;
- border: 1px solid #bfbfbf;
- background: #fff;
+ border: 1px solid;
width: 23px;
text-align: center;
clear: left;
.since {
font-weight: normal;
font-size: initial;
- color: grey;
position: absolute;
right: 0;
top: 0;
padding-right: 0px;
}
-.line-numbers :target { background-color: transparent; }
-
-/* Code highlighting */
-pre.rust .kw { color: #8959A8; }
-pre.rust .kw-2, pre.rust .prelude-ty { color: #4271AE; }
-pre.rust .number, pre.rust .string { color: #718C00; }
-pre.rust .self, pre.rust .bool-val, pre.rust .prelude-val,
-pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; }
-pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; }
-pre.rust .lifetime { color: #B76514; }
pre.rust .question-mark {
- color: #ff9011;
font-weight: bold;
}
pre.rust { position: relative; }
a.test-arrow {
- background-color: rgba(78, 139, 202, 0.2);
display: inline-block;
position: absolute;
padding: 5px 10px 5px 10px;
right: 5px;
}
a.test-arrow:hover{
- background-color: #4e8bca;
text-decoration: none;
}
text-align: center;
}
-.toggle-label {
- color: #999;
-}
-
.ghost {
display: none;
}
}
:target > code {
- background: #FDFFD3;
- opacity: 1;
+ opacity: 1;
}
/* Media Queries */
nav.sub, .content .out-of-band, .collapse-toggle {
display: none;
}
-}
+}
\ No newline at end of file
/* General structure and fonts */
body {
- background-color: white;
- color: black;
+ background-color: white;
+ color: black;
}
h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
- color: black;
+ color: black;
}
h1.fqn {
- border-bottom-color: #D5D5D5;
+ border-bottom-color: #D5D5D5;
}
h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
- border-bottom-color: #DDDDDD;
+ border-bottom-color: #DDDDDD;
}
.in-band {
- background-color: white;
+ background-color: white;
}
.docblock code, .docblock-short code {
- background-color: #F5F5F5;
+ background-color: #F5F5F5;
}
pre {
- background-color: #F5F5F5;
+ background-color: #F5F5F5;
+}
+
+.sidebar {
+ background-color: #F1F1F1;
+}
+
+.sidebar .current {
+ background-color: #fff;
}
.sidebar {
}
.sidebar .location {
- border-color: #000;
- background-color: #fff;
- color: #333;
+ border-color: #000;
+ background-color: #fff;
+ color: #333;
}
.block a:hover {
- background: #F5F5F5;
+ background: #F5F5F5;
}
.line-numbers span { color: #c67e2d; }
.line-numbers .line-highlighted {
- background-color: #f6fdb0 !important;
+ background-color: #f6fdb0 !important;
}
:target { background: #FDFFD3; }
.content .highlighted {
- color: #000 !important;
- background-color: #ccc;
+ color: #000 !important;
+ background-color: #ccc;
}
.content .highlighted a, .content .highlighted span { color: #000 !important; }
.content .highlighted.trait { background-color: #fece7e; }
.content .highlighted.type { background-color: #c6afb3; }
.docblock h1, .docblock h2, .docblock h3, .docblock h4, .docblock h5 {
- border-bottom-color: #DDD;
+ border-bottom-color: #DDD;
}
.docblock table {
- border-color: #ddd;
+ border-color: #ddd;
}
.docblock table td {
- border-top-color: #ddd;
- border-bottom-color: #ddd;
+ border-top-color: #ddd;
+ border-bottom-color: #ddd;
}
.docblock table th {
- border-top-color: #ddd;
- border-bottom-color: #ddd;
+ border-top-color: #ddd;
+ border-bottom-color: #ddd;
}
.content span.primitive, .content a.primitive, .block a.current.primitive { color: #39a7bf; }
pre.rust .doccomment { color: #4D4D4C; }
nav {
- border-bottom-color: #e0e0e0;
+ border-bottom-color: #e0e0e0;
}
nav.main .current {
- border-top-color: #000;
- border-bottom-color: #000;
+ border-top-color: #000;
+ border-bottom-color: #000;
}
nav.main .separator {
- border: 1px solid #000;
+ border: 1px solid #000;
}
a {
- color: #000;
+ color: #000;
}
.docblock a, .docblock-short a, .stability a {
- color: #3873AD;
+ color: #3873AD;
}
a.test-arrow {
- color: #f5f5f5;
+ color: #f5f5f5;
}
.content span.trait, .content a.trait, .block a.current.trait { color: #7c5af3; }
.search-input {
- color: #555;
- box-shadow: 0 0 0 1px #e0e0e0, 0 0 0 2px transparent;
- background-color: white;
+ color: #555;
+ box-shadow: 0 0 0 1px #e0e0e0, 0 0 0 2px transparent;
+ background-color: white;
}
.stab.unstable { background: #FFF5D6; border-color: #FFC600; }
.stab.deprecated { background: #F3DFFF; border-color: #7F0087; }
+
+#help > div {
+ background: #e9e9e9;
+    border-color: #bfbfbf;
+}
+
+#help dt {
+ border-color: #bfbfbf;
+ background: #fff;
+}
+
+.since {
+ color: grey;
+}
+
+.line-numbers :target { background-color: transparent; }
+
+/* Code highlighting */
+pre.rust .kw { color: #8959A8; }
+pre.rust .kw-2, pre.rust .prelude-ty { color: #4271AE; }
+pre.rust .number, pre.rust .string { color: #718C00; }
+pre.rust .self, pre.rust .bool-val, pre.rust .prelude-val,
+pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; }
+pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; }
+pre.rust .lifetime { color: #B76514; }
+pre.rust .question-mark {
+ color: #ff9011;
+}
+
+a.test-arrow {
+ background-color: rgba(78, 139, 202, 0.2);
+}
+
+a.test-arrow:hover{
+ background-color: #4e8bca;
+}
+
+.toggle-label {
+ color: #999;
+}
+
+:target > code {
+ background: #FDFFD3;
+}
\ No newline at end of file
/// Load a plugin with the given name.
///
/// Turns `name` into the proper dynamic library filename for the given
- /// platform. On windows, it turns into name.dll, on OS X, name.dylib, and
+ /// platform. On windows, it turns into name.dll, on macOS, name.dylib, and
/// elsewhere, libname.so.
pub fn load_plugin(&mut self, name: String) {
let x = self.prefix.join(libname(name));
use syntax::abi;
use syntax::ast;
use syntax::attr;
+use syntax::tokenstream::TokenStream;
use syntax_pos::Span;
use rustc::hir::map as hir_map;
}
let imported_from = self.cx.sess().cstore.original_crate_name(def_id.krate);
let def = match self.cx.sess().cstore.load_macro(def_id, self.cx.sess()) {
- LoadedMacro::MacroRules(macro_rules) => macro_rules,
+ LoadedMacro::MacroDef(macro_def) => macro_def,
// FIXME(jseyfried): document proc macro reexports
LoadedMacro::ProcMacro(..) => continue,
};
- // FIXME(jseyfried) merge with `self.visit_macro()`
- let tts = def.stream().trees().collect::<Vec<_>>();
- let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
+ let matchers = if let ast::ItemKind::MacroDef(ref tokens) = def.node {
+ let tts: Vec<_> = TokenStream::from(tokens.clone()).into_trees().collect();
+ tts.chunks(4).map(|arm| arm[0].span()).collect()
+ } else {
+ unreachable!()
+ };
om.macros.push(Macro {
def_id: def_id,
attrs: def.attrs.clone().into(),
use hash::{Hash, Hasher, BuildHasher, SipHasher13};
use iter::{FromIterator, FusedIterator};
use mem::{self, replace};
-use ops::{Deref, Index};
+use ops::{Deref, Index, InPlace, Place, Placer};
use rand::{self, Rng};
+use ptr;
use super::table::{self, Bucket, EmptyBucket, FullBucket, FullBucketMut, RawTable, SafeHash};
use super::table::BucketState::{Empty, Full};
mut hash: SafeHash,
mut key: K,
mut val: V)
- -> &'a mut V {
+ -> FullBucketMut<'a, K, V> {
let start_index = bucket.index();
let size = bucket.table().size();
// Save the *starting point*.
// bucket, which is a FullBucket on top of a
// FullBucketMut, into just one FullBucketMut. The "table"
// refers to the inner FullBucketMut in this context.
- return bucket.into_table().into_mut_refs().1;
+ return bucket.into_table();
}
Full(bucket) => bucket,
};
}
}
+/// A place for insertion into an `Entry`.
+///
+/// See [`HashMap::entry`](struct.HashMap.html#method.entry) for details.
+#[must_use = "places do nothing unless written to with `<-` syntax"]
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol is subject to change",
+ issue = "30172")]
+pub struct EntryPlace<'a, K: 'a, V: 'a> {
+ bucket: FullBucketMut<'a, K, V>,
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for EntryPlace<'a, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("EntryPlace")
+ .field("key", self.bucket.read().0)
+ .field("value", self.bucket.read().1)
+ .finish()
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K, V> Drop for EntryPlace<'a, K, V> {
+ fn drop(&mut self) {
+        // In-place insertion failed: only the key needs to be dropped,
+        // since the value was never written into the map.
+ unsafe { self.bucket.remove_key() };
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K, V> Placer<V> for Entry<'a, K, V> {
+ type Place = EntryPlace<'a, K, V>;
+
+ fn make_place(self) -> EntryPlace<'a, K, V> {
+ let b = match self {
+ Occupied(mut o) => {
+ unsafe { ptr::drop_in_place(o.elem.read_mut().1); }
+ o.elem
+ }
+ Vacant(v) => {
+ unsafe { v.insert_key() }
+ }
+ };
+ EntryPlace { bucket: b }
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K, V> Place<V> for EntryPlace<'a, K, V> {
+ fn pointer(&mut self) -> *mut V {
+ self.bucket.read_mut().1
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K, V> InPlace<V> for EntryPlace<'a, K, V> {
+ type Owner = ();
+
+ unsafe fn finalize(self) {
+ mem::forget(self);
+ }
+}
+
impl<'a, K, V> Entry<'a, K, V> {
#[stable(feature = "rust1", since = "1.0.0")]
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(self, value: V) -> &'a mut V {
- match self.elem {
+ let b = match self.elem {
NeqElem(mut bucket, disp) => {
if disp >= DISPLACEMENT_THRESHOLD {
bucket.table_mut().set_tag(true);
if disp >= DISPLACEMENT_THRESHOLD {
bucket.table_mut().set_tag(true);
}
- bucket.put(self.hash, self.key, value).into_mut_refs().1
+ bucket.put(self.hash, self.key, value)
+ },
+ };
+ b.into_mut_refs().1
+ }
+
+    // Only used for in-place insertion; avoids an unnecessary value copy.
+    // The value remains uninitialized.
+ unsafe fn insert_key(self) -> FullBucketMut<'a, K, V> {
+ match self.elem {
+ NeqElem(mut bucket, disp) => {
+ if disp >= DISPLACEMENT_THRESHOLD {
+ bucket.table_mut().set_tag(true);
+ }
+ let uninit = mem::uninitialized();
+ robin_hood(bucket, disp, self.hash, self.key, uninit)
+ },
+ NoElem(mut bucket, disp) => {
+ if disp >= DISPLACEMENT_THRESHOLD {
+ bucket.table_mut().set_tag(true);
+ }
+ bucket.put_key(self.hash, self.key)
},
}
}
use super::RandomState;
use cell::RefCell;
use rand::{thread_rng, Rng};
+ use panic;
#[test]
fn test_zero_capacities() {
}
panic!("Adaptive early resize failed");
}
+
+ #[test]
+ fn test_placement_in() {
+ let mut map = HashMap::new();
+ map.extend((0..10).map(|i| (i, i)));
+
+ map.entry(100) <- 100;
+ assert_eq!(map[&100], 100);
+
+ map.entry(0) <- 10;
+ assert_eq!(map[&0], 10);
+
+ assert_eq!(map.len(), 11);
+ }
+
+ #[test]
+ fn test_placement_panic() {
+ let mut map = HashMap::new();
+ map.extend((0..10).map(|i| (i, i)));
+
+ fn mkpanic() -> usize { panic!() }
+
+ // modify existing key
+ // when panic happens, previous key is removed.
+ let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| { map.entry(0) <- mkpanic(); }));
+ assert_eq!(map.len(), 9);
+ assert!(!map.contains_key(&0));
+
+ // add new key
+ let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| { map.entry(100) <- mkpanic(); }));
+ assert_eq!(map.len(), 9);
+ assert!(!map.contains_key(&100));
+ }
+
+ #[test]
+ fn test_placement_drop() {
+ // correctly drop
+ struct TestV<'a>(&'a mut bool);
+ impl<'a> Drop for TestV<'a> {
+ fn drop(&mut self) {
+ if !*self.0 { panic!("value double drop!"); } // no double drop
+ *self.0 = false;
+ }
+ }
+
+ fn makepanic<'a>() -> TestV<'a> { panic!() }
+
+ let mut can_drop = true;
+ let mut hm = HashMap::new();
+ hm.insert(0, TestV(&mut can_drop));
+ let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| { hm.entry(0) <- makepanic(); }));
+ assert_eq!(hm.len(), 0);
+ }
}
table: self.table,
}
}
+
+    /// Puts the given key in place, leaving the value uninitialized.
+    /// It is only used for in-place insertion.
+ pub unsafe fn put_key(mut self, hash: SafeHash, key: K) -> FullBucket<K, V, M> {
+ *self.raw.hash = hash.inspect();
+ let pair_mut = self.raw.pair as *mut (K, V);
+ ptr::write(&mut (*pair_mut).0, key);
+
+ self.table.borrow_table_mut().size += 1;
+
+ FullBucket {
+ raw: self.raw,
+ idx: self.idx,
+ table: self.table,
+ }
+ }
}
impl<K, V, M: Deref<Target = RawTable<K, V>>> FullBucket<K, V, M> {
v)
}
}
+
+    /// Removes this bucket's `key` from the hashtable.
+    /// Only used for in-place insertion.
+    /// NOTE: the value is uninitialized when this function is called; don't try to drop it.
+ pub unsafe fn remove_key(&mut self) {
+ self.table.size -= 1;
+
+ *self.raw.hash = EMPTY_BUCKET;
+ let pair_mut = self.raw.pair as *mut (K, V);
+ ptr::drop_in_place(&mut (*pair_mut).0); // only drop key
+ }
}
// This use of `Put` is misleading and restrictive, but safe and sufficient for our use cases
/// byte was found too early in the slice provided or one wasn't found at all.
#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "cstr_from_bytes", since = "1.10.0")]
-pub struct FromBytesWithNulError { _a: () }
+pub struct FromBytesWithNulError {
+ kind: FromBytesWithNulErrorKind,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+enum FromBytesWithNulErrorKind {
+ InteriorNul(usize),
+ NotNulTerminated,
+}
+
+impl FromBytesWithNulError {
+ fn interior_nul(pos: usize) -> FromBytesWithNulError {
+ FromBytesWithNulError {
+ kind: FromBytesWithNulErrorKind::InteriorNul(pos),
+ }
+ }
+ fn not_nul_terminated() -> FromBytesWithNulError {
+ FromBytesWithNulError {
+ kind: FromBytesWithNulErrorKind::NotNulTerminated,
+ }
+ }
+}
/// An error returned from `CString::into_string` to indicate that a UTF-8 error
/// was encountered during the conversion.
}
/// Converts this `CString` into a boxed `CStr`.
- #[unstable(feature = "into_boxed_c_str", issue = "0")]
+ #[unstable(feature = "into_boxed_c_str", issue = "40380")]
pub fn into_boxed_c_str(self) -> Box<CStr> {
unsafe { mem::transmute(self.into_inner()) }
}
}
}
+#[stable(feature = "c_string_from_box", since = "1.17.0")]
+impl From<Box<CStr>> for CString {
+ fn from(s: Box<CStr>) -> CString {
+ s.into_c_string()
+ }
+}
+
+#[stable(feature = "box_from_c_string", since = "1.17.0")]
+impl Into<Box<CStr>> for CString {
+ fn into(self) -> Box<CStr> {
+ self.into_boxed_c_str()
+ }
+}
+
#[stable(feature = "default_box_extra", since = "1.17.0")]
impl Default for Box<CStr> {
fn default() -> Box<CStr> {
#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")]
impl Error for FromBytesWithNulError {
fn description(&self) -> &str {
- "data provided is not null terminated or contains an interior nul byte"
+ match self.kind {
+ FromBytesWithNulErrorKind::InteriorNul(..) =>
+ "data provided contains an interior nul byte",
+ FromBytesWithNulErrorKind::NotNulTerminated =>
+ "data provided is not nul terminated",
+ }
}
}
#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")]
impl fmt::Display for FromBytesWithNulError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.description().fmt(f)
+ f.write_str(self.description())?;
+ if let FromBytesWithNulErrorKind::InteriorNul(pos) = self.kind {
+ write!(f, " at byte pos {}", pos)?;
+ }
+ Ok(())
}
}
#[stable(feature = "cstr_from_bytes", since = "1.10.0")]
pub fn from_bytes_with_nul(bytes: &[u8])
-> Result<&CStr, FromBytesWithNulError> {
- if bytes.is_empty() || memchr::memchr(0, &bytes) != Some(bytes.len() - 1) {
- Err(FromBytesWithNulError { _a: () })
+ let nul_pos = memchr::memchr(0, bytes);
+ if let Some(nul_pos) = nul_pos {
+ if nul_pos + 1 != bytes.len() {
+ return Err(FromBytesWithNulError::interior_nul(nul_pos));
+ }
+ Ok(unsafe { CStr::from_bytes_with_nul_unchecked(bytes) })
} else {
- Ok(unsafe { Self::from_bytes_with_nul_unchecked(bytes) })
+ Err(FromBytesWithNulError::not_nul_terminated())
}
}
pub fn to_string_lossy(&self) -> Cow<str> {
String::from_utf8_lossy(self.to_bytes())
}
+
+ /// Converts a `Box<CStr>` into a `CString` without copying or allocating.
+ #[unstable(feature = "into_boxed_c_str", issue = "40380")]
+ pub fn into_c_string(self: Box<CStr>) -> CString {
+ unsafe { mem::transmute(self) }
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn into_boxed() {
let orig: &[u8] = b"Hello, world!\0";
let cstr = CStr::from_bytes_with_nul(orig).unwrap();
- let cstring = cstr.to_owned();
- let box1: Box<CStr> = Box::from(cstr);
- let box2 = cstring.into_boxed_c_str();
- assert_eq!(cstr, &*box1);
- assert_eq!(box1, box2);
- assert_eq!(&*box2, cstr);
+ let boxed: Box<CStr> = Box::from(cstr);
+ let cstring = cstr.to_owned().into_boxed_c_str().into_c_string();
+ assert_eq!(cstr, &*boxed);
+ assert_eq!(&*boxed, &*cstring);
+ assert_eq!(&*cstring, cstr);
}
#[test]
/// in the given `OsString`.
///
/// The collection may reserve more space to avoid frequent reallocations.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ffi::OsString;
+ ///
+ /// let mut s = OsString::new();
+ /// s.reserve(10);
+ /// assert!(s.capacity() >= 10);
+ /// ```
#[stable(feature = "osstring_simple_functions", since = "1.9.0")]
pub fn reserve(&mut self, additional: usize) {
self.inner.reserve(additional)
/// Note that the allocator may give the collection more space than it
/// requests. Therefore capacity can not be relied upon to be precisely
/// minimal. Prefer reserve if future insertions are expected.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ffi::OsString;
+ ///
+ /// let mut s = OsString::new();
+ /// s.reserve_exact(10);
+ /// assert!(s.capacity() >= 10);
+ /// ```
#[stable(feature = "osstring_simple_functions", since = "1.9.0")]
pub fn reserve_exact(&mut self, additional: usize) {
self.inner.reserve_exact(additional)
}
+ /// Shrinks the capacity of the `OsString` to match its length.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(osstring_shrink_to_fit)]
+ ///
+ /// use std::ffi::OsString;
+ ///
+ /// let mut s = OsString::from("foo");
+ ///
+ /// s.reserve(100);
+ /// assert!(s.capacity() >= 100);
+ ///
+ /// s.shrink_to_fit();
+ /// assert_eq!(3, s.capacity());
+ /// ```
+ #[unstable(feature = "osstring_shrink_to_fit", issue = "40421")]
+ pub fn shrink_to_fit(&mut self) {
+ self.inner.shrink_to_fit()
+ }
+
/// Converts this `OsString` into a boxed `OsStr`.
- #[unstable(feature = "into_boxed_os_str", issue = "0")]
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(into_boxed_os_str)]
+ ///
+ /// use std::ffi::{OsString, OsStr};
+ ///
+ /// let s = OsString::from("hello");
+ ///
+ /// let b: Box<OsStr> = s.into_boxed_os_str();
+ /// ```
+ #[unstable(feature = "into_boxed_os_str", issue = "40380")]
pub fn into_boxed_os_str(self) -> Box<OsStr> {
unsafe { mem::transmute(self.inner.into_box()) }
}
/// Copies the slice into an owned [`OsString`].
///
/// [`OsString`]: struct.OsString.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ffi::{OsStr, OsString};
+ ///
+ /// let os_str = OsStr::new("foo");
+ /// let os_string = os_str.to_os_string();
+ /// assert_eq!(os_string, OsString::from("foo"));
+ /// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_os_string(&self) -> OsString {
OsString { inner: self.inner.to_owned() }
self.inner.inner.len()
}
+ /// Converts a `Box<OsStr>` into an `OsString` without copying or allocating.
+ #[unstable(feature = "into_boxed_os_str", issue = "40380")]
+ pub fn into_os_string(self: Box<OsStr>) -> OsString {
+ let inner: Box<Slice> = unsafe { mem::transmute(self) };
+ OsString { inner: Buf::from_box(inner) }
+ }
+
/// Gets the underlying byte representation.
///
/// Note: it is *crucial* that this API is private, to avoid
}
}
+#[stable(feature = "os_string_from_box", since = "1.17.0")]
+impl From<Box<OsStr>> for OsString {
+ fn from(boxed: Box<OsStr>) -> OsString {
+ boxed.into_os_string()
+ }
+}
+
+#[stable(feature = "box_from_os_string", since = "1.17.0")]
+impl From<OsString> for Box<OsStr> {
+    fn from(s: OsString) -> Box<OsStr> {
+        s.into_boxed_os_str()
+    }
+}
+
#[stable(feature = "box_default_extra", since = "1.17.0")]
impl Default for Box<OsStr> {
fn default() -> Box<OsStr> {
fn into_boxed() {
let orig = "Hello, world!";
let os_str = OsStr::new(orig);
- let os_string = os_str.to_owned();
- let box1: Box<OsStr> = Box::from(os_str);
- let box2 = os_string.into_boxed_os_str();
- assert_eq!(os_str, &*box1);
- assert_eq!(box1, box2);
- assert_eq!(&*box2, os_str);
+ let boxed: Box<OsStr> = Box::from(os_str);
+ let os_string = os_str.to_owned().into_boxed_os_str().into_os_string();
+ assert_eq!(os_str, &*boxed);
+ assert_eq!(&*boxed, &*os_string);
+ assert_eq!(&*os_string, os_str);
}
#[test]
/// [`File::open`]: struct.File.html#method.open
/// [`File::create`]: struct.File.html#method.create
///
-/// Generally speaking, when using `OpenOptions`, you'll first call [`new()`],
-/// then chain calls to methods to set each option, then call [`open()`],
+/// Generally speaking, when using `OpenOptions`, you'll first call [`new`],
+/// then chain calls to methods to set each option, then call [`open`],
/// passing the path of the file you're trying to open. This will give you a
/// [`io::Result`][result] with a [`File`][file] inside that you can further
/// operate on.
///
-/// [`new()`]: struct.OpenOptions.html#method.new
-/// [`open()`]: struct.OpenOptions.html#method.open
+/// [`new`]: struct.OpenOptions.html#method.new
+/// [`open`]: struct.OpenOptions.html#method.open
/// [result]: ../io/type.Result.html
/// [file]: struct.File.html
///
#[stable(feature = "rust1", since = "1.0.0")]
TimedOut,
/// An error returned when an operation could not be completed because a
- /// call to [`write()`] returned [`Ok(0)`].
+ /// call to [`write`] returned [`Ok(0)`].
///
/// This typically means that an operation could only succeed if it wrote a
/// particular number of bytes but only a smaller number of bytes could be
/// written.
///
- /// [`write()`]: ../../std/io/trait.Write.html#tymethod.write
+ /// [`write`]: ../../std/io/trait.Write.html#tymethod.write
/// [`Ok(0)`]: ../../std/io/type.Result.html
#[stable(feature = "rust1", since = "1.0.0")]
WriteZero,
//! of other types, and you can implement them for your types too. As such,
//! you'll see a few different types of I/O throughout the documentation in
//! this module: [`File`]s, [`TcpStream`]s, and sometimes even [`Vec<T>`]s. For
-//! example, [`Read`] adds a [`read()`] method, which we can use on `File`s:
+//! example, [`Read`] adds a [`read`] method, which we can use on `File`s:
//!
//! ```
//! use std::io;
//! ```
//!
//! [`BufWriter`] doesn't add any new ways of writing; it just buffers every call
-//! to [`write()`]:
+//! to [`write`]:
//!
//! ```
//! use std::io;
//! # }
//! ```
//!
-//! Of course, using [`io::stdout()`] directly is less common than something like
+//! Of course, using [`io::stdout`] directly is less common than something like
//! [`println!`].
//!
//! ## Iterator types
//! [`Vec<T>`]: ../vec/struct.Vec.html
//! [`BufReader`]: struct.BufReader.html
//! [`BufWriter`]: struct.BufWriter.html
-//! [`write()`]: trait.Write.html#tymethod.write
-//! [`io::stdout()`]: fn.stdout.html
+//! [`write`]: trait.Write.html#tymethod.write
+//! [`io::stdout`]: fn.stdout.html
//! [`println!`]: ../macro.println.html
//! [`Lines`]: struct.Lines.html
//! [`io::Result`]: type.Result.html
//! [`?` operator]: ../../book/syntax-index.html
-//! [`read()`]: trait.Read.html#tymethod.read
+//! [`read`]: trait.Read.html#tymethod.read
#![stable(feature = "rust1", since = "1.0.0")]
/// If the data in this stream is *not* valid UTF-8 then an error is
/// returned and `buf` is unchanged.
///
- /// See [`read_to_end()`][readtoend] for other error semantics.
+ /// See [`read_to_end`][readtoend] for other error semantics.
///
/// [readtoend]: #method.read_to_end
///
///
/// Implementors of the `Write` trait are sometimes called 'writers'.
///
-/// Writers are defined by two required methods, [`write()`] and [`flush()`]:
+/// Writers are defined by two required methods, [`write`] and [`flush`]:
///
-/// * The [`write()`] method will attempt to write some data into the object,
+/// * The [`write`] method will attempt to write some data into the object,
/// returning how many bytes were successfully written.
///
-/// * The [`flush()`] method is useful for adaptors and explicit buffers
+/// * The [`flush`] method is useful for adaptors and explicit buffers
/// themselves for ensuring that all buffered data has been pushed out to the
/// 'true sink'.
///
/// throughout [`std::io`] take and provide types which implement the `Write`
/// trait.
///
-/// [`write()`]: #tymethod.write
-/// [`flush()`]: #tymethod.flush
+/// [`write`]: #tymethod.write
+/// [`flush`]: #tymethod.flush
/// [`std::io`]: index.html
///
/// # Examples
///
/// For example, reading line-by-line is inefficient without using a buffer, so
/// if you want to read by line, you'll need `BufRead`, which includes a
-/// [`read_line()`] method as well as a [`lines()`] iterator.
+/// [`read_line`] method as well as a [`lines`] iterator.
///
/// # Examples
///
///
/// [`BufReader`]: struct.BufReader.html
/// [`File`]: ../fs/struct.File.html
-/// [`read_line()`]: #method.read_line
-/// [`lines()`]: #method.lines
+/// [`read_line`]: #method.read_line
+/// [`lines`]: #method.lines
/// [`Read`]: trait.Read.html
///
/// ```
/// Fills the internal buffer of this object, returning the buffer contents.
///
/// This function is a lower-level call. It needs to be paired with the
- /// [`consume()`] method to function properly. When calling this
+ /// [`consume`] method to function properly. When calling this
/// method, none of the contents will be "read" in the sense that later
- /// calling `read` may return the same contents. As such, [`consume()`] must
+ /// calling `read` may return the same contents. As such, [`consume`] must
/// be called with the number of bytes that are consumed from this buffer to
/// ensure that the bytes are never returned twice.
///
- /// [`consume()`]: #tymethod.consume
+ /// [`consume`]: #tymethod.consume
///
/// An empty buffer returned indicates that the stream has reached EOF.
///
/// so they should no longer be returned in calls to `read`.
///
/// This function is a lower-level call. It needs to be paired with the
- /// [`fill_buf()`] method to function properly. This function does
+ /// [`fill_buf`] method to function properly. This function does
/// not perform any I/O, it simply informs this object that some amount of
- /// its buffer, returned from [`fill_buf()`], has been consumed and should
+ /// its buffer, returned from [`fill_buf`], has been consumed and should
/// no longer be returned. As such, this function may do odd things if
- /// [`fill_buf()`] isn't called before calling it.
+ /// [`fill_buf`] isn't called before calling it.
///
/// The `amt` must be `<=` the number of bytes in the buffer returned by
- /// [`fill_buf()`].
+ /// [`fill_buf`].
///
/// # Examples
///
- /// Since `consume()` is meant to be used with [`fill_buf()`],
+ /// Since `consume()` is meant to be used with [`fill_buf`],
/// that method's example includes an example of `consume()`.
///
- /// [`fill_buf()`]: #tymethod.fill_buf
+ /// [`fill_buf`]: #tymethod.fill_buf
#[stable(feature = "rust1", since = "1.0.0")]
fn consume(&mut self, amt: usize);
/// # Errors
///
/// This function will ignore all instances of [`ErrorKind::Interrupted`] and
- /// will otherwise return any errors returned by [`fill_buf()`].
+ /// will otherwise return any errors returned by [`fill_buf`].
///
/// If an I/O error is encountered then all bytes read so far will be
/// present in `buf` and its length will have been adjusted appropriately.
/// A locked standard input implements `BufRead`. In this example, we'll
/// read from standard input until we see an `a` byte.
///
- /// [`fill_buf()`]: #tymethod.fill_buf
+ /// [`fill_buf`]: #tymethod.fill_buf
/// [`ErrorKind::Interrupted`]: enum.ErrorKind.html#variant.Interrupted
///
/// ```
///
/// # Errors
///
- /// This function has the same error semantics as [`read_until()`] and will
+ /// This function has the same error semantics as [`read_until`] and will
/// also return an error if the read bytes are not valid UTF-8. If an I/O
/// error is encountered then `buf` may contain some bytes already read in
/// the event that all data read so far was valid UTF-8.
///
/// A locked standard input implements `BufRead`. In this example, we'll
/// read all of the lines from standard input. If we were to do this in
- /// an actual project, the [`lines()`] method would be easier, of
+ /// an actual project, the [`lines`] method would be easier, of
/// course.
///
- /// [`lines()`]: #method.lines
- /// [`read_until()`]: #method.read_until
+ /// [`lines`]: #method.lines
+ /// [`read_until`]: #method.read_until
///
/// ```
/// use std::io;
/// [`io::Result`]`<`[`Vec<u8>`]`>`. Each vector returned will *not* have
/// the delimiter byte at the end.
///
- /// This function will yield errors whenever [`read_until()`] would have
+ /// This function will yield errors whenever [`read_until`] would have
/// also yielded an error.
///
/// # Examples
///
/// [`io::Result`]: type.Result.html
/// [`Vec<u8>`]: ../vec/struct.Vec.html
- /// [`read_until()`]: #method.read_until
+ /// [`read_until`]: #method.read_until
///
/// ```
/// use std::io;
///
/// # Errors
///
- /// Each line of the iterator has the same error semantics as [`BufRead::read_line()`].
+ /// Each line of the iterator has the same error semantics as [`BufRead::read_line`].
///
- /// [`BufRead::read_line()`]: trait.BufRead.html#method.read_line
+ /// [`BufRead::read_line`]: trait.BufRead.html#method.read_line
#[stable(feature = "rust1", since = "1.0.0")]
fn lines(self) -> Lines<Self> where Self: Sized {
Lines { buf: self }
/// Adaptor to chain together two readers.
///
-/// This struct is generally created by calling [`chain()`] on a reader.
-/// Please see the documentation of [`chain()`] for more details.
+/// This struct is generally created by calling [`chain`] on a reader.
+/// Please see the documentation of [`chain`] for more details.
///
-/// [`chain()`]: trait.Read.html#method.chain
+/// [`chain`]: trait.Read.html#method.chain
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chain<T, U> {
first: T,
/// Reader adaptor which limits the bytes read from an underlying reader.
///
-/// This struct is generally created by calling [`take()`] on a reader.
-/// Please see the documentation of [`take()`] for more details.
+/// This struct is generally created by calling [`take`] on a reader.
+/// Please see the documentation of [`take`] for more details.
///
-/// [`take()`]: trait.Read.html#method.take
+/// [`take`]: trait.Read.html#method.take
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct Take<T> {
/// An iterator over `u8` values of a reader.
///
-/// This struct is generally created by calling [`bytes()`] on a reader.
-/// Please see the documentation of [`bytes()`] for more details.
+/// This struct is generally created by calling [`bytes`] on a reader.
+/// Please see the documentation of [`bytes`] for more details.
///
-/// [`bytes()`]: trait.Read.html#method.bytes
+/// [`bytes`]: trait.Read.html#method.bytes
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct Bytes<R> {
/// An iterator over the `char`s of a reader.
///
-/// This struct is generally created by calling [`chars()`][chars] on a reader.
+/// This struct is generally created by calling [`chars`][chars] on a reader.
/// Please see the documentation of `chars()` for more details.
///
/// [chars]: trait.Read.html#method.chars
/// An iterator over the contents of an instance of `BufRead` split on a
/// particular byte.
///
-/// This struct is generally created by calling [`split()`][split] on a
+/// This struct is generally created by calling [`split`][split] on a
/// `BufRead`. Please see the documentation of `split()` for more details.
///
/// [split]: trait.BufRead.html#method.split
/// An iterator over the lines of an instance of `BufRead`.
///
-/// This struct is generally created by calling [`lines()`][lines] on a
+/// This struct is generally created by calling [`lines`][lines] on a
/// `BufRead`. Please see the documentation of `lines()` for more details.
///
/// [lines]: trait.BufRead.html#method.lines
///
/// Each handle shares a global buffer of data to be written to the standard
/// output stream. Access is also synchronized via a lock and explicit control
-/// over locking is available via the [`lock()`] method.
+/// over locking is available via the [`lock`] method.
///
/// Created by the [`io::stdout`] method.
///
-/// [`lock()`]: #method.lock
+/// [`lock`]: #method.lock
/// [`io::stdout`]: fn.stdout.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Stdout {
/// A reader which is always at EOF.
///
-/// This struct is generally created by calling [`empty()`][empty]. Please see
+/// This struct is generally created by calling [`empty`][empty]. Please see
/// the documentation of `empty()` for more details.
///
/// [empty]: fn.empty.html
/// A reader which yields one byte over and over and over and over and over and...
///
-/// This struct is generally created by calling [`repeat()`][repeat]. Please
+/// This struct is generally created by calling [`repeat`][repeat]. Please
/// see the documentation of `repeat()` for more details.
///
/// [repeat]: fn.repeat.html
/// A writer which will move data into the void.
///
-/// This struct is generally created by calling [`sink()`][sink]. Please
+/// This struct is generally created by calling [`sink`][sink]. Please
/// see the documentation of `sink()` for more details.
///
/// [sink]: fn.sink.html
//! contained an `extern crate std;` import at the [crate root]. Therefore the
//! standard library can be accessed in [`use`] statements through the path
//! `std`, as in [`use std::env`], or in expressions through the absolute path
-//! `::std`, as in [`::std::env::args()`].
+//! `::std`, as in [`::std::env::args`].
//!
//! # How to read this documentation
//!
//! [TCP]: net/struct.TcpStream.html
//! [The Rust Prelude]: prelude/index.html
//! [UDP]: net/struct.UdpSocket.html
-//! [`::std::env::args()`]: env/fn.args.html
+//! [`::std::env::args`]: env/fn.args.html
//! [`Arc`]: sync/struct.Arc.html
//! [owned slice]: boxed/index.html
//! [`Cell`]: cell/struct.Cell.html
#![feature(panic_unwind)]
#![feature(peek)]
#![feature(placement_in_syntax)]
+#![feature(placement_new_protocol)]
#![feature(prelude_import)]
#![feature(pub_restricted)]
#![feature(rand)]
/// Sets the read timeout to the timeout specified.
///
- /// If the value specified is [`None`], then [`read()`] calls will block
+ /// If the value specified is [`None`], then [`read`] calls will block
/// indefinitely. It is an error to pass the zero `Duration` to this
/// method.
///
/// error of the kind [`WouldBlock`], but Windows may return [`TimedOut`].
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`read()`]: ../../std/io/trait.Read.html#tymethod.read
+ /// [`read`]: ../../std/io/trait.Read.html#tymethod.read
/// [`WouldBlock`]: ../../std/io/enum.ErrorKind.html#variant.WouldBlock
/// [`TimedOut`]: ../../std/io/enum.ErrorKind.html#variant.TimedOut
///
/// Sets the write timeout to the timeout specified.
///
- /// If the value specified is [`None`], then [`write()`] calls will block
+ /// If the value specified is [`None`], then [`write`] calls will block
/// indefinitely. It is an error to pass the zero [`Duration`] to this
/// method.
///
/// an error of the kind [`WouldBlock`], but Windows may return [`TimedOut`].
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`write()`]: ../../std/io/trait.Write.html#tymethod.write
+ /// [`write`]: ../../std/io/trait.Write.html#tymethod.write
/// [`Duration`]: ../../std/time/struct.Duration.html
/// [`WouldBlock`]: ../../std/io/enum.ErrorKind.html#variant.WouldBlock
/// [`TimedOut`]: ../../std/io/enum.ErrorKind.html#variant.TimedOut
/// Returns the read timeout of this socket.
///
- /// If the timeout is [`None`], then [`read()`] calls will block indefinitely.
+ /// If the timeout is [`None`], then [`read`] calls will block indefinitely.
///
/// # Note
///
/// Some platforms do not provide access to the current timeout.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`read()`]: ../../std/io/trait.Read.html#tymethod.read
+ /// [`read`]: ../../std/io/trait.Read.html#tymethod.read
///
/// # Examples
///
/// Returns the write timeout of this socket.
///
- /// If the timeout is [`None`], then [`write()`] calls will block indefinitely.
+ /// If the timeout is [`None`], then [`write`] calls will block indefinitely.
///
/// # Note
///
/// Some platforms do not provide access to the current timeout.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`write()`]: ../../std/io/trait.Write.html#tymethod.write
+ /// [`write`]: ../../std/io/trait.Write.html#tymethod.write
///
/// # Examples
///
/// Gets the value of the `IP_TTL` option for this socket.
///
- /// For more information about this option, see [`set_ttl()`][link].
+ /// For more information about this option, see [`set_ttl`][link].
///
/// [link]: #method.set_ttl
///
/// Sets the read timeout to the timeout specified.
///
- /// If the value specified is [`None`], then [`read()`] calls will block
+ /// If the value specified is [`None`], then [`read`] calls will block
/// indefinitely. It is an error to pass the zero [`Duration`] to this
/// method.
///
/// error of the kind [`WouldBlock`], but Windows may return [`TimedOut`].
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`read()`]: ../../std/io/trait.Read.html#tymethod.read
+ /// [`read`]: ../../std/io/trait.Read.html#tymethod.read
/// [`Duration`]: ../../std/time/struct.Duration.html
/// [`WouldBlock`]: ../../std/io/enum.ErrorKind.html#variant.WouldBlock
/// [`TimedOut`]: ../../std/io/enum.ErrorKind.html#variant.TimedOut
/// Sets the write timeout to the timeout specified.
///
- /// If the value specified is [`None`], then [`write()`] calls will block
+ /// If the value specified is [`None`], then [`write`] calls will block
/// indefinitely. It is an error to pass the zero [`Duration`] to this
/// method.
///
/// an error of the kind [`WouldBlock`], but Windows may return [`TimedOut`].
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`write()`]: ../../std/io/trait.Write.html#tymethod.write
+ /// [`write`]: ../../std/io/trait.Write.html#tymethod.write
/// [`Duration`]: ../../std/time/struct.Duration.html
/// [`WouldBlock`]: ../../std/io/enum.ErrorKind.html#variant.WouldBlock
/// [`TimedOut`]: ../../std/io/enum.ErrorKind.html#variant.TimedOut
/// Returns the read timeout of this socket.
///
- /// If the timeout is [`None`], then [`read()`] calls will block indefinitely.
+ /// If the timeout is [`None`], then [`read`] calls will block indefinitely.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`read()`]: ../../std/io/trait.Read.html#tymethod.read
+ /// [`read`]: ../../std/io/trait.Read.html#tymethod.read
///
/// # Examples
///
/// Returns the write timeout of this socket.
///
- /// If the timeout is [`None`], then [`write()`] calls will block indefinitely.
+ /// If the timeout is [`None`], then [`write`] calls will block indefinitely.
///
/// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`write()`]: ../../std/io/trait.Write.html#tymethod.write
+ /// [`write`]: ../../std/io/trait.Write.html#tymethod.write
///
/// # Examples
///
/// Sends data on the socket to the remote address to which it is connected.
///
- /// The [`connect()`] method will connect this socket to a remote address. This
+ /// The [`connect`] method will connect this socket to a remote address. This
/// method will fail if the socket is not connected.
///
- /// [`connect()`]: #method.connect
+ /// [`connect`]: #method.connect
///
/// # Examples
///
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! MacOS-specific definitions
+//! macOS-specific definitions
#![stable(feature = "raw_ext", since = "1.1.0")]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! MacOS-specific raw type definitions
+//! macOS-specific raw type definitions
#![stable(feature = "raw_ext", since = "1.1.0")]
#![rustc_deprecated(since = "1.8.0",
/// A struct providing information about a panic.
///
-/// `PanicInfo` structure is passed to a panic hook set by the [`set_hook()`]
+/// `PanicInfo` structure is passed to a panic hook set by the [`set_hook`]
/// function.
///
-/// [`set_hook()`]: ../../std/panic/fn.set_hook.html
+/// [`set_hook`]: ../../std/panic/fn.set_hook.html
///
/// # Examples
///
/// A struct containing information about the location of a panic.
///
-/// This structure is created by the [`location()`] method of [`PanicInfo`].
+/// This structure is created by the [`location`] method of [`PanicInfo`].
///
-/// [`location()`]: ../../std/panic/struct.PanicInfo.html#method.location
+/// [`location`]: ../../std/panic/struct.PanicInfo.html#method.location
/// [`PanicInfo`]: ../../std/panic/struct.PanicInfo.html
///
/// # Examples
self.inner.push(path);
}
- /// Truncate `self` to [`self.parent()`].
+ /// Truncate `self` to [`self.parent`].
///
- /// Returns false and does nothing if [`self.file_name()`] is `None`.
+ /// Returns false and does nothing if [`self.file_name`] is `None`.
/// Otherwise, returns `true`.
///
- /// [`self.parent()`]: struct.PathBuf.html#method.parent
- /// [`self.file_name()`]: struct.PathBuf.html#method.file_name
+ /// [`self.parent`]: struct.PathBuf.html#method.parent
+ /// [`self.file_name`]: struct.PathBuf.html#method.file_name
///
/// # Examples
///
}
}
- /// Updates [`self.file_name()`] to `file_name`.
+ /// Updates [`self.file_name`] to `file_name`.
///
- /// If [`self.file_name()`] was [`None`], this is equivalent to pushing
+ /// If [`self.file_name`] was [`None`], this is equivalent to pushing
/// `file_name`.
///
- /// [`self.file_name()`]: struct.PathBuf.html#method.file_name
+ /// [`self.file_name`]: struct.PathBuf.html#method.file_name
/// [`None`]: ../../std/option/enum.Option.html#variant.None
///
/// # Examples
self.push(file_name);
}
- /// Updates [`self.extension()`] to `extension`.
+ /// Updates [`self.extension`] to `extension`.
///
- /// If [`self.file_name()`] is `None`, does nothing and returns `false`.
+ /// If [`self.file_name`] is `None`, does nothing and returns `false`.
///
- /// Otherwise, returns `true`; if [`self.extension()`] is [`None`], the
+ /// Otherwise, returns `true`; if [`self.extension`] is [`None`], the
/// extension is added; otherwise it is replaced.
///
- /// [`self.file_name()`]: struct.PathBuf.html#method.file_name
- /// [`self.extension()`]: struct.PathBuf.html#method.extension
+ /// [`self.file_name`]: struct.PathBuf.html#method.file_name
+ /// [`self.extension`]: struct.PathBuf.html#method.extension
/// [`None`]: ../../std/option/enum.Option.html#variant.None
///
/// # Examples
}
/// Converts this `PathBuf` into a boxed `Path`.
- #[unstable(feature = "into_boxed_path", issue = "0")]
+ #[unstable(feature = "into_boxed_path", issue = "40380")]
pub fn into_boxed_path(self) -> Box<Path> {
unsafe { mem::transmute(self.inner.into_boxed_os_str()) }
}
}
}
+#[stable(feature = "path_buf_from_box", since = "1.17.0")]
+impl From<Box<Path>> for PathBuf {
+ fn from(boxed: Box<Path>) -> PathBuf {
+ boxed.into_path_buf()
+ }
+}
+
+#[stable(feature = "box_from_path_buf", since = "1.17.0")]
+impl From<PathBuf> for Box<Path> {
+    fn from(p: PathBuf) -> Box<Path> {
+        p.into_boxed_path()
+    }
+}
+
#[stable(feature = "box_default_extra", since = "1.17.0")]
impl Default for Box<Path> {
fn default() -> Box<Path> {
iter_after(self.components().rev(), child.components().rev()).is_some()
}
- /// Extracts the stem (non-extension) portion of [`self.file_name()`].
+ /// Extracts the stem (non-extension) portion of [`self.file_name`].
///
- /// [`self.file_name()`]: struct.Path.html#method.file_name
+ /// [`self.file_name`]: struct.Path.html#method.file_name
///
/// The stem is:
///
self.file_name().map(split_file_at_dot).and_then(|(before, after)| before.or(after))
}
- /// Extracts the extension of [`self.file_name()`], if possible.
+ /// Extracts the extension of [`self.file_name`], if possible.
///
/// The extension is:
///
/// * [`None`], if the file name begins with `.` and has no other `.`s within;
/// * Otherwise, the portion of the file name after the final `.`
///
- /// [`self.file_name()`]: struct.Path.html#method.file_name
+ /// [`self.file_name`]: struct.Path.html#method.file_name
/// [`None`]: ../../std/option/enum.Option.html#variant.None
///
/// # Examples
pub fn is_dir(&self) -> bool {
fs::metadata(self).map(|m| m.is_dir()).unwrap_or(false)
}
+
+ /// Converts a `Box<Path>` into a `PathBuf` without copying or allocating.
+ #[unstable(feature = "into_boxed_path", issue = "40380")]
+ pub fn into_path_buf(self: Box<Path>) -> PathBuf {
+ let inner: Box<OsStr> = unsafe { mem::transmute(self) };
+ PathBuf { inner: OsString::from(inner) }
+ }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn into_boxed() {
let orig: &str = "some/sort/of/path";
let path = Path::new(orig);
- let path_buf = path.to_owned();
- let box1: Box<Path> = Box::from(path);
- let box2 = path_buf.into_boxed_path();
- assert_eq!(path, &*box1);
- assert_eq!(box1, box2);
- assert_eq!(&*box2, path);
+ let boxed: Box<Path> = Box::from(path);
+ let path_buf = path.to_owned().into_boxed_path().into_path_buf();
+ assert_eq!(path, &*boxed);
+ assert_eq!(&*boxed, &*path_buf);
+ assert_eq!(&*path_buf, path);
}
#[test]
//! value.
//! * [`std::boxed`]::[`Box`], a way to allocate values on the heap.
//! * [`std::borrow`]::[`ToOwned`], The conversion trait that defines
-//! [`to_owned()`], the generic method for creating an owned type from a
+//! [`to_owned`], the generic method for creating an owned type from a
//! borrowed type.
-//! * [`std::clone`]::[`Clone`], the ubiquitous trait that defines [`clone()`],
+//! * [`std::clone`]::[`Clone`], the ubiquitous trait that defines [`clone`],
//! the method for producing a copy of a value.
//! * [`std::cmp`]::{[`PartialEq`], [`PartialOrd`], [`Eq`], [`Ord`] }. The
//! comparison traits, which implement the comparison operators and are often
//! [`ToOwned`]: ../borrow/trait.ToOwned.html
//! [`ToString`]: ../string/trait.ToString.html
//! [`Vec`]: ../vec/struct.Vec.html
-//! [`clone()`]: ../clone/trait.Clone.html#tymethod.clone
+//! [`clone`]: ../clone/trait.Clone.html#tymethod.clone
//! [`drop`]: ../mem/fn.drop.html
//! [`std::borrow`]: ../borrow/index.html
//! [`std::boxed`]: ../boxed/index.html
//! [`std::slice`]: ../slice/index.html
//! [`std::string`]: ../string/index.html
//! [`std::vec`]: ../vec/index.html
-//! [`to_owned()`]: ../borrow/trait.ToOwned.html#tymethod.to_owned
+//! [`to_owned`]: ../borrow/trait.ToOwned.html#tymethod.to_owned
//! [book-closures]: ../../book/closures.html
//! [book-dtor]: ../../book/drop.html
//! [book-enums]: ../../book/enums.html
/// # Representation
///
/// A `&str` is made up of two components: a pointer to some bytes, and a
-/// length. You can look at these with the [`.as_ptr()`] and [`len()`] methods:
+/// length. You can look at these with the [`.as_ptr`] and [`len`] methods:
///
/// ```
/// use std::slice;
/// assert_eq!(s, Ok(story));
/// ```
///
-/// [`.as_ptr()`]: #method.as_ptr
-/// [`len()`]: #method.len
+/// [`.as_ptr`]: #method.as_ptr
+/// [`len`]: #method.len
///
/// Note: This example shows the internals of `&str`. `unsafe` should not be
/// used to get a string slice under normal circumstances. Use `.as_slice()`
//! If an application does not have `getrandom` and likely to be run soon after first booting,
//! or on a system with very few entropy sources, one should consider using `/dev/random` via
//! `ReaderRng`.
-//! - On some systems (e.g. FreeBSD, OpenBSD and Mac OS X) there is no difference
+//! - On some systems (e.g. FreeBSD, OpenBSD and macOS) there is no difference
//! between the two sources. (Also note that, on some systems e.g. FreeBSD, both `/dev/random`
//! and `/dev/urandom` may block once if the CSPRNG has not seeded yet.)
/// A random number generator that retrieves randomness straight from
/// the operating system. Platform sources:
///
-/// - Unix-like systems (Linux, Android, Mac OSX): read directly from
+/// - Unix-like systems (Linux, Android, macOS): read directly from
/// `/dev/urandom`, or from `getrandom(2)` system call if available.
/// - Windows: calls `CryptGenRandom`, using the default cryptographic
/// service provider with the `PROV_RSA_FULL` type.
/// A result returned from wait.
///
-/// Currently this opaque structure only has one method, [`.is_leader()`]. Only
+/// Currently this opaque structure only has one method, [`.is_leader`]. Only
/// one thread will receive a result that will return `true` from this function.
///
-/// [`.is_leader()`]: #method.is_leader
+/// [`.is_leader`]: #method.is_leader
///
/// # Examples
///
///
/// This function will atomically unlock the mutex specified (represented by
/// `guard`) and block the current thread. This means that any calls
- /// to [`notify_one()`] or [`notify_all()`] which happen logically after the
+ /// to [`notify_one`] or [`notify_all`] which happen logically after the
/// mutex is unlocked are candidates to wake this thread up. When this
/// function call returns, the lock specified will have been re-acquired.
///
///
/// # Panics
///
- /// This function will [`panic!()`] if it is used with more than one mutex
+ /// This function will [`panic!`] if it is used with more than one mutex
/// over time. Each condition variable is dynamically bound to exactly one
/// mutex to ensure defined behavior across platforms. If this functionality
/// is not desired, then unsafe primitives in `sys` are provided.
///
- /// [`notify_one()`]: #method.notify_one
- /// [`notify_all()`]: #method.notify_all
+ /// [`notify_one`]: #method.notify_one
+ /// [`notify_all`]: #method.notify_all
/// [poisoning]: ../sync/struct.Mutex.html#poisoning
/// [`Mutex`]: ../sync/struct.Mutex.html
- /// [`panic!()`]: ../../std/macro.panic.html
+ /// [`panic!`]: ../../std/macro.panic.html
///
/// # Examples
///
/// be woken up from its call to [`wait`] or [`wait_timeout`]. Calls to
/// `notify_one` are not buffered in any way.
///
- /// To wake up all threads, see [`notify_all()`].
+ /// To wake up all threads, see [`notify_all`].
///
/// [`wait`]: #method.wait
/// [`wait_timeout`]: #method.wait_timeout
- /// [`notify_all()`]: #method.notify_all
+ /// [`notify_all`]: #method.notify_all
///
/// # Examples
///
/// variable are awoken. Calls to `notify_all()` are not buffered in any
/// way.
///
- /// To wake up only one thread, see [`notify_one()`].
+ /// To wake up only one thread, see [`notify_one`].
///
- /// [`notify_one()`]: #method.notify_one
+ /// [`notify_one`]: #method.notify_one
///
/// # Examples
///
/// All data sent on the sender will become available on the receiver, and no
/// send will block the calling thread (this channel has an "infinite buffer").
///
-/// If the [`Receiver`] is disconnected while trying to [`send()`] with the
-/// [`Sender`], the [`send()`] method will return an error.
+/// If the [`Receiver`] is disconnected while trying to [`send`] with the
+/// [`Sender`], the [`send`] method will return an error.
///
-/// [`send()`]: ../../../std/sync/mpsc/struct.Sender.html#method.send
+/// [`send`]: ../../../std/sync/mpsc/struct.Sender.html#method.send
/// [`Sender`]: ../../../std/sync/mpsc/struct.Sender.html
/// [`Receiver`]: ../../../std/sync/mpsc/struct.Receiver.html
///
/// `bound` specifies the buffer size. When the internal buffer becomes full,
/// future sends will *block* waiting for the buffer to open up. Note that a
/// buffer size of 0 is valid, in which case this becomes "rendezvous channel"
-/// where each [`send()`] will not return until a recv is paired with it.
+/// where each [`send`] will not return until a recv is paired with it.
///
/// Like asynchronous channels, if the [`Receiver`] is disconnected while
-/// trying to [`send()`] with the [`SyncSender`], the [`send()`] method will
+/// trying to [`send`] with the [`SyncSender`], the [`send`] method will
/// return an error.
///
-/// [`send()`]: ../../../std/sync/mpsc/struct.SyncSender.html#method.send
+/// [`send`]: ../../../std/sync/mpsc/struct.SyncSender.html#method.send
/// [`SyncSender`]: ../../../std/sync/mpsc/struct.SyncSender.html
/// [`Receiver`]: ../../../std/sync/mpsc/struct.Receiver.html
///
/// The data protected by the mutex can be access through this guard via its
/// [`Deref`] and [`DerefMut`] implementations.
///
-/// This structure is created by the [`lock()`] and [`try_lock()`] methods on
+/// This structure is created by the [`lock`] and [`try_lock`] methods on
/// [`Mutex`].
///
/// [`Deref`]: ../../std/ops/trait.Deref.html
/// [`DerefMut`]: ../../std/ops/trait.DerefMut.html
-/// [`lock()`]: struct.Mutex.html#method.lock
-/// [`try_lock()`]: struct.Mutex.html#method.try_lock
+/// [`lock`]: struct.Mutex.html#method.lock
+/// [`try_lock`]: struct.Mutex.html#method.try_lock
/// [`Mutex`]: struct.Mutex.html
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
/// RAII structure used to release the shared read access of a lock when
/// dropped.
///
-/// This structure is created by the [`read()`] and [`try_read()`] methods on
+/// This structure is created by the [`read`] and [`try_read`] methods on
/// [`RwLock`].
///
-/// [`read()`]: struct.RwLock.html#method.read
-/// [`try_read()`]: struct.RwLock.html#method.try_read
+/// [`read`]: struct.RwLock.html#method.read
+/// [`try_read`]: struct.RwLock.html#method.try_read
/// [`RwLock`]: struct.RwLock.html
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
/// RAII structure used to release the exclusive write access of a lock when
/// dropped.
///
-/// This structure is created by the [`write()`] and [`try_write()`] methods
+/// This structure is created by the [`write`] and [`try_write`] methods
/// on [`RwLock`].
///
-/// [`write()`]: struct.RwLock.html#method.write
-/// [`try_write()`]: struct.RwLock.html#method.try_write
+/// [`write`]: struct.RwLock.html#method.write
+/// [`try_write`]: struct.RwLock.html#method.try_write
/// [`RwLock`]: struct.RwLock.html
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
// `None`.
(*ptr).dtor_running.set(true);
- // The OSX implementation of TLS apparently had an odd aspect to it
+ // The macOS implementation of TLS apparently had an odd aspect to it
// where the pointer we have may be overwritten while this destructor
// is running. Specifically if a TLS destructor re-accesses TLS it may
// trigger a re-initialization of all TLS variables, paving over at
// least some destroyed ones with initial values.
//
- // This means that if we drop a TLS value in place on OSX that we could
+ // This means that if we drop a TLS value in place on macOS that we could
// revert the value to its original state halfway through the
// destructor, which would be bad!
//
- // Hence, we use `ptr::read` on OSX (to move to a "safe" location)
+ // Hence, we use `ptr::read` on macOS (to move to a "safe" location)
// instead of drop_in_place.
if cfg!(target_os = "macos") {
ptr::read((*ptr).inner.get());
self.inner.reserve_exact(additional)
}
+ #[inline]
+ pub fn shrink_to_fit(&mut self) {
+ self.inner.shrink_to_fit()
+ }
+
pub fn as_slice(&self) -> &Slice {
unsafe { mem::transmute(&*self.inner) }
}
pub fn into_box(self) -> Box<Slice> {
unsafe { mem::transmute(self.inner.into_boxed_slice()) }
}
+
+ #[inline]
+ pub fn from_box(boxed: Box<Slice>) -> Buf {
+ let inner: Box<[u8]> = unsafe { mem::transmute(boxed) };
+ Buf { inner: inner.into_vec() }
+ }
}
impl Slice {
// mutex, and then after the fork they unlock it.
//
// Despite this information, libnative's spawn has been witnessed to
- // deadlock on both OSX and FreeBSD. I'm not entirely sure why, but
+ // deadlock on both macOS and FreeBSD. I'm not entirely sure why, but
// all collected backtraces point at malloc/free traffic in the
// child spawned process.
//
/// Some methods of getting a backtrace:
///
/// * The backtrace() functions on unix. It turns out this doesn't work very
-/// well for green threads on OSX, and the address to symbol portion of it
+/// well for green threads on macOS, and the address to symbol portion of it
/// suffers problems that are described below.
///
/// * Using libunwind. This is more difficult than it sounds because libunwind
///
/// * Use dladdr(). The original backtrace()-based idea actually uses dladdr()
/// behind the scenes to translate, and this is why backtrace() was not used.
-/// Conveniently, this method works fantastically on OSX. It appears dladdr()
+/// Conveniently, this method works fantastically on macOS. It appears dladdr()
/// uses magic to consult the local symbol table, or we're putting everything
-/// in the dynamic symbol table anyway. Regardless, for OSX, this is the
+/// in the dynamic symbol table anyway. Regardless, for macOS, this is the
/// method used for translation. It's provided by the system and easy to do.o
///
/// Sadly, all other systems have a dladdr() implementation that does not
/// * Use `libbacktrace`. It turns out that this is a small library bundled in
/// the gcc repository which provides backtrace and symbol translation
/// functionality. All we really need from it is the backtrace functionality,
-/// and we only really need this on everything that's not OSX, so this is the
+/// and we only really need this on everything that's not macOS, so this is the
/// chosen route for now.
///
/// In summary, the current situation uses libgcc_s to get a trace of stack
let len = self.len as usize - sun_path_offset();
let path = unsafe { mem::transmute::<&[libc::c_char], &[u8]>(&self.addr.sun_path) };
- // OSX seems to return a len of 16 and a zeroed sun_path for unnamed addresses
+ // macOS seems to return a len of 16 and a zeroed sun_path for unnamed addresses
if len == 0 || (cfg!(not(target_os = "linux")) && self.addr.sun_path[0] == 0) {
AddressKind::Unnamed
} else if self.addr.sun_path[0] == 0 {
/// Sets the read timeout for the socket.
///
- /// If the provided value is [`None`], then [`read()`] calls will block
+ /// If the provided value is [`None`], then [`read`] calls will block
/// indefinitely. It is an error to pass the zero [`Duration`] to this
/// method.
///
/// [`None`]: ../../../../std/option/enum.Option.html#variant.None
- /// [`read()`]: ../../../../std/io/trait.Read.html#tymethod.read
+ /// [`read`]: ../../../../std/io/trait.Read.html#tymethod.read
/// [`Duration`]: ../../../../std/time/struct.Duration.html
///
/// # Examples
/// Sets the write timeout for the socket.
///
- /// If the provided value is [`None`], then [`write()`] calls will block
+ /// If the provided value is [`None`], then [`write`] calls will block
/// indefinitely. It is an error to pass the zero [`Duration`] to this
/// method.
///
/// [`None`]: ../../../../std/option/enum.Option.html#variant.None
- /// [`read()`]: ../../../../std/io/trait.Write.html#tymethod.write
+ /// [`read`]: ../../../../std/io/trait.Write.html#tymethod.write
/// [`Duration`]: ../../../../std/time/struct.Duration.html
///
/// # Examples
/// Connects the socket to the specified address.
///
- /// The [`send()`] method may be used to send data to the specified address.
- /// [`recv()`] and [`recv_from()`] will only receive data from that address.
+ /// The [`send`] method may be used to send data to the specified address.
+ /// [`recv`] and [`recv_from`] will only receive data from that address.
///
- /// [`send()`]: #method.send
- /// [`recv()`]: #method.recv
- /// [`recv_from()`]: #method.recv_from
+ /// [`send`]: #method.send
+ /// [`recv`]: #method.recv
+ /// [`recv_from`]: #method.recv_from
///
/// # Examples
///
/// Returns the address of this socket's peer.
///
- /// The [`connect()`] method will connect the socket to a peer.
+ /// The [`connect`] method will connect the socket to a peer.
///
- /// [`connect()`]: #method.connect
+ /// [`connect`]: #method.connect
///
/// # Examples
///
/// Sets the read timeout for the socket.
///
- /// If the provided value is [`None`], then [`recv()`] and [`recv_from()`] calls will
+ /// If the provided value is [`None`], then [`recv`] and [`recv_from`] calls will
/// block indefinitely. It is an error to pass the zero [`Duration`] to this
/// method.
///
/// [`None`]: ../../../../std/option/enum.Option.html#variant.None
- /// [`recv()`]: #method.recv
- /// [`recv_from()`]: #method.recv_from
+ /// [`recv`]: #method.recv
+ /// [`recv_from`]: #method.recv_from
/// [`Duration`]: ../../../../std/time/struct.Duration.html
///
/// # Examples
/// Sets the write timeout for the socket.
///
- /// If the provided value is [`None`], then [`send()`] and [`send_to()`] calls will
+ /// If the provided value is [`None`], then [`send`] and [`send_to`] calls will
/// block indefinitely. It is an error to pass the zero [`Duration`] to this
/// method.
///
/// [`None`]: ../../../../std/option/enum.Option.html#variant.None
- /// [`send()`]: #method.send
- /// [`send_to()`]: #method.send_to
+ /// [`send`]: #method.send
+ /// [`send_to`]: #method.send_to
/// [`Duration`]: ../../../../std/time/struct.Duration.html
///
/// # Examples
register_dtor_fallback(t, dtor);
}
-// OSX's analog of the above linux function is this _tlv_atexit function.
+// macOS's analog of the above linux function is this _tlv_atexit function.
// The disassembly of thread_local globals in C++ (at least produced by
// clang) will have this show up in the output.
#[cfg(target_os = "macos")]
// `None`.
(*ptr).dtor_running.set(true);
- // The OSX implementation of TLS apparently had an odd aspect to it
+ // The macOS implementation of TLS apparently had an odd aspect to it
// where the pointer we have may be overwritten while this destructor
// is running. Specifically if a TLS destructor re-accesses TLS it may
// trigger a re-initialization of all TLS variables, paving over at
// least some destroyed ones with initial values.
//
- // This means that if we drop a TLS value in place on OSX that we could
+ // This means that if we drop a TLS value in place on macOS that we could
// revert the value to its original state halfway through the
// destructor, which would be bad!
//
- // Hence, we use `ptr::read` on OSX (to move to a "safe" location)
+ // Hence, we use `ptr::read` on macOS (to move to a "safe" location)
// instead of drop_in_place.
if cfg!(target_os = "macos") {
ptr::read((*ptr).inner.get());
// with the man page quoting that if the count of bytes to read is
// greater than `SSIZE_MAX` the result is "unspecified".
//
- // On OSX, however, apparently the 64-bit libc is either buggy or
+ // On macOS, however, apparently the 64-bit libc is either buggy or
// intentionally showing odd behavior by rejecting any read with a size
// larger than or equal to INT_MAX. To handle both of these the read
// size is capped on both platforms.
// Linux kernel then the flag is just ignored by the OS, so we continue
// to explicitly ask for a CLOEXEC fd here.
//
- // The CLOEXEC flag, however, is supported on versions of OSX/BSD/etc
+ // The CLOEXEC flag, however, is supported on versions of macOS/BSD/etc
// that we support, so we only do this on Linux currently.
if cfg!(target_os = "linux") {
fd.set_cloexec()?;
#[cfg(target_os = "macos")]
fn get_path(fd: c_int) -> Option<PathBuf> {
// FIXME: The use of PATH_MAX is generally not encouraged, but it
- // is inevitable in this case because OS X defines `fcntl` with
+ // is inevitable in this case because macOS defines `fcntl` with
// `F_GETPATH` in terms of `MAXPATHLEN`, and there are no
// alternatives. If a better method is invented, it should be used
// instead.
self.inner.reserve_exact(additional)
}
+ #[inline]
+ pub fn shrink_to_fit(&mut self) {
+ self.inner.shrink_to_fit()
+ }
+
pub fn as_slice(&self) -> &Slice {
unsafe { mem::transmute(&*self.inner) }
}
pub fn into_box(self) -> Box<Slice> {
unsafe { mem::transmute(self.inner.into_boxed_slice()) }
}
+
+ #[inline]
+ pub fn from_box(boxed: Box<Slice>) -> Buf {
+ let inner: Box<[u8]> = unsafe { mem::transmute(boxed) };
+ Buf { inner: inner.into_vec() }
+ }
}
impl Slice {
}
// See #14232 for more information, but it appears that signal delivery to a
- // newly spawned process may just be raced in the OSX, so to prevent this
- // test from being flaky we ignore it on OSX.
+ // newly spawned process may just be raced in the macOS, so to prevent this
+ // test from being flaky we ignore it on macOS.
#[test]
#[cfg_attr(target_os = "macos", ignore)]
#[cfg_attr(target_os = "nacl", ignore)] // no signals on NaCl.
// mutex, and then after the fork they unlock it.
//
// Despite this information, libnative's spawn has been witnessed to
- // deadlock on both OSX and FreeBSD. I'm not entirely sure why, but
+ // deadlock on both macOS and FreeBSD. I'm not entirely sure why, but
// all collected backtraces point at malloc/free traffic in the
// child spawned process.
//
let stack = libc::stack_t {
ss_sp: ptr::null_mut(),
ss_flags: SS_DISABLE,
- // Workaround for bug in MacOS implementation of sigaltstack
+ // Workaround for bug in macOS implementation of sigaltstack
// UNIX2003 which returns ENOMEM when disabling a stack while
// passing ss_size smaller than MINSIGSTKSZ. According to POSIX
// both ss_sp and ss_size should be ignored in this case.
self.inner.reserve_exact(additional)
}
+ pub fn shrink_to_fit(&mut self) {
+ self.inner.shrink_to_fit()
+ }
+
#[inline]
pub fn into_box(self) -> Box<Slice> {
unsafe { mem::transmute(self.inner.into_box()) }
}
+
+ #[inline]
+ pub fn from_box(boxed: Box<Slice>) -> Buf {
+ let inner: Box<Wtf8> = unsafe { mem::transmute(boxed) };
+ Buf { inner: Wtf8Buf::from_box(inner) }
+ }
}
impl Slice {
msg: *const libc::c_char,
errnum: libc::c_int);
enum backtrace_state {}
-#[link(name = "backtrace", kind = "static")]
-#[cfg(all(not(test), not(cargobuild)))]
-extern {}
extern {
fn backtrace_create_state(filename: *const libc::c_char,
self.bytes.reserve_exact(additional)
}
+ #[inline]
+ pub fn shrink_to_fit(&mut self) {
+ self.bytes.shrink_to_fit()
+ }
+
/// Returns the number of bytes that this string buffer can hold without reallocating.
#[inline]
pub fn capacity(&self) -> usize {
pub fn into_box(self) -> Box<Wtf8> {
unsafe { mem::transmute(self.bytes.into_boxed_slice()) }
}
+
+ /// Converts a `Box<Wtf8>` into a `Wtf8Buf`.
+ pub fn from_box(boxed: Box<Wtf8>) -> Wtf8Buf {
+ let bytes: Box<[u8]> = unsafe { mem::transmute(boxed) };
+ Wtf8Buf { bytes: bytes.into_vec() }
+ }
}
/// Create a new WTF-8 string from an iterator of code points.
/// destroyed, but not all platforms have this guard. Those platforms that do
/// not guard typically have a synthetic limit after which point no more
/// destructors are run.
-/// 3. On OSX, initializing TLS during destruction of other TLS slots can
+/// 3. On macOS, initializing TLS during destruction of other TLS slots can
/// sometimes cancel *all* destructors for the current thread, whether or not
/// the slots have already had their destructors run or not.
#[stable(feature = "rust1", since = "1.0.0")]
}
// Note that this test will deadlock if TLS destructors aren't run (this
- // requires the destructor to be run to pass the test). OSX has a known bug
+ // requires the destructor to be run to pass the test). macOS has a known bug
// where dtors-in-dtors may cancel other destructors, so we just ignore this
- // test on OSX.
+ // test on macOS.
#[test]
#[cfg_attr(target_os = "macos", ignore)]
fn dtors_in_dtors_in_dtors() {
//! two ways:
//!
//! * By spawning a new thread, e.g. using the [`thread::spawn`][`spawn`]
-//! function, and calling [`thread()`] on the [`JoinHandle`].
-//! * By requesting the current thread, using the [`thread::current()`] function.
+//! function, and calling [`thread`] on the [`JoinHandle`].
+//! * By requesting the current thread, using the [`thread::current`] function.
//!
-//! The [`thread::current()`] function is available even for threads not spawned
+//! The [`thread::current`] function is available even for threads not spawned
//! by the APIs of this module.
//!
//! ## Blocking support: park and unpark
//!
//! Every thread is equipped with some basic low-level blocking support, via the
-//! [`thread::park()`][`park()`] function and [`thread::Thread::unpark()`][`unpark()`]
-//! method. [`park()`] blocks the current thread, which can then be resumed from
-//! another thread by calling the [`unpark()`] method on the blocked thread's handle.
+//! [`thread::park`][`park`] function and [`thread::Thread::unpark()`][`unpark`]
+//! method. [`park`] blocks the current thread, which can then be resumed from
+//! another thread by calling the [`unpark`] method on the blocked thread's handle.
//!
//! Conceptually, each [`Thread`] handle has an associated token, which is
//! initially not present:
//!
-//! * The [`thread::park()`][`park()`] function blocks the current thread unless or until
+//! * The [`thread::park`][`park`] function blocks the current thread unless or until
//! the token is available for its thread handle, at which point it atomically
//! consumes the token. It may also return *spuriously*, without consuming the
-//! token. [`thread::park_timeout()`] does the same, but allows specifying a
+//! token. [`thread::park_timeout`] does the same, but allows specifying a
//! maximum time to block the thread for.
//!
-//! * The [`unpark()`] method on a [`Thread`] atomically makes the token available
+//! * The [`unpark`] method on a [`Thread`] atomically makes the token available
//! if it wasn't already.
//!
//! In other words, each [`Thread`] acts a bit like a semaphore with initial count
//! The API is typically used by acquiring a handle to the current thread,
//! placing that handle in a shared data structure so that other threads can
//! find it, and then `park`ing. When some desired condition is met, another
-//! thread calls [`unpark()`] on the handle.
+//! thread calls [`unpark`] on the handle.
//!
//! The motivation for this design is twofold:
//!
//! [`Arc`]: ../../std/sync/struct.Arc.html
//! [`spawn`]: ../../std/thread/fn.spawn.html
//! [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
-//! [`thread()`]: ../../std/thread/struct.JoinHandle.html#method.thread
+//! [`thread`]: ../../std/thread/struct.JoinHandle.html#method.thread
//! [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
//! [`Result`]: ../../std/result/enum.Result.html
//! [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
//! [`Err`]: ../../std/result/enum.Result.html#variant.Err
//! [`panic!`]: ../../std/macro.panic.html
//! [`Builder`]: ../../std/thread/struct.Builder.html
-//! [`thread::current()`]: ../../std/thread/fn.spawn.html
+//! [`thread::current`]: ../../std/thread/fn.spawn.html
//! [`Thread`]: ../../std/thread/struct.Thread.html
-//! [`park()`]: ../../std/thread/fn.park.html
-//! [`unpark()`]: ../../std/thread/struct.Thread.html#method.unpark
-//! [`thread::park_timeout()`]: ../../std/thread/fn.park_timeout.html
+//! [`park`]: ../../std/thread/fn.park.html
+//! [`unpark`]: ../../std/thread/struct.Thread.html#method.unpark
+//! [`thread::park_timeout`]: ../../std/thread/fn.park_timeout.html
//! [`Cell`]: ../cell/struct.Cell.html
//! [`RefCell`]: ../cell/struct.RefCell.html
//! [`thread_local!`]: ../macro.thread_local.html
/// Blocks unless or until the current thread's token is made available.
///
/// Every thread is equipped with some basic low-level blocking support, via
-/// the `park()` function and the [`unpark()`][unpark] method. These can be
+/// the `park()` function and the [`unpark`][unpark] method. These can be
/// used as a more CPU-efficient implementation of a spinlock.
///
/// [unpark]: struct.Thread.html#method.unpark
/// Returns an iterator that yields the lowercase equivalent of a `char`.
///
-/// This `struct` is created by the [`to_lowercase()`] method on [`char`]. See
+/// This `struct` is created by the [`to_lowercase`] method on [`char`]. See
/// its documentation for more.
///
-/// [`to_lowercase()`]: ../../std/primitive.char.html#method.to_lowercase
+/// [`to_lowercase`]: ../../std/primitive.char.html#method.to_lowercase
/// [`char`]: ../../std/primitive.char.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToLowercase(CaseMappingIter);
/// Returns an iterator that yields the uppercase equivalent of a `char`.
///
-/// This `struct` is created by the [`to_uppercase()`] method on [`char`]. See
+/// This `struct` is created by the [`to_uppercase`] method on [`char`]. See
/// its documentation for more.
///
-/// [`to_uppercase()`]: ../../std/primitive.char.html#method.to_uppercase
+/// [`to_uppercase`]: ../../std/primitive.char.html#method.to_uppercase
/// [`char`]: ../../std/primitive.char.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ToUppercase(CaseMappingIter);
/// * `a-z`
/// * `A-Z`
///
- /// For a more comprehensive understanding of 'digit', see [`is_numeric()`][is_numeric].
+ /// For a more comprehensive understanding of 'digit', see [`is_numeric`][is_numeric].
///
/// [is_numeric]: #method.is_numeric
///
/// Returns the number of 16-bit code units this `char` would need if
/// encoded in UTF-16.
///
- /// See the documentation for [`len_utf8()`] for more explanation of this
+ /// See the documentation for [`len_utf8`] for more explanation of this
/// concept. This function is a mirror, but for UTF-16 instead of UTF-8.
///
- /// [`len_utf8()`]: #method.len_utf8
+ /// [`len_utf8`]: #method.len_utf8
///
/// # Examples
///
pub fn from_ident(s: Span, identifier: Ident) -> Path {
Path {
span: s,
- segments: vec![identifier.into()],
+ segments: vec![PathSegment::from_ident(identifier, s)],
}
}
pub struct PathSegment {
/// The identifier portion of this path segment.
pub identifier: Ident,
+ /// Span of the segment identifier.
+ pub span: Span,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
pub parameters: Option<P<PathParameters>>,
}
-impl From<Ident> for PathSegment {
- fn from(id: Ident) -> Self {
- PathSegment { identifier: id, parameters: None }
- }
-}
-
impl PathSegment {
+ pub fn from_ident(ident: Ident, span: Span) -> Self {
+ PathSegment { identifier: ident, span: span, parameters: None }
+ }
pub fn crate_root() -> Self {
PathSegment {
identifier: keywords::CrateRoot.ident(),
+ span: DUMMY_SP,
parameters: None,
}
}
pub module: Mod,
pub attrs: Vec<Attribute>,
pub span: Span,
- pub exported_macros: Vec<MacroDef>,
}
/// A spanned compile-time attribute list item.
Closure(CaptureBy, P<FnDecl>, P<Expr>, Span),
/// A block (`{ ... }`)
Block(P<Block>),
+ /// A catch block (`catch { ... }`)
+ Catch(P<Block>),
/// An assignment (`a = foo()`)
Assign(P<Expr>, P<Expr>),
Option<TraitRef>, // (optional) trait this impl implements
P<Ty>, // self
Vec<ImplItem>),
- /// A macro invocation (which includes macro definition).
+ /// A macro invocation.
///
/// E.g. `macro_rules! foo { .. }` or `foo!(..)`
Mac(Mac),
+
+ /// A macro definition.
+ MacroDef(ThinTokenStream),
}
impl ItemKind {
ItemKind::Union(..) => "union",
ItemKind::Trait(..) => "trait",
ItemKind::Mac(..) |
+ ItemKind::MacroDef(..) |
ItemKind::Impl(..) |
ItemKind::DefaultImpl(..) => "item"
}
}
}
-/// A macro definition, in this crate or imported from another.
-///
-/// Not parsed directly, but created on macro import or `macro_rules!` expansion.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct MacroDef {
- pub ident: Ident,
- pub attrs: Vec<Attribute>,
- pub id: NodeId,
- pub span: Span,
- pub body: ThinTokenStream,
-}
-
-impl MacroDef {
- pub fn stream(&self) -> TokenStream {
- self.body.clone().into()
- }
-}
-
#[cfg(test)]
mod tests {
use serialize;
use codemap::{self, CodeMap, ExpnInfo, Spanned, respan};
use syntax_pos::{Span, ExpnId, NO_EXPANSION};
use errors::{DiagnosticBuilder, FatalError};
-use ext::expand::{self, Expansion};
+use ext::expand::{self, Expansion, Invocation};
use ext::hygiene::Mark;
use fold::{self, Folder};
use parse::{self, parser, DirectoryOwnership};
fn is_whitelisted_legacy_custom_derive(&self, name: Name) -> bool;
fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion, derives: &[Mark]);
- fn add_ext(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>);
- fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>);
+ fn add_builtin(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>);
fn resolve_imports(&mut self);
// Resolves attribute and derive legacy macros from `#![plugin(..)]`.
fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<Attribute>) -> Option<Attribute>;
- fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind,
- force: bool) -> Result<Rc<SyntaxExtension>, Determinacy>;
+ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy>;
+ fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Rc<SyntaxExtension>, Determinacy>;
}
#[derive(Copy, Clone, Debug)]
fn is_whitelisted_legacy_custom_derive(&self, _name: Name) -> bool { false }
fn visit_expansion(&mut self, _invoc: Mark, _expansion: &Expansion, _derives: &[Mark]) {}
- fn add_ext(&mut self, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
- fn add_expansions_at_stmt(&mut self, _id: ast::NodeId, _macros: Vec<Mark>) {}
+ fn add_builtin(&mut self, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
fn resolve_imports(&mut self) {}
fn find_legacy_attr_invoc(&mut self, _attrs: &mut Vec<Attribute>) -> Option<Attribute> { None }
+ fn resolve_invoc(&mut self, _invoc: &mut Invocation, _scope: Mark, _force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
+ Err(Determinacy::Determined)
+ }
fn resolve_macro(&mut self, _scope: Mark, _path: &ast::Path, _kind: MacroKind,
_force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
Err(Determinacy::Determined)
fn qpath(&self, self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::Ident)
+ ident: ast::SpannedIdent)
-> (ast::QSelf, ast::Path);
fn qpath_all(&self, self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::Ident,
+ ident: ast::SpannedIdent,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>>,
bindings: Vec<ast::TypeBinding>)
segments.push(ast::PathSegment::crate_root());
}
- segments.extend(idents.into_iter().map(Into::into));
+ segments.extend(idents.into_iter().map(|i| ast::PathSegment::from_ident(i, sp)));
let parameters = if lifetimes.is_empty() && types.is_empty() && bindings.is_empty() {
None
} else {
bindings: bindings,
})))
};
- segments.push(ast::PathSegment { identifier: last_identifier, parameters: parameters });
+ segments.push(ast::PathSegment {
+ identifier: last_identifier,
+ span: sp,
+ parameters: parameters
+ });
ast::Path {
span: sp,
segments: segments,
fn qpath(&self,
self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::Ident)
+ ident: ast::SpannedIdent)
-> (ast::QSelf, ast::Path) {
self.qpath_all(self_type, trait_path, ident, vec![], vec![], vec![])
}
fn qpath_all(&self,
self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::Ident,
+ ident: ast::SpannedIdent,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>>,
bindings: Vec<ast::TypeBinding>)
bindings: bindings,
};
path.segments.push(ast::PathSegment {
- identifier: ident,
+ identifier: ident.node,
+ span: ident.span,
parameters: Some(P(ast::PathParameters::AngleBracketed(parameters))),
});
pub struct Invocation {
pub kind: InvocationKind,
expansion_kind: ExpansionKind,
- expansion_data: ExpansionData,
+ pub expansion_data: ExpansionData,
}
pub enum InvocationKind {
let scope =
if self.monotonic { invoc.expansion_data.mark } else { orig_expansion_data.mark };
- let ext = match self.resolve_invoc(&mut invoc, scope, force) {
+ let ext = match self.cx.resolver.resolve_invoc(&mut invoc, scope, force) {
Ok(ext) => Some(ext),
Err(Determinacy::Determined) => None,
Err(Determinacy::Undetermined) => {
result
}
- fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
- -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
- let (attr, traits, item) = match invoc.kind {
- InvocationKind::Bang { ref mac, .. } => {
- return self.cx.resolver.resolve_macro(scope, &mac.node.path,
- MacroKind::Bang, force).map(Some);
- }
- InvocationKind::Attr { attr: None, .. } => return Ok(None),
- InvocationKind::Derive { name, span, .. } => {
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
- return self.cx.resolver.resolve_macro(scope, &path,
- MacroKind::Derive, force).map(Some)
- }
- InvocationKind::Attr { ref mut attr, ref traits, ref mut item } => (attr, traits, item),
- };
-
- let (attr_name, path) = {
- let attr = attr.as_ref().unwrap();
- (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name())))
- };
-
- let mut determined = true;
- match self.cx.resolver.resolve_macro(scope, &path, MacroKind::Attr, force) {
- Ok(ext) => return Ok(Some(ext)),
- Err(Determinacy::Undetermined) => determined = false,
- Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
- _ => {}
- }
-
- for &(name, span) in traits {
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
- match self.cx.resolver.resolve_macro(scope, &path, MacroKind::Derive, force) {
- Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
- if inert_attrs.contains(&attr_name) {
- // FIXME(jseyfried) Avoid `mem::replace` here.
- let dummy_item = placeholder(ExpansionKind::Items, ast::DUMMY_NODE_ID)
- .make_items().pop().unwrap();
- *item = mem::replace(item, Annotatable::Item(dummy_item))
- .map_attrs(|mut attrs| {
- let inert_attr = attr.take().unwrap();
- attr::mark_known(&inert_attr);
- if self.cx.ecfg.proc_macro_enabled() {
- *attr = find_attr_invoc(&mut attrs);
- }
- attrs.push(inert_attr);
- attrs
- });
- }
- return Err(Determinacy::Undetermined);
- },
- Err(Determinacy::Undetermined) => determined = false,
- Err(Determinacy::Determined) => {}
- }
- }
-
- Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined })
- }
-
fn expand_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) -> Expansion {
match invoc.kind {
InvocationKind::Bang { .. } => self.expand_bang_invoc(invoc, ext),
let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess);
let item_toks = stream_for_item(&item, &self.cx.parse_sess);
+ let span = Span {
+ expn_id: self.cx.codemap().record_expansion(ExpnInfo {
+ call_site: attr.span,
+ callee: NameAndSpan {
+ format: MacroAttribute(name),
+ span: None,
+ allow_internal_unstable: false,
+ },
+ }),
+ ..attr.span
+ };
+
let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
- self.parse_expansion(tok_result, kind, name, attr.span)
+ self.parse_expansion(tok_result, kind, name, span)
}
SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => {
self.cx.span_err(attr.span, &format!("`{}` is a derive mode", name));
let extname = path.segments.last().unwrap().identifier.name;
let ident = ident.unwrap_or(keywords::Invalid.ident());
- let marked_tts = mark_tts(mac.node.stream(), mark);
+ let marked_tts =
+ noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None });
let opt_expanded = match *ext {
NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
if ident.name != keywords::Invalid.name() {
}
}
-fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
+pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
for i in 0 .. attrs.len() {
if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
return Some(attrs.remove(i));
match item.node {
ast::ItemKind::Mac(..) => {
self.check_attributes(&item.attrs);
- let is_macro_def = if let ItemKind::Mac(ref mac) = item.node {
- mac.node.path.segments[0].identifier.name == "macro_rules"
- } else {
- unreachable!()
- };
-
- item.and_then(|mut item| match item.node {
- ItemKind::Mac(_) if is_macro_def => {
- item.id = Mark::fresh().as_placeholder_id();
- SmallVector::one(P(item))
- }
+ item.and_then(|item| match item.node {
ItemKind::Mac(mac) => {
self.collect(ExpansionKind::Items, InvocationKind::Bang {
mac: mac,
}
fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {
- noop_fold_item_kind(self.cfg.configure_item_kind(item), self)
+ match item {
+ ast::ItemKind::MacroDef(..) => item,
+ _ => noop_fold_item_kind(self.cfg.configure_item_kind(item), self),
+ }
}
fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId {
span
}
}
-
-// apply a given mark to the given token trees. Used prior to expansion of a macro.
-pub fn mark_tts(tts: TokenStream, m: Mark) -> TokenStream {
- noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
-}
}
/// A mark is a unique id associated with a macro expansion.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default, RustcEncodable, RustcDecodable)]
pub struct Mark(u32);
impl Mark {
})
})
}
-
- /// If `ident` is macro expanded, return the source ident from the macro definition
- /// and the mark of the expansion that created the macro definition.
- pub fn source(self) -> (Self /* source context */, Mark /* source macro */) {
- let macro_def_ctxt = self.data().prev_ctxt.data();
- (macro_def_ctxt.prev_ctxt, macro_def_ctxt.outer_mark)
- }
}
impl fmt::Debug for SyntaxContext {
use util::small_vector::SmallVector;
use std::collections::HashMap;
-use std::mem;
pub fn placeholder(kind: ExpansionKind, id: ast::NodeId) -> Expansion {
fn mac_placeholder() -> ast::Mac {
fn fold_block(&mut self, block: P<ast::Block>) -> P<ast::Block> {
noop_fold_block(block, self).map(|mut block| {
- let mut macros = Vec::new();
let mut remaining_stmts = block.stmts.len();
block.stmts = block.stmts.move_flat_map(|mut stmt| {
remaining_stmts -= 1;
- // `macro_rules!` macro definition
- if let ast::StmtKind::Item(ref item) = stmt.node {
- if let ast::ItemKind::Mac(_) = item.node {
- macros.push(Mark::from_placeholder_id(item.id));
- return None;
- }
- }
-
match stmt.node {
// Avoid wasting a node id on a trailing expression statement,
// which shares a HIR node with the expression itself.
_ => {}
}
- if self.monotonic && !macros.is_empty() {
- let macros = mem::replace(&mut macros, Vec::new());
- self.cx.resolver.add_expansions_at_stmt(stmt.id, macros);
- }
-
Some(stmt)
});
// Holy self-referential!
/// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
+pub fn compile(sess: &ParseSess, def: &ast::Item) -> SyntaxExtension {
let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
];
// Parse the macro_rules! invocation
- let argument_map = match parse(sess, def.body.clone().into(), &argument_gram, None) {
+ let body = match def.node {
+ ast::ItemKind::MacroDef(ref body) => body.clone().into(),
+ _ => unreachable!(),
+ };
+ let argument_map = match parse(sess, body, &argument_gram, None) {
Success(m) => m,
Failure(sp, tok) => {
let s = parse_failure_msg(tok);
// `extern "x86-interrupt" fn()`
(active, abi_x86_interrupt, "1.17.0", Some(40180)),
+
+ // Allows the `catch {...}` expression
+ (active, catch_expr, "1.17.0", Some(31436)),
);
declare_features! (
}
}
}
+ ast::ExprKind::Catch(_) => {
+ gate_feature_post!(&self, catch_expr, e.span, "`catch` expression is experimental");
+ }
_ => {}
}
visit::walk_expr(self, e);
pub fn noop_fold_path<T: Folder>(Path { segments, span }: Path, fld: &mut T) -> Path {
Path {
- segments: segments.move_map(|PathSegment {identifier, parameters}| PathSegment {
+ segments: segments.move_map(|PathSegment {identifier, span, parameters}| PathSegment {
identifier: fld.fold_ident(identifier),
+ span: fld.new_span(span),
parameters: parameters.map(|ps| ps.map(|ps| fld.fold_path_parameters(ps))),
}),
span: fld.new_span(span)
items.move_flat_map(|item| folder.fold_trait_item(item)),
),
ItemKind::Mac(m) => ItemKind::Mac(folder.fold_mac(m)),
+ ItemKind::MacroDef(tts) => ItemKind::MacroDef(folder.fold_tts(tts.into()).into()),
}
}
}
}
-pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, mut exported_macros, span}: Crate,
+pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, span}: Crate,
folder: &mut T) -> Crate {
let mut items = folder.fold_item(P(ast::Item {
ident: keywords::Invalid.ident(),
}, vec![], span)
};
- for def in &mut exported_macros {
- def.id = folder.new_id(def.id);
- }
-
Crate {
module: module,
attrs: attrs,
- exported_macros: exported_macros,
span: span,
}
}
};
}
ExprKind::Try(ex) => ExprKind::Try(folder.fold_expr(ex)),
+ ExprKind::Catch(body) => ExprKind::Catch(folder.fold_block(body)),
},
id: folder.new_id(id),
span: folder.new_span(span),
matches_codepattern,
"matches_codepattern",
pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
- "zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
+ "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
}
}
Span {lo: BytePos(a), hi: BytePos(b), expn_id: NO_EXPANSION}
}
+ fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment {
+ ast::PathSegment::from_ident(Ident::from_str(s), sp(lo, hi))
+ }
+
#[test] fn path_exprs_1() {
assert!(string_to_expr("a".to_string()) ==
P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span: sp(0, 1),
- segments: vec![Ident::from_str("a").into()],
+ segments: vec![str2seg("a", 0, 1)],
}),
span: sp(0, 1),
attrs: ThinVec::new(),
node: ast::ExprKind::Path(None, ast::Path {
span: sp(0, 6),
segments: vec![ast::PathSegment::crate_root(),
- Ident::from_str("a").into(),
- Ident::from_str("b").into()]
+ str2seg("a", 2, 3),
+ str2seg("b", 5, 6)]
}),
span: sp(0, 6),
attrs: ThinVec::new(),
id: ast::DUMMY_NODE_ID,
node:ast::ExprKind::Path(None, ast::Path{
span: sp(7, 8),
- segments: vec![Ident::from_str("d").into()],
+ segments: vec![str2seg("d", 7, 8)],
}),
span:sp(7,8),
attrs: ThinVec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span:sp(0,1),
- segments: vec![Ident::from_str("b").into()],
+ segments: vec![str2seg("b", 0, 1)],
}),
span: sp(0,1),
attrs: ThinVec::new()})),
ty: P(ast::Ty{id: ast::DUMMY_NODE_ID,
node: ast::TyKind::Path(None, ast::Path{
span:sp(10,13),
- segments: vec![Ident::from_str("i32").into()],
+ segments: vec![str2seg("i32", 10, 13)],
}),
span:sp(10,13)
}),
node: ast::ExprKind::Path(None,
ast::Path{
span:sp(17,18),
- segments: vec![Ident::from_str("b").into()],
+ segments: vec![str2seg("b", 17, 18)],
}),
span: sp(17,18),
attrs: ThinVec::new()})),
use ast::MacStmtStyle;
use ast::Mac_;
use ast::{MutTy, Mutability};
-use ast::{Pat, PatKind};
+use ast::{Pat, PatKind, PathSegment};
use ast::{PolyTraitRef, QSelf};
use ast::{Stmt, StmtKind};
use ast::{VariantData, StructField};
self.expected_tokens.clear();
}
- pub fn look_ahead<R, F>(&mut self, dist: usize, f: F) -> R where
+ pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
F: FnOnce(&token::Token) -> R,
{
if dist == 0 {
};
if is_global {
- segments.insert(0, ast::PathSegment::crate_root());
+ segments.insert(0, PathSegment::crate_root());
}
// Assemble the span.
/// - `a::b<T,U>::c<V,W>`
/// - `a::b<T,U>::c(V) -> W`
/// - `a::b<T,U>::c(V)`
- pub fn parse_path_segments_without_colons(&mut self) -> PResult<'a, Vec<ast::PathSegment>> {
+ pub fn parse_path_segments_without_colons(&mut self) -> PResult<'a, Vec<PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = self.parse_path_segment_ident()?;
+ let ident_span = self.prev_span;
if self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::Lt) {
self.bump();
};
// Assemble and push the result.
- segments.push(ast::PathSegment { identifier: identifier, parameters: parameters });
+ segments.push(PathSegment {
+ identifier: identifier,
+ span: ident_span,
+ parameters: parameters
+ });
// Continue only if we see a `::`
if !self.eat(&token::ModSep) {
/// Examples:
/// - `a::b::<T,U>::c`
- pub fn parse_path_segments_with_colons(&mut self) -> PResult<'a, Vec<ast::PathSegment>> {
+ pub fn parse_path_segments_with_colons(&mut self) -> PResult<'a, Vec<PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = self.parse_path_segment_ident()?;
+ let ident_span = self.prev_span;
// If we do not see a `::`, stop.
if !self.eat(&token::ModSep) {
- segments.push(identifier.into());
+ segments.push(PathSegment::from_ident(identifier, ident_span));
return Ok(segments);
}
// Consumed `a::b::<`, go look for types
let (lifetimes, types, bindings) = self.parse_generic_args()?;
self.expect_gt()?;
- segments.push(ast::PathSegment {
+ segments.push(PathSegment {
identifier: identifier,
+ span: ident_span,
parameters: ast::AngleBracketedParameterData {
lifetimes: lifetimes,
types: types,
}
} else {
// Consumed `a::`, go look for `b`
- segments.push(identifier.into());
+ segments.push(PathSegment::from_ident(identifier, ident_span));
}
}
}
/// Examples:
/// - `a::b::c`
pub fn parse_path_segments_without_types(&mut self)
- -> PResult<'a, Vec<ast::PathSegment>> {
+ -> PResult<'a, Vec<PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = self.parse_path_segment_ident()?;
// Assemble and push the result.
- segments.push(identifier.into());
+ segments.push(PathSegment::from_ident(identifier, self.prev_span));
// If we do not see a `::` or see `::{`/`::*`, stop.
if !self.check(&token::ModSep) || self.is_import_coupler() {
BlockCheckMode::Unsafe(ast::UserProvided),
attrs);
}
+ if self.is_catch_expr() {
+ assert!(self.eat_keyword(keywords::Do));
+ assert!(self.eat_keyword(keywords::Catch));
+ let lo = self.prev_span.lo;
+ return self.parse_catch_expr(lo, attrs);
+ }
if self.eat_keyword(keywords::Return) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
Ok(self.mk_expr(span_lo, hi, ExprKind::Loop(body, opt_ident), attrs))
}
+ /// Parse a `do catch {...}` expression (`do catch` token already eaten)
+ pub fn parse_catch_expr(&mut self, span_lo: BytePos, mut attrs: ThinVec<Attribute>)
+ -> PResult<'a, P<Expr>>
+ {
+ let (iattrs, body) = self.parse_inner_attrs_and_block()?;
+ attrs.extend(iattrs);
+ let hi = body.span.hi;
+ Ok(self.mk_expr(span_lo, hi, ExprKind::Catch(body), attrs))
+ }
+
// `match` token already eaten
fn parse_match_expr(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
let match_span = self.prev_span;
})
}
- fn is_union_item(&mut self) -> bool {
+ fn is_catch_expr(&mut self) -> bool {
+ self.token.is_keyword(keywords::Do) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
+ self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
+
+ // prevent `while catch {} {}`, `if catch {} {} else {}`, etc.
+ !self.restrictions.contains(Restrictions::RESTRICTION_NO_STRUCT_LITERAL)
+ }
+
+ fn is_union_item(&self) -> bool {
self.token.is_keyword(keywords::Union) &&
self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword())
}
+ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility)
+ -> PResult<'a, Option<P<Item>>> {
+ let lo = self.span.lo;
+ match self.token {
+ token::Ident(ident) if ident.name == "macro_rules" => {
+ if self.look_ahead(1, |t| *t == token::Not) {
+ let prev_span = self.prev_span;
+ self.complain_if_pub_macro(vis, prev_span);
+ self.bump();
+ self.bump();
+ }
+ }
+ _ => return Ok(None),
+ };
+
+ let id = self.parse_ident()?;
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ if delim != token::Brace {
+ if !self.eat(&token::Semi) {
+ let msg = "macros that expand to items must either be surrounded with braces \
+ or followed by a semicolon";
+ self.span_err(self.prev_span, msg);
+ }
+ }
+
+ let hi = self.prev_span.hi;
+ let kind = ItemKind::MacroDef(tts);
+ Ok(Some(self.mk_item(lo, hi, id, kind, Visibility::Inherited, attrs.to_owned())))
+ }
+
fn parse_stmt_without_recovery(&mut self,
macro_legacy_warnings: bool)
-> PResult<'a, Option<Stmt>> {
node: StmtKind::Local(self.parse_local(attrs.into())?),
span: mk_sp(lo, self.prev_span.hi),
}
+ } else if let Some(macro_def) = self.eat_macro_def(&attrs, &Visibility::Inherited)? {
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Item(macro_def),
+ span: mk_sp(lo, self.prev_span.hi),
+ }
// Starts like a simple path, but not a union item.
} else if self.token.is_path_start() &&
!self.token.is_qpath_start() &&
let mut attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
- let vis = self.parse_visibility(true)?;
+ let vis = self.parse_visibility()?;
let defaultness = self.parse_defaultness()?;
let (name, node) = if self.eat_keyword(keywords::Type) {
let name = self.parse_ident()?;
/// Parse struct Foo { ... }
fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
+
let mut generics = self.parse_generics()?;
// There is a special case worth noting here, as reported in issue #17904.
/// Parse union Foo { ... }
fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
+
let mut generics = self.parse_generics()?;
let vdata = if self.token.is_keyword(keywords::Where) {
|p| {
let attrs = p.parse_outer_attributes()?;
let lo = p.span.lo;
- let mut vis = p.parse_visibility(false)?;
+ let mut vis = p.parse_visibility()?;
let ty_is_interpolated =
p.token.is_interpolated() || p.look_ahead(1, |t| t.is_interpolated());
let mut ty = p.parse_ty()?;
fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
- let vis = self.parse_visibility(true)?;
+ let vis = self.parse_visibility()?;
self.parse_single_struct_field(lo, vis, attrs)
}
- // If `allow_path` is false, just parse the `pub` in `pub(path)` (but still parse `pub(crate)`)
- fn parse_visibility(&mut self, allow_path: bool) -> PResult<'a, Visibility> {
- let pub_crate = |this: &mut Self| {
- let span = this.prev_span;
- this.expect(&token::CloseDelim(token::Paren))?;
- Ok(Visibility::Crate(span))
- };
-
+ // Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts
+ // `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
+ fn parse_visibility(&mut self) -> PResult<'a, Visibility> {
if !self.eat_keyword(keywords::Pub) {
- Ok(Visibility::Inherited)
- } else if !allow_path {
- // Look ahead to avoid eating the `(` in `pub(path)` while still parsing `pub(crate)`
- if self.token == token::OpenDelim(token::Paren) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) {
- self.bump(); self.bump();
- pub_crate(self)
- } else {
- Ok(Visibility::Public)
- }
- } else if !self.eat(&token::OpenDelim(token::Paren)) {
- Ok(Visibility::Public)
- } else if self.eat_keyword(keywords::Crate) {
- pub_crate(self)
- } else {
- let path = self.parse_path(PathStyle::Mod)?.default_to_global();
- self.expect(&token::CloseDelim(token::Paren))?;
- Ok(Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID })
- }
+ return Ok(Visibility::Inherited)
+ }
+
+ if self.check(&token::OpenDelim(token::Paren)) {
+ if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) {
+ // `pub(crate)`
+ self.bump(); // `(`
+ self.bump(); // `crate`
+ let vis = Visibility::Crate(self.prev_span);
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ return Ok(vis)
+ } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) {
+ // `pub(in path)`
+ self.bump(); // `(`
+ self.bump(); // `in`
+ let path = self.parse_path(PathStyle::Mod)?.default_to_global(); // `path`
+ let vis = Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ return Ok(vis)
+ } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
+ t.is_keyword(keywords::SelfValue)) {
+ // `pub(self)` or `pub(super)`
+ self.bump(); // `(`
+ let path = self.parse_path(PathStyle::Mod)?.default_to_global(); // `super`/`self`
+ let vis = Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ return Ok(vis)
+ }
+ }
+
+ Ok(Visibility::Public)
}
/// Parse defaultness: DEFAULT or nothing
let lo = self.span.lo;
- let visibility = self.parse_visibility(true)?;
+ let visibility = self.parse_visibility()?;
if self.eat_keyword(keywords::Use) {
// USE ITEM
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
+ if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility)? {
+ return Ok(Some(macro_def));
+ }
+
self.parse_macro_use_or_failure(attrs,macros_allowed,attributes_allowed,lo,visibility)
}
fn parse_foreign_item(&mut self) -> PResult<'a, Option<ForeignItem>> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
- let visibility = self.parse_visibility(true)?;
+ let visibility = self.parse_visibility()?;
if self.check_keyword(keywords::Static) {
// FOREIGN STATIC ITEM
// `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`.
self.eat(&token::ModSep);
let prefix = ast::Path {
- segments: vec![ast::PathSegment::crate_root()],
+ segments: vec![PathSegment::crate_root()],
span: mk_sp(lo, self.span.hi),
};
let view_path_kind = if self.eat(&token::BinOp(token::Star)) {
attrs: self.parse_inner_attributes()?,
module: self.parse_mod_items(&token::Eof, lo)?,
span: mk_sp(lo, self.span.lo),
- exported_macros: Vec::new(),
})
}
!ident_token.is_any_keyword() ||
ident_token.is_path_segment_keyword() ||
[
+ keywords::Do.name(),
keywords::Box.name(),
keywords::Break.name(),
keywords::Continue.name(),
self.bclose(item.span)?;
}
ast::ItemKind::Mac(codemap::Spanned { ref node, .. }) => {
- self.print_visibility(&item.vis)?;
self.print_path(&node.path, false, 0, false)?;
word(&mut self.s, "! ")?;
self.print_ident(item.ident)?;
word(&mut self.s, ";")?;
self.end()?;
}
+ ast::ItemKind::MacroDef(ref tts) => {
+ word(&mut self.s, "macro_rules! ")?;
+ self.print_ident(item.ident)?;
+ self.cbox(INDENT_UNIT)?;
+ self.popen()?;
+ self.print_tts(tts.clone().into())?;
+ self.pclose()?;
+ word(&mut self.s, ";")?;
+ self.end()?;
+ }
}
self.ann.post(self, NodeItem(item))
}
self.print_expr(e)?;
word(&mut self.s, "?")?
}
+ ast::ExprKind::Catch(ref blk) => {
+ self.head("do catch")?;
+ space(&mut self.s)?;
+ self.print_block_with_attrs(&blk, attrs)?
+ }
}
self.ann.post(self, NodeExpr(expr))?;
self.end()
vis: ast::Visibility::Inherited,
node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path {
segments: ["{{root}}", name, "prelude", "v1"].into_iter().map(|name| {
- ast::Ident::from_str(name).into()
+ ast::PathSegment::from_ident(ast::Ident::from_str(name), DUMMY_SP)
}).collect(),
span: span,
})))),
(53, Default, "default")
(54, StaticLifetime, "'static")
(55, Union, "union")
+ (56, Catch, "catch")
// A virtual keyword that resolves to the crate root when used in a lexical scope.
- (56, CrateRoot, "{{root}}")
+ (57, CrateRoot, "{{root}}")
}
// If an interner exists in TLS, return it. Otherwise, prepare a fresh one.
fn path_node(ids: Vec<Ident>) -> ast::Path {
ast::Path {
span: DUMMY_SP,
- segments: ids.into_iter().map(Into::into).collect(),
+ segments: ids.into_iter().map(|id| ast::PathSegment::from_ident(id, DUMMY_SP)).collect(),
}
}
fn visit_attribute(&mut self, _attr: &Attribute) {
self.count += 1;
}
- fn visit_macro_def(&mut self, macro_def: &MacroDef) {
- self.count += 1;
- walk_macro_def(self, macro_def)
- }
-
}
walk_assoc_type_binding(self, type_binding)
}
fn visit_attribute(&mut self, _attr: &'ast Attribute) {}
- fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) {
- walk_macro_def(self, macro_def)
- }
fn visit_vis(&mut self, vis: &'ast Visibility) {
walk_vis(self, vis)
}
pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) {
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
walk_list!(visitor, visit_attribute, &krate.attrs);
- walk_list!(visitor, visit_macro_def, &krate.exported_macros);
-}
-
-pub fn walk_macro_def<'a, V: Visitor<'a>>(visitor: &mut V, macro_def: &'a MacroDef) {
- visitor.visit_ident(macro_def.span, macro_def.ident);
- walk_list!(visitor, visit_attribute, &macro_def.attrs);
}
pub fn walk_mod<'a, V: Visitor<'a>>(visitor: &mut V, module: &'a Mod) {
walk_list!(visitor, visit_trait_item, methods);
}
ItemKind::Mac(ref mac) => visitor.visit_mac(mac),
+ ItemKind::MacroDef(..) => {},
}
walk_list!(visitor, visit_attribute, &item.attrs);
}
ExprKind::Try(ref subexpression) => {
visitor.visit_expr(subexpression)
}
+ ExprKind::Catch(ref body) => {
+ visitor.visit_block(body)
+ }
}
visitor.visit_expr_post(expression)
fn path(&self) -> ast::Path {
ast::Path {
span: self.span,
- segments: vec![self.ident.into()],
+ segments: vec![ast::PathSegment::from_ident(self.ident, self.span)],
}
}
}
pub fn register_builtin_derives(resolver: &mut Resolver) {
$(
- resolver.add_ext(
+ resolver.add_builtin(
ast::Ident::with_empty_ctxt(Symbol::intern($name)),
Rc::new(SyntaxExtension::BuiltinDerive($func))
);
deriving::register_builtin_derives(resolver);
let mut register = |name, ext| {
- resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext));
+ resolver.add_builtin(ast::Ident::with_empty_ctxt(name), Rc::new(ext));
};
macro_rules! register {
krate.module.items.push(mk_registrar(&mut cx, &derives, &attr_macros, &bang_macros));
- if krate.exported_macros.len() > 0 {
- handler.err("cannot export macro_rules! macros from a `proc-macro` \
- crate type currently");
- }
-
- return krate
+ krate
}
fn is_proc_macro_attr(attr: &ast::Attribute) -> bool {
impl<'a> Visitor<'a> for CollectProcMacros<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
+ if let ast::ItemKind::MacroDef(..) = item.node {
+ if self.is_proc_macro_crate &&
+ item.attrs.iter().any(|attr| attr.name() == "macro_export") {
+ let msg =
+ "cannot export macro_rules! macros from a `proc-macro` crate type currently";
+ self.handler.span_err(item.span, msg);
+ }
+ }
+
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.
//
p.pop();
// on some installations the dir is named after the hex of the char
- // (e.g. OS X)
+ // (e.g. macOS)
p.push(&format!("{:x}", first_char as usize));
p.push(term);
if fs::metadata(&p).is_ok() {
#[ignore(reason = "buildbots don't have ncurses installed and I can't mock everything I need")]
fn test_get_dbpath_for_term() {
// woefully inadequate test coverage
- // note: current tests won't work with non-standard terminfo hierarchies (e.g. OS X's)
+ // note: current tests won't work with non-standard terminfo hierarchies (e.g. macOS's)
use std::env;
// FIXME (#9639): This needs to handle non-utf8 paths
fn x(t: &str) -> String {
-Subproject commit 50ab09fb43f038e4f824eea6cb278f560d3e8621
+Subproject commit 859fb269364623b17e092efaba3f94e70ce97c5e
+++ /dev/null
-# If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
-# The actual contents of this file do not matter, but to trigger a change on the
-# build bots then the contents should be changed so git updates the mtime.
-2017-03-02
--- /dev/null
+# If this file is modified, then llvm will be (optionally) cleaned and then rebuilt.
+# The actual contents of this file do not matter, but to trigger a change on the
+# build bots then the contents should be changed so git updates the mtime.
+2017-03-04
# released on `$date`
rustc: beta-2017-02-01
-cargo: bfee18f73287687c543bda8c35e4e33808792715
+cargo: 407edef22e894266eb562618cba5ca9757051946
pkd.data = 42;
result
}
+
+pub struct Array([i32; 8]);
+#[repr(packed)]
+pub struct BigPacked {
+ dealign: u8,
+ data: Array
+}
+
+// CHECK-LABEL: @call_pkd
+#[no_mangle]
+pub fn call_pkd(f: fn() -> Array) -> BigPacked {
+// CHECK: [[ALLOCA:%[_a-z0-9]+]] = alloca %Array
+// CHECK: call void %{{.*}}(%Array* noalias nocapture sret dereferenceable(32) [[ALLOCA]])
+// CHECK: call void @llvm.memcpy.{{.*}}(i8* %{{.*}}, i8* %{{.*}}, i{{[0-9]+}} 32, i32 1, i1 false)
+ // check that calls whose destination is a field of a packed struct
+ // go through an alloca rather than calling the function with an
+ // unaligned destination.
+ BigPacked { dealign: 0, data: f() }
+}
+
+#[repr(packed)]
+#[derive(Copy, Clone)]
+pub struct PackedPair(u8, u32);
+
+// CHECK-LABEL: @pkd_pair
+#[no_mangle]
+pub fn pkd_pair(pair1: &mut PackedPair, pair2: &mut PackedPair) {
+ // CHECK: [[V1:%[a-z0-9]+]] = load i8, i8* %{{.*}}, align 1
+ // CHECK: [[V2:%[a-z0-9]+]] = load i32, i32* %{{.*}}, align 1
+ // CHECK: store i8 [[V1]], i8* {{.*}}, align 1
+ // CHECK: store i32 [[V2]], i32* {{.*}}, align 1
+ *pair2 = *pair1;
+}
#[unstable(feature = "unstable_undeclared", issue = "38412")] // SILLY
pub(crate) b_crate: i32,
#[unstable(feature = "unstable_declared", issue = "38412")] // SILLY
- pub(m) c_mod: i32,
+ pub(in m) c_mod: i32,
#[stable(feature = "unit_test", since = "0.0.0")] // SILLY
d_priv: i32
}
pub i32,
pub(crate) i32,
- pub(m) i32,
+ pub(in m) i32,
i32);
impl Record {
#[unstable(feature = "unstable_undeclared", issue = "38412")] // SILLY
pub(crate) fn pub_crate(&self) -> i32 { self.d_priv }
#[unstable(feature = "unstable_declared", issue = "38412")] // SILLY
- pub(m) fn pub_mod(&self) -> i32 { self.d_priv }
+ pub(in m) fn pub_mod(&self) -> i32 { self.d_priv }
#[stable(feature = "unit_test", since = "0.0.0")] // SILLY
fn private(&self) -> i32 { self.d_priv }
}
pub fn stable(&self) -> i32 { self.0 }
pub(crate) fn pub_crate(&self) -> i32 { self.0 }
- pub(m) fn pub_mod(&self) -> i32 { self.0 }
+ pub(in m) fn pub_mod(&self) -> i32 { self.0 }
fn private(&self) -> i32 { self.0 }
}
}
// that this just passes on those platforms we link in some other allocator to
// ensure we get the same error.
//
-// So long as we CI linux/OSX we should be good.
+// So long as we CI linux/macOS we should be good.
#[cfg(any(target_os = "linux", target_os = "macos"))]
extern crate alloc_jemalloc;
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
// that this just passes on those platforms we link in some other allocator to
// ensure we get the same error.
//
-// So long as we CI linux/OSX we should be good.
+// So long as we CI linux/macOS we should be good.
#[cfg(any(all(target_os = "linux", any(target_arch = "x86", target_arch = "x86_64")),
target_os = "macos"))]
extern crate alloc_system;
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(catch_expr)]
+
+fn main() {
+ match do catch { false } { _ => {} } //~ ERROR expected expression, found reserved keyword `do`
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(catch_expr)]
+
+fn main() {
+ while do catch { false } {} //~ ERROR expected expression, found reserved keyword `do`
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub fn main() {
+ let catch_result = do catch { //~ ERROR `catch` expression is experimental
+ let x = 5;
+ x
+ };
+ assert_eq!(catch_result, 5);
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn prove_static<T: 'static + ?Sized>(_: &'static T) {}
+
+fn lifetime_transmute_slice<'a, T: ?Sized>(x: &'a T, y: &T) -> &'a T {
+ let mut out = [x];
+ //~^ ERROR cannot infer an appropriate lifetime due to conflicting requirements
+ {
+ let slice: &mut [_] = &mut out;
+ slice[0] = y;
+ }
+ out[0]
+}
+
+struct Struct<T, U: ?Sized> {
+ head: T,
+ _tail: U
+}
+
+fn lifetime_transmute_struct<'a, T: ?Sized>(x: &'a T, y: &T) -> &'a T {
+ let mut out = Struct { head: x, _tail: [()] };
+ //~^ ERROR cannot infer an appropriate lifetime due to conflicting requirements
+ {
+ let dst: &mut Struct<_, [()]> = &mut out;
+ dst.head = y;
+ }
+ out.head
+}
+
+fn main() {
+ prove_static(lifetime_transmute_slice("", &String::from("foo")));
+ prove_static(lifetime_transmute_struct("", &String::from("bar")));
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn save_ref<'a>(refr: &'a i32, to: &mut [&'a i32]) {
+ for val in &mut *to {
+ *val = refr;
+ }
+}
+
+fn main() {
+ let ref init = 0i32;
+ let ref mut refr = 1i32;
+
+ let mut out = [init];
+
+ save_ref(&*refr, &mut out);
+
+ // This shouldn't be allowed as `refr` is borrowed
+ *refr = 3; //~ ERROR cannot assign to `*refr` because it is borrowed
+
+ // Prints 3?!
+ println!("{:?}", out[0]);
+}
+++ /dev/null
-// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Exercise the unused_unsafe attribute in some positive and negative cases
-
-#![allow(dead_code)]
-#![deny(unused_unsafe)]
-
-
-mod foo {
- extern {
- pub fn bar();
- }
-}
-
-fn callback<T, F>(_f: F) -> T where F: FnOnce() -> T { panic!() }
-unsafe fn unsf() {}
-
-fn bad1() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
-fn bad2() { unsafe { bad1() } } //~ ERROR: unnecessary `unsafe` block
-unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
-fn bad4() { unsafe { callback(||{}) } } //~ ERROR: unnecessary `unsafe` block
-unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
-fn bad6() {
- unsafe { // don't put the warning here
- unsafe { //~ ERROR: unnecessary `unsafe` block
- unsf()
- }
- }
-}
-unsafe fn bad7() {
- unsafe { //~ ERROR: unnecessary `unsafe` block
- unsafe { //~ ERROR: unnecessary `unsafe` block
- unsf()
- }
- }
-}
-
-unsafe fn good0() { unsf() }
-fn good1() { unsafe { unsf() } }
-fn good2() {
- /* bug uncovered when implementing warning about unused unsafe blocks. Be
- sure that when purity is inherited that the source of the unsafe-ness
- is tracked correctly */
- unsafe {
- unsafe fn what() -> Vec<String> { panic!() }
-
- callback(|| {
- what();
- });
- }
-}
-
-unsafe fn good3() { foo::bar() }
-fn good4() { unsafe { foo::bar() } }
-
-#[allow(unused_unsafe)] fn allowed() { unsafe {} }
-
-fn main() {}
// ignore-macos
// ignore-ios
// compile-flags:-l framework=foo
-// error-pattern: native frameworks are only available on OSX targets
+// error-pattern: native frameworks are only available on macOS targets
fn main() {
}
// which fails to type check.
ss
- //~^ ERROR lifetime bound not satisfied
+ //~^ ERROR cannot infer
//~| ERROR cannot infer
}
// `Box<SomeTrait>` defaults to a `'static` bound, so this return
// is illegal.
- ss.r //~ ERROR lifetime bound not satisfied
+ ss.r //~ ERROR cannot infer an appropriate lifetime
}
fn store(ss: &mut SomeStruct, b: Box<SomeTrait>) {
fn store1<'b>(ss: &mut SomeStruct, b: Box<SomeTrait+'b>) {
// Here we override the lifetimes explicitly, and so naturally we get an error.
- ss.r = b; //~ ERROR lifetime bound not satisfied
+ ss.r = b; //~ ERROR cannot infer an appropriate lifetime
}
fn main() {
#[link(name = "foo", kind = "framework")]
extern {}
-//~^^ ERROR: native frameworks are only available on OSX
+//~^^ ERROR: native frameworks are only available on macOS
fn main() {
}
mod bar {
#[derive(Default)]
pub struct S {
- pub(foo) x: i32,
+ pub(in foo) x: i32,
}
impl S {
- pub(foo) fn f(&self) -> i32 { 0 }
+ pub(in foo) fn f(&self) -> i32 { 0 }
}
pub struct S2 {
mod foo {
pub mod bar {
pub struct S {
- pub(foo) x: i32,
+ pub(in foo) x: i32,
}
}
}
mod pathological {
- pub(bad::path) mod m1 {} //~ ERROR failed to resolve. Maybe a missing `extern crate bad;`?
- pub(foo) mod m2 {} //~ ERROR visibilities can only be restricted to ancestor modules
+ pub(in bad::path) mod m1 {} //~ ERROR failed to resolve. Maybe a missing `extern crate bad;`?
+ pub(in foo) mod m2 {} //~ ERROR visibilities can only be restricted to ancestor modules
}
#![feature(pub_restricted)]
macro_rules! m {
- ($p: path) => (pub($p) struct Z;)
+ ($p: path) => (pub(in $p) struct Z;)
}
struct S<T>(T);
m!{ S<u8> } //~ ERROR type or lifetime parameters in visibility path
//~^ ERROR expected module, found struct `S`
-mod foo {
- struct S(pub(foo<T>) ()); //~ ERROR type or lifetime parameters in visibility path
- //~^ ERROR cannot find type `T` in this scope
-}
-
fn main() {}
fn make_object_bad<'a,'b,'c,A:SomeTrait+'a+'b>(v: A) -> Box<SomeTrait+'c> {
// A outlives 'a AND 'b...but not 'c.
- box v as Box<SomeTrait+'a> //~ ERROR lifetime bound not satisfied
+ box v as Box<SomeTrait+'a> //~ ERROR cannot infer an appropriate lifetime
}
fn main() {
fn static_proc(x: &isize) -> Box<FnMut()->(isize) + 'static> {
// This is illegal, because the region bound on `proc` is 'static.
- Box::new(move|| { *x }) //~ ERROR does not fulfill the required lifetime
+ Box::new(move|| { *x }) //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
fn foo3<'a,'b>(x: &'a mut Dummy) -> &'b mut Dummy {
// Without knowing 'a:'b, we can't coerce
- x //~ ERROR lifetime bound not satisfied
- //~^ ERROR cannot infer
+ x //~ ERROR cannot infer an appropriate lifetime
+ //~^ ERROR cannot infer an appropriate lifetime
}
struct Wrapper<T>(T);
enum E {}
trait Tr {}
-pub(E) struct S; //~ ERROR expected module, found enum `E`
-pub(Tr) struct Z; //~ ERROR expected module, found trait `Tr`
-pub(std::vec) struct F; //~ ERROR visibilities can only be restricted to ancestor modules
-pub(nonexistent) struct G; //~ ERROR cannot find module `nonexistent` in the crate root
-pub(too_soon) struct H; //~ ERROR cannot find module `too_soon` in the crate root
+pub(in E) struct S; //~ ERROR expected module, found enum `E`
+pub(in Tr) struct Z; //~ ERROR expected module, found trait `Tr`
+pub(in std::vec) struct F; //~ ERROR visibilities can only be restricted to ancestor modules
+pub(in nonexistent) struct G; //~ ERROR cannot find module `nonexistent` in the crate root
+pub(in too_soon) struct H; //~ ERROR cannot find module `too_soon` in the crate root
// Visibilities are resolved eagerly without waiting for modules becoming fully populated.
// Visibilities can only use ancestor modules legally which are always available in time,
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
where 'max : 'min
{
// Previously OK:
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
where 'max : 'min
{
// Previously OK, now an error as traits are invariant.
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
// === GDB TESTS ===================================================================================
-// gdb-command:print 'c_style_enum::SINGLE_VARIANT'
+// gdbg-command:print 'c_style_enum::SINGLE_VARIANT'
+// gdbr-command:print c_style_enum::SINGLE_VARIANT
// gdbg-check:$1 = TheOnlyVariant
// gdbr-check:$1 = c_style_enum::SingleVariant::TheOnlyVariant
-// gdb-command:print 'c_style_enum::AUTO_ONE'
+// gdbg-command:print 'c_style_enum::AUTO_ONE'
+// gdbr-command:print c_style_enum::AUTO_ONE
// gdbg-check:$2 = One
// gdbr-check:$2 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::AUTO_TWO'
+// gdbg-command:print 'c_style_enum::AUTO_TWO'
+// gdbr-command:print c_style_enum::AUTO_TWO
// gdbg-check:$3 = One
// gdbr-check:$3 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::AUTO_THREE'
+// gdbg-command:print 'c_style_enum::AUTO_THREE'
+// gdbr-command:print c_style_enum::AUTO_THREE
// gdbg-check:$4 = One
// gdbr-check:$4 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::MANUAL_ONE'
+// gdbg-command:print 'c_style_enum::MANUAL_ONE'
+// gdbr-command:print c_style_enum::MANUAL_ONE
// gdbg-check:$5 = OneHundred
// gdbr-check:$5 = c_style_enum::ManualDiscriminant::OneHundred
-// gdb-command:print 'c_style_enum::MANUAL_TWO'
+// gdbg-command:print 'c_style_enum::MANUAL_TWO'
+// gdbr-command:print c_style_enum::MANUAL_TWO
// gdbg-check:$6 = OneHundred
// gdbr-check:$6 = c_style_enum::ManualDiscriminant::OneHundred
-// gdb-command:print 'c_style_enum::MANUAL_THREE'
+// gdbg-command:print 'c_style_enum::MANUAL_THREE'
+// gdbr-command:print c_style_enum::MANUAL_THREE
// gdbg-check:$7 = OneHundred
// gdbr-check:$7 = c_style_enum::ManualDiscriminant::OneHundred
// Make sure functions have proper names
// gdb-command:info functions
-// gdb-check:[...]void[...]main([...]);
-// gdb-check:[...]void[...]some_function([...]);
-// gdb-check:[...]void[...]some_other_function([...]);
-// gdb-check:[...]void[...]zzz([...]);
+// gdbg-check:[...]void[...]main([...]);
+// gdbr-check:fn limited_debuginfo::main();
+// gdbg-check:[...]void[...]some_function([...]);
+// gdbr-check:fn limited_debuginfo::some_function();
+// gdbg-check:[...]void[...]some_other_function([...]);
+// gdbr-check:fn limited_debuginfo::some_other_function();
+// gdbg-check:[...]void[...]zzz([...]);
+// gdbr-check:fn limited_debuginfo::zzz();
// gdb-command:run
// === GDB TESTS ===================================================================================
-// there's no frame yet for gdb to reliably detect the language, set it explicitly
-// gdbr-command:set language rust
-
// gdbg-command:print 'simple_struct::NO_PADDING_16'
// gdbr-command:print simple_struct::NO_PADDING_16
// gdbg-check:$1 = {x = 1000, y = -1001}
// === GDB TESTS ===================================================================================
-// there's no frame yet for gdb to reliably detect the language, set it explicitly
-// gdbr-command:set language rust
-
// gdbg-command:print/d 'simple_tuple::NO_PADDING_8'
// gdbr-command:print simple_tuple::NO_PADDING_8
// gdbg-check:$1 = {__0 = -50, __1 = 50}
// }
//
// bb2: {
-// StorageLive(_6);
// _0 = ();
// StorageDead(_4);
// StorageDead(_1);
--- /dev/null
+-include ../tools.mk
+
+all:
+ $(RUSTC) foo.rs --emit dep-info
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// We're only emitting dep info, so we shouldn't be running static analysis to
+// figure out that this program is erroneous.
+fn main() {
+ let a: u8 = "a";
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::fmt;
+
#[repr(packed)]
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone)]
struct Foo {
a: i8,
b: i16,
c: i8
}
+impl PartialEq for Foo {
+ fn eq(&self, other: &Foo) -> bool {
+ self.a == other.a && self.b == other.b && self.c == other.c
+ }
+}
+
+impl fmt::Debug for Foo {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let a = self.a;
+ let b = self.b;
+ let c = self.c;
+
+ f.debug_struct("Foo")
+ .field("a", &a)
+ .field("b", &b)
+ .field("c", &c)
+ .finish()
+ }
+}
+
#[link(name = "test", kind = "static")]
extern {
fn foo(f: Foo) -> Foo;
#![feature(no_core)]
#![no_core]
+macro_rules! foo /* 60#0 */(( $ x : ident ) => { y + $ x });
fn bar /* 62#0 */() { let x /* 59#2 */ = 1; y /* 61#4 */ + x /* 59#5 */ }
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// force-host
-
-#![feature(plugin_registrar, rustc_private)]
-#![feature(box_syntax)]
-
-#[macro_use] extern crate rustc;
-extern crate rustc_plugin;
-extern crate rustc_const_math;
-extern crate syntax;
-
-use rustc::mir::transform::{self, MirPass, MirSource};
-use rustc::mir::{Mir, Literal, Location};
-use rustc::mir::visit::MutVisitor;
-use rustc::ty::TyCtxt;
-use rustc::middle::const_val::ConstVal;
-use rustc_const_math::ConstInt;
-use rustc_plugin::Registry;
-
-struct Pass;
-
-impl transform::Pass for Pass {}
-
-impl<'tcx> MirPass<'tcx> for Pass {
- fn run_pass<'a>(&mut self, _: TyCtxt<'a, 'tcx, 'tcx>,
- _: MirSource, mir: &mut Mir<'tcx>) {
- Visitor.visit_mir(mir)
- }
-}
-
-struct Visitor;
-
-impl<'tcx> MutVisitor<'tcx> for Visitor {
- fn visit_literal(&mut self, literal: &mut Literal<'tcx>, _: Location) {
- if let Literal::Value { ref mut value } = *literal {
- if let ConstVal::Integral(ConstInt::I32(ref mut i @ 11)) = *value {
- *i = 42;
- }
- }
- }
-}
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
- reg.register_mir_pass(box Pass);
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:dummy_mir_pass.rs
-// ignore-stage1
-
-#![feature(plugin)]
-#![plugin(dummy_mir_pass)]
-
-fn math() -> i32 {
- 11
-}
-
-pub fn main() {
- assert_eq!(math(), 42);
-}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(catch_expr)]
+
+struct catch {}
+
+pub fn main() {
+ let catch_result = do catch {
+ let x = 5;
+ x
+ };
+ assert_eq!(catch_result, 5);
+
+ let mut catch = true;
+ while catch { catch = false; }
+ assert_eq!(catch, false);
+
+ catch = if catch { false } else { true };
+ assert_eq!(catch, true);
+
+ match catch {
+ _ => {}
+ };
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// pretty-expanded FIXME #23616
+
+use std::rc::Rc;
+
+fn lub_short<'a, T>(_: &[&'a T], _: &[&'a T]) {}
+
+// The two arguments are a subtype of their LUB, after coercion.
+fn long_and_short<'a, T>(xs: &[&'static T; 1], ys: &[&'a T; 1]) {
+ lub_short(xs, ys);
+}
+
+// The argument coerces to a subtype of the return type.
+fn long_to_short<'a, 'b, T>(xs: &'b [&'static T; 1]) -> &'b [&'a T] {
+ xs
+}
+
+// Rc<T> is covariant over T just like &T.
+fn long_to_short_rc<'a, T>(xs: Rc<[&'static T; 1]>) -> Rc<[&'a T]> {
+ xs
+}
+
+// LUB-coercion (if-else/match/array) coerces `xs: &'b [&'static T: N]`
+// to a subtype of the LUB of `xs` and `ys` (i.e. `&'b [&'a T]`),
+// regardless of the order they appear (in if-else/match/array).
+fn long_and_short_lub1<'a, 'b, T>(xs: &'b [&'static T; 1], ys: &'b [&'a T]) {
+ let _order1 = [xs, ys];
+ let _order2 = [ys, xs];
+}
+
+// LUB-coercion should also have the exact same effect when `&'b [&'a T; N]`
+// needs to be coerced, i.e. the resulting type is not &'b [&'static T], but
+// rather the `&'b [&'a T]` LUB.
+fn long_and_short_lub2<'a, 'b, T>(xs: &'b [&'static T], ys: &'b [&'a T; 1]) {
+ let _order1 = [xs, ys];
+ let _order2 = [ys, xs];
+}
+
+fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::fmt;
use std::mem;
#[repr(packed)]
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone)]
struct Foo {
bar: u8,
baz: u64
}
+impl PartialEq for Foo {
+ fn eq(&self, other: &Foo) -> bool {
+ self.bar == other.bar && self.baz == other.baz
+ }
+}
+
+impl fmt::Debug for Foo {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let bar = self.bar;
+ let baz = self.baz;
+
+ f.debug_struct("Foo")
+ .field("bar", &bar)
+ .field("baz", &baz)
+ .finish()
+ }
+}
+
pub fn main() {
let foos = [Foo { bar: 1, baz: 2 }; 10];
// (E.g. negative float to unsigned integer goes through a
// library routine on the default i686 platforms, and the
// implementation of that routine differs on e.g. Linux
- // vs. OSX, resulting in different answers.)
+ // vs. macOS, resulting in different answers.)
if $from::is_float() {
if !$to::in_range(A) { from.0 = 0 as $to; to.0 = 0 as $to; }
if !$to::in_range(B) { from.1 = 0 as $to; to.1 = 0 as $to; }
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
- let (x,) = (vec![],);
-}
+++ /dev/null
-error[E0282]: type annotations needed
- --> $DIR/issue-38812-2.rs:12:17
- |
-12 | let (x,) = (vec![],);
- | ---- ^^^^^^ cannot infer type for `T`
- | |
- | consider giving a type to pattern
- |
- = note: this error originates in a macro outside of the current crate
-
-error: aborting due to previous error
-
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
- let x = vec![];
-}
+++ /dev/null
-error[E0282]: type annotations needed
- --> $DIR/issue-38812.rs:12:13
- |
-12 | let x = vec![];
- | - ^^^^^^ cannot infer type for `T`
- | |
- | consider giving `x` a type
- |
- = note: this error originates in a macro outside of the current crate
-
-error: aborting due to previous error
-
pub struct S(u8);
pub mod n {
- pub(m) struct Z(pub(m::n) u8);
+ pub(in m) struct Z(pub(in m::n) u8);
}
}
error[E0425]: cannot find value `A` in module `namespaced_enums`
- --> $DIR/enums-are-namespaced-xc.rs:15:13
+ --> $DIR/enums-are-namespaced-xc.rs:15:31
|
15 | let _ = namespaced_enums::A;
- | ^^^^^^^^^^^^^^^^^^^ not found in `namespaced_enums`
+ | ^ not found in `namespaced_enums`
|
= help: possible candidate is found in another module, you can import it into scope:
`use namespaced_enums::Foo::A;`
error[E0425]: cannot find function `B` in module `namespaced_enums`
- --> $DIR/enums-are-namespaced-xc.rs:18:13
+ --> $DIR/enums-are-namespaced-xc.rs:18:31
|
18 | let _ = namespaced_enums::B(10);
- | ^^^^^^^^^^^^^^^^^^^ not found in `namespaced_enums`
+ | ^ not found in `namespaced_enums`
|
= help: possible candidate is found in another module, you can import it into scope:
`use namespaced_enums::Foo::B;`
error[E0422]: cannot find struct, variant or union type `C` in module `namespaced_enums`
- --> $DIR/enums-are-namespaced-xc.rs:21:13
+ --> $DIR/enums-are-namespaced-xc.rs:21:31
|
21 | let _ = namespaced_enums::C { a: 10 };
- | ^^^^^^^^^^^^^^^^^^^ not found in `namespaced_enums`
+ | ^ not found in `namespaced_enums`
|
= help: possible candidate is found in another module, you can import it into scope:
`use namespaced_enums::Foo::C;`
--> $DIR/levenshtein.rs:20:10
|
20 | type B = Opiton<u8>; // Misspelled type name from the prelude.
- | ^^^^^^^^^^ did you mean `Option`?
+ | ^^^^^^ did you mean `Option`?
error[E0412]: cannot find type `Baz` in this scope
--> $DIR/levenshtein.rs:23:14
| ^^^^^^ did you mean `foo_bar`?
error[E0412]: cannot find type `first` in module `m`
- --> $DIR/levenshtein.rs:32:12
+ --> $DIR/levenshtein.rs:32:15
|
32 | let b: m::first = m::second; // Misspelled item in module.
- | ^^^^^^^^ did you mean `m::First`?
+ | ^^^^^ did you mean `First`?
error[E0425]: cannot find value `second` in module `m`
- --> $DIR/levenshtein.rs:32:23
+ --> $DIR/levenshtein.rs:32:26
|
32 | let b: m::first = m::second; // Misspelled item in module.
- | ^^^^^^^^^ did you mean `m::Second`?
+ | ^^^^^^ did you mean `Second`?
error: aborting due to 8 previous errors
pub struct S(u8);
pub mod n {
- pub(m) struct Z(pub(m::n) u8);
+ pub(in m) struct Z(pub(in m::n) u8);
}
use m::n::Z; // OK, only the type is imported
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:27:5
|
27 | a.I
- | ^ did you mean `a::I`?
+ | ^--
+ | |
+ | did you mean `a::I`?
error[E0423]: expected value, found module `a`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:33:5
|
33 | a.g()
- | ^ did you mean `a::g(...)`?
+ | ^----
+ | |
+ | did you mean `a::g(...)`?
error[E0423]: expected value, found module `a`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:39:5
|
39 | a.b.J
- | ^ did you mean `a::b`?
+ | ^--
+ | |
+ | did you mean `a::b`?
error[E0423]: expected value, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:45:5
|
45 | a::b.J
- | ^^^^ did you mean `a::b::J`?
+ | ^^^^--
+ | |
+ | did you mean `a::b::J`?
error[E0423]: expected value, found module `a`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:51:5
|
51 | a.b.f();
- | ^ did you mean `a::b`?
+ | ^--
+ | |
+ | did you mean `a::b`?
error[E0423]: expected value, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:55:12
|
55 | v.push(a::b);
- | ^^^^ did you mean `a::I`?
+ | ^^^-
+ | |
+ | did you mean `I`?
error[E0423]: expected value, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:61:5
|
61 | a::b.f()
- | ^^^^ did you mean `a::b::f(...)`?
+ | ^^^^----
+ | |
+ | did you mean `a::b::f(...)`?
error[E0423]: expected value, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:67:5
|
67 | a::b
- | ^^^^ did you mean `a::I`?
+ | ^^^-
+ | |
+ | did you mean `I`?
error[E0423]: expected function, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:73:5
|
73 | a::b()
- | ^^^^ did you mean `a::I`?
+ | ^^^-
+ | |
+ | did you mean `I`?
error: main function not found
--> $DIR/unboxed-closure-sugar-nonexistent-trait.rs:11:8
|
11 | fn f<F:Nonexist(isize) -> isize>(x: F) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
+ | ^^^^^^^^ not found in this scope
error[E0404]: expected trait, found type alias `Typedef`
--> $DIR/unboxed-closure-sugar-nonexistent-trait.rs:17:8
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(associated_consts)]
+
+trait Tr {
+ const C: Self;
+}
+
+fn main() {
+ let a: u8 = Tr::C; //~ ERROR the trait bound `u8: Tr` is not satisfied
+}
--- /dev/null
+error[E0277]: the trait bound `u8: Tr` is not satisfied
+ --> $DIR/issue-29595.rs:18:17
+ |
+18 | let a: u8 = Tr::C; //~ ERROR the trait bound `u8: Tr` is not satisfied
+ | ^^^^^ the trait `Tr` is not implemented for `u8`
+ |
+ = note: required by `Tr::C`
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main () {
+ {println!("{:?}", match { let foo = vec![1, 2]; foo.get(1) } { x => x });}
+}
--- /dev/null
+error: `foo` does not live long enough
+ --> $DIR/issue-40157.rs:12:64
+ |
+12 | {println!("{:?}", match { let foo = vec![1, 2]; foo.get(1) } { x => x });}
+ | ----------------------------------------------------------^-------------
+ | | | |
+ | | | `foo` dropped here while still borrowed
+ | | borrow occurs here
+ | borrowed value needs to live until here
+ |
+ = note: this error originates in a macro outside of the current crate
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Exercise the unused_unsafe attribute in some positive and negative cases
+
+#![allow(dead_code)]
+#![deny(unused_unsafe)]
+
+
+mod foo {
+ extern {
+ pub fn bar();
+ }
+}
+
+fn callback<T, F>(_f: F) -> T where F: FnOnce() -> T { panic!() }
+unsafe fn unsf() {}
+
+fn bad1() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+fn bad2() { unsafe { bad1() } } //~ ERROR: unnecessary `unsafe` block
+unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+fn bad4() { unsafe { callback(||{}) } } //~ ERROR: unnecessary `unsafe` block
+unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
+fn bad6() {
+ unsafe { // don't put the warning here
+ unsafe { //~ ERROR: unnecessary `unsafe` block
+ unsf()
+ }
+ }
+}
+unsafe fn bad7() {
+ unsafe { //~ ERROR: unnecessary `unsafe` block
+ unsafe { //~ ERROR: unnecessary `unsafe` block
+ unsf()
+ }
+ }
+}
+
+unsafe fn good0() { unsf() }
+fn good1() { unsafe { unsf() } }
+fn good2() {
+ /* bug uncovered when implementing warning about unused unsafe blocks. Be
+ sure that when purity is inherited that the source of the unsafe-ness
+ is tracked correctly */
+ unsafe {
+ unsafe fn what() -> Vec<String> { panic!() }
+
+ callback(|| {
+ what();
+ });
+ }
+}
+
+unsafe fn good3() { foo::bar() }
+fn good4() { unsafe { foo::bar() } }
+
+#[allow(unused_unsafe)] fn allowed() { unsafe {} }
+
+fn main() {}
--- /dev/null
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:26:13
+ |
+26 | fn bad1() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^ unnecessary `unsafe` block
+ |
+note: lint level defined here
+ --> $DIR/lint-unused-unsafe.rs:14:9
+ |
+14 | #![deny(unused_unsafe)]
+ | ^^^^^^^^^^^^^
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:27:13
+ |
+27 | fn bad2() { unsafe { bad1() } } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^ unnecessary `unsafe` block
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:28:20
+ |
+28 | unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^ unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` fn
+ --> $DIR/lint-unused-unsafe.rs:28:1
+ |
+28 | unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:29:13
+ |
+29 | fn bad4() { unsafe { callback(||{}) } } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^ unnecessary `unsafe` block
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:30:20
+ |
+30 | unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^ unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` fn
+ --> $DIR/lint-unused-unsafe.rs:30:1
+ |
+30 | unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:33:9
+ |
+33 | unsafe { //~ ERROR: unnecessary `unsafe` block
+ | _________^ starting here...
+34 | | unsf()
+35 | | }
+ | |_________^ ...ending here: unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:32:5
+ |
+32 | unsafe { // don't put the warning here
+ | _____^ starting here...
+33 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+34 | | unsf()
+35 | | }
+36 | | }
+ | |_____^ ...ending here
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:39:5
+ |
+39 | unsafe { //~ ERROR: unnecessary `unsafe` block
+ | _____^ starting here...
+40 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+41 | | unsf()
+42 | | }
+43 | | }
+ | |_____^ ...ending here: unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` fn
+ --> $DIR/lint-unused-unsafe.rs:38:1
+ |
+38 | unsafe fn bad7() {
+ | _^ starting here...
+39 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+40 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+41 | | unsf()
+42 | | }
+43 | | }
+44 | | }
+ | |_^ ...ending here
+
+error: unnecessary `unsafe` block
+ --> $DIR/lint-unused-unsafe.rs:40:9
+ |
+40 | unsafe { //~ ERROR: unnecessary `unsafe` block
+ | _________^ starting here...
+41 | | unsf()
+42 | | }
+ | |_________^ ...ending here: unnecessary `unsafe` block
+ |
+note: because it's nested under this `unsafe` fn
+ --> $DIR/lint-unused-unsafe.rs:38:1
+ |
+38 | unsafe fn bad7() {
+ | _^ starting here...
+39 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+40 | | unsafe { //~ ERROR: unnecessary `unsafe` block
+41 | | unsf()
+42 | | }
+43 | | }
+44 | | }
+ | |_^ ...ending here
+
+error: aborting due to 8 previous errors
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+
+fn main() {
+ let tup = (1,);
+ println!("☃{}", tup[0]);
+}
+
--- /dev/null
+error: cannot index a value of type `({integer},)`
+ --> $DIR/suggestion-non-ascii.rs:14:21
+ |
+14 | println!("☃{}", tup[0]);
+ | ^^^^^^
+ |
+help: to access tuple elements, use tuple indexing syntax as shown
+ | println!("☃{}", tup.0);
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let (x,) = (vec![],);
+}
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-38812-2.rs:12:17
+ |
+12 | let (x,) = (vec![],);
+ | ---- ^^^^^^ cannot infer type for `T`
+ | |
+ | consider giving a type to pattern
+ |
+ = note: this error originates in a macro outside of the current crate
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let x = vec![];
+}
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-38812.rs:12:13
+ |
+12 | let x = vec![];
+ | - ^^^^^^ cannot infer type for `T`
+ | |
+ | consider giving `x` a type
+ |
+ = note: this error originates in a macro outside of the current crate
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo: Sized {
+ fn foo(self);
+}
+
+fn foo<'a,'b,T>(x: &'a T, y: &'b T)
+ where &'a T : Foo,
+ &'b T : Foo
+{
+ x.foo();
+ y.foo();
+}
+
+fn main() { }
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-40294.rs:15:1
+ |
+15 | fn foo<'a,'b,T>(x: &'a T, y: &'b T)
+ | _^ starting here...
+16 | | where &'a T : Foo,
+17 | | &'b T : Foo
+18 | | {
+19 | | x.foo();
+20 | | y.foo();
+21 | | }
+ | |_^ ...ending here: cannot infer type for `&'a T`
+
+error: aborting due to previous error
+
static TARGETS: &'static [&'static str] = &[
"aarch64-apple-ios",
+ "aarch64-unknown-fuchsia",
"aarch64-linux-android",
"aarch64-unknown-linux-gnu",
"arm-linux-androideabi",
"x86_64-pc-windows-msvc",
"x86_64-rumprun-netbsd",
"x86_64-unknown-freebsd",
+ "x86_64-unknown-fuchsia",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"x86_64-unknown-netbsd",
let mut manifest = BTreeMap::new();
manifest.insert("manifest-version".to_string(),
toml::Value::String(manifest_version));
- manifest.insert("date".to_string(), toml::Value::String(date));
+ manifest.insert("date".to_string(), toml::Value::String(date.clone()));
manifest.insert("pkg".to_string(), toml::encode(&pkg));
let manifest = toml::Value::Table(manifest).to_string();
let filename = format!("channel-rust-{}.toml", self.rust_release);
self.write_manifest(&manifest, &filename);
+ let filename = format!("channel-rust-{}-date.txt", self.rust_release);
+ self.write_date_stamp(&date, &filename);
+
if self.rust_release != "beta" && self.rust_release != "nightly" {
self.write_manifest(&manifest, "channel-rust-stable.toml");
+ self.write_date_stamp(&date, "channel-rust-stable-date.txt");
}
}
self.package("rust-docs", &mut manifest.pkg, TARGETS);
self.package("rust-src", &mut manifest.pkg, &["*"]);
- if self.channel == "nightly" {
+ if self.rust_release == "nightly" {
self.package("rust-analysis", &mut manifest.pkg, TARGETS);
}
let mut components = Vec::new();
let mut extensions = Vec::new();
- // rustc/rust-std/cargo are all required, and so is rust-mingw if it's
- // available for the target.
+ // rustc/rust-std/cargo/docs are all required, and so is rust-mingw
+ // if it's available for the target.
components.extend(vec![
Component { pkg: "rustc".to_string(), target: host.to_string() },
Component { pkg: "rust-std".to_string(), target: host.to_string() },
Component { pkg: "cargo".to_string(), target: host.to_string() },
+ Component { pkg: "rust-docs".to_string(), target: host.to_string() },
]);
if host.contains("pc-windows-gnu") {
components.push(Component {
});
}
- // Docs, other standard libraries, and the source package are all
- // optional.
- extensions.push(Component {
- pkg: "rust-docs".to_string(),
- target: host.to_string(),
- });
for target in TARGETS {
if target != host {
extensions.push(Component {
target: target.to_string(),
});
}
- if self.channel == "nightly" {
+ if self.rust_release == "nightly" {
extensions.push(Component {
pkg: "rust-analysis".to_string(),
target: target.to_string(),
self.hash(&dst);
self.sign(&dst);
}
+
+ fn write_date_stamp(&self, date: &str, name: &str) {
+ let dst = self.output.join(name);
+ t!(t!(File::create(&dst)).write_all(date.as_bytes()));
+ self.hash(&dst);
+ self.sign(&dst);
+ }
}
+++ /dev/null
-Subproject commit d17b61aa5a2ca790f268a043bffdb0ffb04f0ec7
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-/// darwin_fd_limit exists to work around an issue where launchctl on Mac OS X
+/// darwin_fd_limit exists to work around an issue where launchctl on macOS
/// defaults the rlimit maxfiles to 256/unlimited. The default soft limit of 256
/// ends up being far too low for our multithreaded scheduler testing, depending
/// on the number of cores available.
clap = "2.19.3"
[dependencies.mdbook]
-version = "0.0.17"
+version = "0.0.18"
default-features = false