os: osx
osx_image: xcode8.2
install: &osx_install_sccache >
- travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-apple-darwin &&
+ travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-apple-darwin &&
chmod +x /usr/local/bin/sccache &&
travis_retry curl -o /usr/local/bin/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
chmod +x /usr/local/bin/stamp
- set PATH=C:\Python27;%PATH%
# Download and install sccache
- - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-pc-windows-msvc
- - mv 2017-04-29-sccache-x86_64-pc-windows-msvc sccache.exe
+ - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-pc-windows-msvc
+ - mv 2017-05-12-sccache-x86_64-pc-windows-msvc sccache.exe
- set PATH=%PATH%;%CD%
# Download and install ninja
--- /dev/null
+#!/bin/sh
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+URL=https://dl.google.com/android/repository
+
+download_ndk() {
+ mkdir -p /android/ndk
+ cd /android/ndk
+ curl -O $URL/$1
+ unzip -q $1
+ rm $1
+ mv android-ndk-* ndk
+}
+
+make_standalone_toolchain() {
+ # See https://developer.android.com/ndk/guides/standalone_toolchain.html
+ python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
+ --install-dir /android/ndk/$1-$2 \
+ --arch $1 \
+ --api $2
+}
+
+remove_ndk() {
+ rm -rf /android/ndk/ndk
+}
FROM ubuntu:16.04
-RUN dpkg --add-architecture i386 && \
- apt-get update && \
+RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
git \
cmake \
unzip \
- expect \
- openjdk-9-jre-headless \
sudo \
- libstdc++6:i386 \
xz-utils \
libssl-dev \
pkg-config
-WORKDIR /android/
-ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools
-
-COPY install-ndk.sh install-sdk.sh accept-licenses.sh /android/
-RUN sh /android/install-ndk.sh
-RUN sh /android/install-sdk.sh
-
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
-COPY start-emulator.sh /android/
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
-ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
+# Install NDK
+COPY install-ndk.sh /tmp
+RUN . /tmp/install-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain arm 9 && \
+ remove_ndk
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-24-sccache-x86_64-unknown-linux-gnu && \
- chmod +x /usr/local/bin/sccache
+# Install SDK
+RUN dpkg --add-architecture i386 && \
+ apt-get update && \
+ apt-get install -y --no-install-recommends \
+ openjdk-9-jre-headless \
+ tzdata \
+ libstdc++6:i386 \
+ libgl1-mesa-glx \
+ libpulse0
+
+COPY install-sdk.sh /tmp
+RUN . /tmp/install-sdk.sh && \
+ download_sdk tools_r25.2.5-linux.zip && \
+ download_sysimage armeabi-v7a 18 && \
+ create_avd armeabi-v7a 18
+
+# Setup env
+ENV PATH=$PATH:/android/sdk/tools
+ENV PATH=$PATH:/android/sdk/platform-tools
+
+ENV TARGETS=arm-linux-androideabi
ENV RUST_CONFIGURE_ARGS \
- --target=arm-linux-androideabi \
- --arm-linux-androideabi-ndk=/android/ndk-arm-9
+ --target=$TARGETS \
+ --arm-linux-androideabi-ndk=/android/ndk/arm-9
+
+ENV SCRIPT python2.7 ../x.py test --target $TARGETS --verbose
-ENV SCRIPT python2.7 ../x.py test --target arm-linux-androideabi
+# Entrypoint
+COPY start-emulator.sh /android/
+ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
+++ /dev/null
-#!/usr/bin/expect -f
-# ignore-license
-
-set timeout 1800
-set cmd [lindex $argv 0]
-set licenses [lindex $argv 1]
-
-spawn {*}$cmd
-expect {
- "Do you accept the license '*'*" {
- exp_send "y\r"
- exp_continue
- }
- eof
-}
set -ex
-cpgdb() {
- cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb /android/$1/bin/$2-gdb
- cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb-orig /android/$1/bin/gdb-orig
- cp -r android-ndk-r11c/prebuilt/linux-x86_64/share /android/$1/share
+URL=https://dl.google.com/android/repository
+
+download_ndk() {
+ mkdir -p /android/ndk
+ cd /android/ndk
+ curl -O $URL/$1
+ unzip -q $1
+ rm $1
+ mv android-ndk-* ndk
}
-# Prep the Android NDK
-#
-# See https://github.com/servo/servo/wiki/Building-for-Android
-curl -O https://dl.google.com/android/repository/android-ndk-r11c-linux-x86_64.zip
-unzip -q android-ndk-r11c-linux-x86_64.zip
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-9 \
- --toolchain=arm-linux-androideabi-4.9 \
- --install-dir=/android/ndk-arm-9 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=arm
-cpgdb ndk-arm-9 arm-linux-androideabi
+make_standalone_toolchain() {
+ # See https://developer.android.com/ndk/guides/standalone_toolchain.html
+ python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
+ --install-dir /android/ndk/$1-$2 \
+ --arch $1 \
+ --api $2
+}
-rm -rf ./android-ndk-r11c-linux-x86_64.zip ./android-ndk-r11c
+remove_ndk() {
+ rm -rf /android/ndk/ndk
+}
#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
set -ex
-# Prep the SDK and emulator
-#
-# Note that the update process requires that we accept a bunch of licenses, and
-# we can't just pipe `yes` into it for some reason, so we take the same strategy
-# located in https://github.com/appunite/docker by just wrapping it in a script
-# which apparently magically accepts the licenses.
+URL=https://dl.google.com/android/repository
+
+download_sdk() {
+ mkdir -p /android/sdk
+ cd /android/sdk
+ curl -O $URL/$1
+ unzip -q $1
+ rm -rf $1
+}
+
+download_sysimage() {
+ # See https://developer.android.com/studio/tools/help/android.html
+ abi=$1
+ api=$2
+
+ filter="platform-tools,android-$api"
+ filter="$filter,sys-img-$abi-android-$api"
-mkdir sdk
-curl https://dl.google.com/android/android-sdk_r24.4-linux.tgz | \
- tar xzf - -C sdk --strip-components=1
+ # Keep printing yes to accept the licenses
+ while true; do echo yes; sleep 10; done | \
+ /android/sdk/tools/android update sdk -a --no-ui \
+ --filter "$filter"
+}
-filter="platform-tools,android-18"
-filter="$filter,sys-img-armeabi-v7a-android-18"
+create_avd() {
+ # See https://developer.android.com/studio/tools/help/android.html
+ abi=$1
+ api=$2
-./accept-licenses.sh "android - update sdk -a --no-ui --filter $filter"
+ echo no | \
+ /android/sdk/tools/android create avd \
+ --name $abi-$api \
+ --target android-$api \
+ --abi $abi
+}
-echo "no" | android create avd \
- --name arm-18 \
- --target android-18 \
- --abi armeabi-v7a
# Setting SHELL to a file instead on a symlink helps android
# emulator identify the system
export SHELL=/bin/bash
-nohup nohup emulator @arm-18 -no-window -partition-size 2047 0<&- &>/dev/null &
+
+# Using the default qemu2 engine makes time::tests::since_epoch fail because
+# the emulator date is set to unix epoch (in the armeabi-v7a-18 image). Using
+# the classic engine the emulator starts with the current date and the tests
+# run fine. If another image is used, this needs to be evaluated again.
+nohup nohup emulator @armeabi-v7a-18 \
+ -engine classic -no-window -partition-size 2047 0<&- &>/dev/null &
+
exec "$@"
RUN curl -O http://ftp.nl.debian.org/debian/dists/jessie/main/installer-armhf/current/images/device-tree/vexpress-v2p-ca15-tc1.dtb
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ unzip \
+ sudo \
+ xz-utils \
+ libssl-dev \
+ pkg-config
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+COPY android-ndk.sh /
+RUN . /android-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain arm64 21 && \
+ remove_ndk
+
+ENV PATH=$PATH:/android/ndk/arm64-21/bin
+
+ENV DEP_Z_ROOT=/android/ndk/arm64-21/sysroot/usr/
+
+ENV HOSTS=aarch64-linux-android
+
+ENV RUST_CONFIGURE_ARGS \
+ --host=$HOSTS \
+ --target=$HOSTS \
+ --aarch64-linux-android-ndk=/android/ndk/arm64-21 \
+ --disable-rpath \
+ --enable-extended \
+ --enable-cargo-openssl-static
+
+ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ unzip \
+ sudo \
+ xz-utils \
+ libssl-dev \
+ pkg-config
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+COPY android-ndk.sh /
+RUN . /android-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain arm 9 && \
+ make_standalone_toolchain arm 21 && \
+ remove_ndk
+
+ENV PATH=$PATH:/android/ndk/arm-9/bin
+
+ENV DEP_Z_ROOT=/android/ndk/arm-9/sysroot/usr/
+
+ENV HOSTS=armv7-linux-androideabi
+
+ENV RUST_CONFIGURE_ARGS \
+ --host=$HOSTS \
+ --target=$HOSTS \
+ --armv7-linux-androideabi-ndk=/android/ndk/arm \
+ --disable-rpath \
+ --enable-extended \
+ --enable-cargo-openssl-static
+
+# We support api level 9, but api level 21 is required to build llvm. To
+# overcome this problem we use an NDK with api level 21 to build llvm and then
+# switch to an NDK with api level 9 to complete the build. When the linker is
+# invoked there are missing symbols (like sigemptyset, not available with api
+# level 9), the default linker behavior is to generate an error; to allow the
+# build to finish we use --warn-unresolved-symbols. Note that the missing
+# symbols do not affect std, only the compiler (llvm) and cargo (openssl).
+RUN chmod 777 /android/ndk && \
+ ln -s /android/ndk/arm-21 /android/ndk/arm
+
+ENV SCRIPT \
+ python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
+ (export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
+ rm /android/ndk/arm && \
+ ln -s /android/ndk/arm-9 /android/ndk/arm && \
+ python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ unzip \
+ sudo \
+ xz-utils \
+ libssl-dev \
+ pkg-config
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+COPY android-ndk.sh /
+RUN . /android-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain x86 9 && \
+ make_standalone_toolchain x86 21 && \
+ remove_ndk
+
+ENV PATH=$PATH:/android/ndk/x86-9/bin
+
+ENV DEP_Z_ROOT=/android/ndk/x86-9/sysroot/usr/
+
+ENV HOSTS=i686-linux-android
+
+ENV RUST_CONFIGURE_ARGS \
+ --host=$HOSTS \
+ --target=$HOSTS \
+ --i686-linux-android-ndk=/android/ndk/x86 \
+ --disable-rpath \
+ --enable-extended \
+ --enable-cargo-openssl-static
+
+# We support api level 9, but api level 21 is required to build llvm. To
+# overcome this problem we use an NDK with api level 21 to build llvm and then
+# switch to an NDK with api level 9 to complete the build. When the linker is
+# invoked there are missing symbols (like sigemptyset, not available with api
+# level 9), the default linker behavior is to generate an error; to allow the
+# build to finish we use --warn-unresolved-symbols. Note that the missing
+# symbols do not affect std, only the compiler (llvm) and cargo (openssl).
+RUN chmod 777 /android/ndk && \
+ ln -s /android/ndk/x86-21 /android/ndk/x86
+
+ENV SCRIPT \
+ python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
+ (export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
+ rm /android/ndk/x86 && \
+ ln -s /android/ndk/x86-9 /android/ndk/x86 && \
+ python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
--- /dev/null
+FROM ubuntu:16.04
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ g++ \
+ make \
+ file \
+ curl \
+ ca-certificates \
+ python2.7 \
+ git \
+ cmake \
+ unzip \
+ sudo \
+ xz-utils \
+ libssl-dev \
+ pkg-config
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+ dpkg -i dumb-init_*.deb && \
+ rm dumb-init_*.deb
+
+RUN curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+COPY android-ndk.sh /
+RUN . /android-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain x86_64 21 && \
+ remove_ndk
+
+ENV PATH=$PATH:/android/ndk/x86_64-21/bin
+
+ENV DEP_Z_ROOT=/android/ndk/x86_64-21/sysroot/usr/
+
+ENV HOSTS=x86_64-linux-android
+
+ENV RUST_CONFIGURE_ARGS \
+ --host=$HOSTS \
+ --target=$HOSTS \
+ --x86_64-linux-android-ndk=/android/ndk/x86_64-21 \
+ --disable-rpath \
+ --enable-extended \
+ --enable-cargo-openssl-static
+
+ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/aarch64-unknown-linux-gnueabi/bin
FROM ubuntu:16.04
-RUN dpkg --add-architecture i386 && \
- apt-get update && \
+RUN apt-get update && \
apt-get install -y --no-install-recommends \
g++ \
make \
git \
cmake \
unzip \
- expect \
- openjdk-9-jre \
sudo \
- libstdc++6:i386 \
xz-utils \
libssl-dev \
pkg-config
-WORKDIR /android/
-ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools
-
-COPY install-ndk.sh /android/
-RUN sh /android/install-ndk.sh
-
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
+ chmod +x /usr/local/bin/sccache
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+# Install NDK
+COPY install-ndk.sh /tmp
+RUN . /tmp/install-ndk.sh && \
+ download_ndk android-ndk-r13b-linux-x86_64.zip && \
+ make_standalone_toolchain arm 9 && \
+ make_standalone_toolchain x86 9 && \
+ make_standalone_toolchain arm64 21 && \
+ make_standalone_toolchain x86_64 21 && \
+ remove_ndk
ENV TARGETS=arm-linux-androideabi
ENV TARGETS=$TARGETS,armv7-linux-androideabi
ENV RUST_CONFIGURE_ARGS \
--target=$TARGETS \
--enable-extended \
- --arm-linux-androideabi-ndk=/android/ndk-arm-9 \
- --armv7-linux-androideabi-ndk=/android/ndk-arm-9 \
- --i686-linux-android-ndk=/android/ndk-x86-9 \
- --aarch64-linux-android-ndk=/android/ndk-arm64-21 \
- --x86_64-linux-android-ndk=/android/ndk-x86_64-21
+ --arm-linux-androideabi-ndk=/android/ndk/arm-9 \
+ --armv7-linux-androideabi-ndk=/android/ndk/arm-9 \
+ --i686-linux-android-ndk=/android/ndk/x86-9 \
+ --aarch64-linux-android-ndk=/android/ndk/arm64-21 \
+ --x86_64-linux-android-ndk=/android/ndk/x86_64-21
ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
set -ex
-# Prep the Android NDK
-#
-# See https://github.com/servo/servo/wiki/Building-for-Android
-curl -O https://dl.google.com/android/repository/android-ndk-r11c-linux-x86_64.zip
-unzip -q android-ndk-r11c-linux-x86_64.zip
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-9 \
- --toolchain=arm-linux-androideabi-4.9 \
- --install-dir=/android/ndk-arm-9 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=arm
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-21 \
- --toolchain=aarch64-linux-android-4.9 \
- --install-dir=/android/ndk-arm64-21 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=arm64
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-9 \
- --toolchain=x86-4.9 \
- --install-dir=/android/ndk-x86-9 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=x86
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
- --platform=android-21 \
- --toolchain=x86_64-4.9 \
- --install-dir=/android/ndk-x86_64-21 \
- --ndk-dir=/android/android-ndk-r11c \
- --arch=x86_64
+URL=https://dl.google.com/android/repository
+
+download_ndk() {
+ mkdir -p /android/ndk
+ cd /android/ndk
+ curl -O $URL/$1
+ unzip -q $1
+ rm $1
+ mv android-ndk-* ndk
+}
+
+make_standalone_toolchain() {
+ # See https://developer.android.com/ndk/guides/standalone_toolchain.html
+ python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
+ --install-dir /android/ndk/$1-$2 \
+ --arch $1 \
+ --api $2
+}
-rm -rf ./android-ndk-r11c-linux-x86_64.zip ./android-ndk-r11c
+remove_ndk() {
+ rm -rf /android/ndk/ndk
+}
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabihf/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/armv7-unknown-linux-gnueabihf/bin
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV RUST_CONFIGURE_ARGS \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/rustroot/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV HOSTS=i686-unknown-linux-gnu
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/powerpc-unknown-linux-gnu/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/powerpc64-unknown-linux-gnu/bin
RUN ./build-powerpc64le-toolchain.sh
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/s390x-ibm-linux-gnu/bin
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/rustroot/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV HOSTS=x86_64-unknown-linux-gnu
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV RUST_CONFIGURE_ARGS \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/x86_64-unknown-netbsd/bin
lib32stdc++6
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
source "$ci_dir/shared.sh"
-retry docker \
- build \
- --rm \
- -t rust-ci \
- "`dirname "$script"`/$image"
+if [ -f "$docker_dir/$image/Dockerfile" ]; then
+ retry docker \
+ build \
+ --rm \
+ -t rust-ci \
+ "$docker_dir/$image"
+elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then
+ if [ -n "$TRAVIS_OS_NAME" ]; then
+ echo Cannot run disabled images on travis!
+ exit 1
+ fi
+ retry docker \
+ build \
+ --rm \
+ -t rust-ci \
+ -f "$docker_dir/disabled/$image/Dockerfile" \
+ "$docker_dir"
+else
+ echo Invalid image: $image
+ exit 1
+fi
objdir=$root_dir/obj
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-04-29-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- [peek](library-features/peek.md)
- [placement_in](library-features/placement-in.md)
- [placement_new_protocol](library-features/placement-new-protocol.md)
- - [print](library-features/print.md)
+ - [print_internals](library-features/print-internals.md)
- [proc_macro_internals](library-features/proc-macro-internals.md)
- [process_try_wait](library-features/process-try-wait.md)
- [question_mark_carrier](library-features/question-mark-carrier.md)
--- /dev/null
+# `print_internals`
+
+This feature is internal to the Rust compiler and is not intended for general use.
+
+------------------------
+++ /dev/null
-# `print`
-
-This feature is internal to the Rust compiler and is not intended for general use.
-
-------------------------
/// exception. If you need to mutate through an `Arc`, use [`Mutex`][mutex],
/// [`RwLock`][rwlock], or one of the [`Atomic`][atomic] types.
///
-/// `Arc` uses atomic operations for reference counting, so `Arc`s can be
-/// sent between threads. In other words, `Arc<T>` implements [`Send`]
-/// as long as `T` implements [`Send`] and [`Sync`][sync]. The disadvantage is
-/// that atomic operations are more expensive than ordinary memory accesses.
-/// If you are not sharing reference-counted values between threads, consider
-/// using [`rc::Rc`][`Rc`] for lower overhead. [`Rc`] is a safe default, because
-/// the compiler will catch any attempt to send an [`Rc`] between threads.
-/// However, a library might choose `Arc` in order to give library consumers
+/// ## Thread Safety
+///
+/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
+/// counting. This means that it is thread-safe. The disadvantage is that
+/// atomic operations are more expensive than ordinary memory accesses. If you
+/// are not sharing reference-counted values between threads, consider using
+/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
+/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
+/// However, a library might choose `Arc<T>` in order to give library consumers
/// more flexibility.
///
+/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
+/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
+/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
+/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
+/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
+/// data, but it doesn't add thread safety to its data. Consider
+/// `Arc<RefCell<T>>`. `RefCell<T>` isn't [`Sync`], and if `Arc<T>` was always
+/// [`Send`], `Arc<RefCell<T>>` would be as well. But then we'd have a problem:
+/// `RefCell<T>` is not thread safe; it keeps track of the borrowing count using
+/// non-atomic operations.
+///
+/// In the end, this means that you may need to pair `Arc<T>` with some sort of
+/// `std::sync` type, usually `Mutex<T>`.
+///
+/// ## Breaking cycles with `Weak`
+///
/// The [`downgrade`][downgrade] method can be used to create a non-owning
/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
/// to an `Arc`, but this will return [`None`] if the value has already been
/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
/// pointers from children back to their parents.
///
+/// ## `Deref` behavior
+///
/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
/// clashes with `T`'s methods, the methods of `Arc<T>` itself are [associated
///
/// [arc]: struct.Arc.html
/// [weak]: struct.Weak.html
-/// [`Rc`]: ../../std/rc/struct.Rc.html
+/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
/// [mutex]: ../../std/sync/struct.Mutex.html
/// [rwlock]: ../../std/sync/struct.RwLock.html
/// [atomic]: ../../std/sync/atomic/index.html
/// [`Send`]: ../../std/marker/trait.Send.html
-/// [sync]: ../../std/marker/trait.Sync.html
+/// [`Sync`]: ../../std/marker/trait.Sync.html
/// [deref]: ../../std/ops/trait.Deref.html
/// [downgrade]: struct.Arc.html#method.downgrade
/// [upgrade]: struct.Weak.html#method.upgrade
#[unstable(feature = "unique", issue = "27730")]
impl<T: Sized> Unique<T> {
- /// Creates a new `Shared` that is dangling, but well-aligned.
+ /// Creates a new `Unique` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
DefSpan(D),
Stability(D),
Deprecation(D),
+ ItemBodyNestedBodies(D),
+ ConstIsRvaluePromotableToStatic(D),
+ ImplParent(D),
+ TraitOfItem(D),
+ IsExportedSymbol(D),
+ IsMirAvailable(D),
+ ItemAttrs(D),
+ FnArgNames(D),
FileMap(D, Arc<String>),
}
DefSpan(ref d) => op(d).map(DefSpan),
Stability(ref d) => op(d).map(Stability),
Deprecation(ref d) => op(d).map(Deprecation),
+ ItemAttrs(ref d) => op(d).map(ItemAttrs),
+ FnArgNames(ref d) => op(d).map(FnArgNames),
+ ImplParent(ref d) => op(d).map(ImplParent),
+ TraitOfItem(ref d) => op(d).map(TraitOfItem),
+ IsExportedSymbol(ref d) => op(d).map(IsExportedSymbol),
+ ItemBodyNestedBodies(ref d) => op(d).map(ItemBodyNestedBodies),
+ ConstIsRvaluePromotableToStatic(ref d) => op(d).map(ConstIsRvaluePromotableToStatic),
+ IsMirAvailable(ref d) => op(d).map(IsMirAvailable),
GlobalMetaData(ref d, kind) => op(d).map(|d| GlobalMetaData(d, kind)),
FileMap(ref d, ref file_name) => op(d).map(|d| FileMap(d, file_name.clone())),
}
use ty::{IntType, UintType};
use ty::{self, Ty, TyCtxt};
use ty::error::TypeError;
-use ty::fold::TypeFoldable;
-use ty::relate::{RelateResult, TypeRelation};
-use traits::PredicateObligations;
+use ty::relate::{self, Relate, RelateResult, TypeRelation};
+use traits::{Obligation, PredicateObligations};
use syntax::ast;
use syntax_pos::Span;
// `'?2` and `?3` are fresh region/type inference
// variables. (Down below, we will relate `a_ty <: b_ty`,
// adding constraints like `'x: '?2` and `?1 <: ?3`.)
- let b_ty = self.generalize(a_ty, b_vid, dir == EqTo)?;
+ let Generalization { ty: b_ty, needs_wf } = self.generalize(a_ty, b_vid, dir)?;
debug!("instantiate(a_ty={:?}, dir={:?}, b_vid={:?}, generalized b_ty={:?})",
a_ty, dir, b_vid, b_ty);
self.infcx.type_variables.borrow_mut().instantiate(b_vid, b_ty);
+ if needs_wf {
+ self.obligations.push(Obligation::new(self.trace.cause.clone(),
+ ty::Predicate::WellFormed(b_ty)));
+ }
+
// Finally, relate `b_ty` to `a_ty`, as described in previous comment.
//
// FIXME(#16847): This code is non-ideal because all these subtype
/// Attempts to generalize `ty` for the type variable `for_vid`.
/// This checks for cycle -- that is, whether the type `ty`
- /// references `for_vid`. If `is_eq_relation` is false, it will
- /// also replace all regions/unbound-type-variables with fresh
- /// variables. Returns `TyError` in the case of a cycle, `Ok`
- /// otherwise.
+ /// references `for_vid`. The `dir` is the "direction" for which we
+ /// are performing the generalization (i.e., are we producing a type
+ /// that can be used as a supertype etc).
///
/// Preconditions:
///
fn generalize(&self,
ty: Ty<'tcx>,
for_vid: ty::TyVid,
- is_eq_relation: bool)
- -> RelateResult<'tcx, Ty<'tcx>>
+ dir: RelationDir)
+ -> RelateResult<'tcx, Generalization<'tcx>>
{
+ // Determine the ambient variance within which `ty` appears.
+ // The surrounding equation is:
+ //
+ // ty [op] ty2
+ //
+ // where `op` is either `==`, `<:`, or `:>`. This maps quite
+ // naturally.
+ let ambient_variance = match dir {
+ RelationDir::EqTo => ty::Invariant,
+ RelationDir::SubtypeOf => ty::Covariant,
+ RelationDir::SupertypeOf => ty::Contravariant,
+ };
+
let mut generalize = Generalizer {
infcx: self.infcx,
span: self.trace.cause.span,
for_vid_sub_root: self.infcx.type_variables.borrow_mut().sub_root_var(for_vid),
- is_eq_relation: is_eq_relation,
- cycle_detected: false
+ ambient_variance: ambient_variance,
+ needs_wf: false,
};
- let u = ty.fold_with(&mut generalize);
- if generalize.cycle_detected {
- Err(TypeError::CyclicTy)
- } else {
- Ok(u)
- }
+
+ let ty = generalize.relate(&ty, &ty)?;
+ let needs_wf = generalize.needs_wf;
+ Ok(Generalization { ty, needs_wf })
}
}
infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
span: Span,
for_vid_sub_root: ty::TyVid,
- is_eq_relation: bool,
- cycle_detected: bool,
+ ambient_variance: ty::Variance,
+ needs_wf: bool, // see the field `needs_wf` in `Generalization`
}
-impl<'cx, 'gcx, 'tcx> ty::fold::TypeFolder<'gcx, 'tcx> for Generalizer<'cx, 'gcx, 'tcx> {
- fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx> {
+/// Result from a generalization operation. This includes
+/// not only the generalized type, but also a bool flag
+/// indicating whether further WF checks are needed.
+struct Generalization<'tcx> {
+ ty: Ty<'tcx>,
+
+ /// If true, then the generalized type may not be well-formed,
+ /// even if the source type is well-formed, so we should add an
+ /// additional check to enforce that it is. This arises in
+ /// particular around 'bivariant' type parameters that are only
+ /// constrained by a where-clause. As an example, imagine a type:
+ ///
+ /// struct Foo<A, B> where A: Iterator<Item=B> {
+ /// data: A
+ /// }
+ ///
+ /// here, `A` will be covariant, but `B` is
+ /// unconstrained. However, whatever it is, for `Foo` to be WF, it
+ /// must be equal to `A::Item`. If we have an input `Foo<?A, ?B>`,
+ /// then after generalization we will wind up with a type like
+ /// `Foo<?C, ?D>`. When we enforce that `Foo<?A, ?B> <: Foo<?C,
+ /// ?D>` (or `>:`), we will wind up with the requirement that `?A
+ /// <: ?C`, but no particular relationship between `?B` and `?D`
+ /// (after all, we do not know the variance of the normalized form
+ /// of `A::Item` with respect to `A`). If we do nothing else, this
+ /// may mean that `?D` goes unconstrained (as in #41677). So, in
+ /// this scenario where we create a new type variable in a
+ /// bivariant context, we set the `needs_wf` flag to true. This
+ /// will force the calling code to check that `WF(Foo<?C, ?D>)`
+ /// holds, which in turn implies that `?C::Item == ?D`. So once
+ /// `?C` is constrained, that should suffice to restrict `?D`.
+ needs_wf: bool,
+}
+
+impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
self.infcx.tcx
}
- fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+ fn tag(&self) -> &'static str {
+ "Generalizer"
+ }
+
+ fn a_is_expected(&self) -> bool {
+ true
+ }
+
+ fn binders<T>(&mut self, a: &ty::Binder<T>, b: &ty::Binder<T>)
+ -> RelateResult<'tcx, ty::Binder<T>>
+ where T: Relate<'tcx>
+ {
+ Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?))
+ }
+
+ fn relate_with_variance<T: Relate<'tcx>>(&mut self,
+ variance: ty::Variance,
+ a: &T,
+ b: &T)
+ -> RelateResult<'tcx, T>
+ {
+ let old_ambient_variance = self.ambient_variance;
+ self.ambient_variance = self.ambient_variance.xform(variance);
+
+ let result = self.relate(a, b);
+ self.ambient_variance = old_ambient_variance;
+ result
+ }
+
+ fn tys(&mut self, t: Ty<'tcx>, t2: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+ assert_eq!(t, t2); // we are abusing TypeRelation here; both LHS and RHS ought to be ==
+
// Check to see whether the type we are genealizing references
// any other type variable related to `vid` via
// subtyping. This is basically our "occurs check", preventing
if sub_vid == self.for_vid_sub_root {
// If sub-roots are equal, then `for_vid` and
// `vid` are related via subtyping.
- self.cycle_detected = true;
- self.tcx().types.err
+ return Err(TypeError::CyclicTy);
} else {
match variables.probe_root(vid) {
Some(u) => {
drop(variables);
- self.fold_ty(u)
+ self.relate(&u, &u)
}
None => {
- if !self.is_eq_relation {
- let origin = variables.origin(vid);
- let new_var_id = variables.new_var(false, origin, None);
- let u = self.tcx().mk_var(new_var_id);
- debug!("generalize: replacing original vid={:?} with new={:?}",
- vid, u);
- u
- } else {
- t
+ match self.ambient_variance {
+ // Invariant: no need to make a fresh type variable.
+ ty::Invariant => return Ok(t),
+
+ // Bivariant: make a fresh var, but we
+ // may need a WF predicate. See
+ // comment on `needs_wf` field for
+ // more info.
+ ty::Bivariant => self.needs_wf = true,
+
+ // Co/contravariant: this will be
+ // sufficiently constrained later on.
+ ty::Covariant | ty::Contravariant => (),
}
+
+ let origin = variables.origin(vid);
+ let new_var_id = variables.new_var(false, origin, None);
+ let u = self.tcx().mk_var(new_var_id);
+ debug!("generalize: replacing original vid={:?} with new={:?}",
+ vid, u);
+ return Ok(u);
}
}
}
}
+ ty::TyInfer(ty::IntVar(_)) |
+ ty::TyInfer(ty::FloatVar(_)) => {
+ // No matter what mode we are in,
+ // integer/floating-point types must be equal to be
+ // relatable.
+ Ok(t)
+ }
_ => {
- t.super_fold_with(self)
+ relate::super_relate_tys(self, t, t)
}
}
}
- fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+ fn regions(&mut self, r: ty::Region<'tcx>, r2: ty::Region<'tcx>)
+ -> RelateResult<'tcx, ty::Region<'tcx>> {
+ assert_eq!(r, r2); // we are abusing TypeRelation here; both LHS and RHS ought to be ==
+
match *r {
// Never make variables for regions bound within the type itself,
// nor for erased regions.
ty::ReLateBound(..) |
- ty::ReErased => { return r; }
+ ty::ReErased => {
+ return Ok(r);
+ }
// Early-bound regions should really have been substituted away before
// we get to this point.
ty::ReScope(..) |
ty::ReVar(..) |
ty::ReFree(..) => {
- if self.is_eq_relation {
- return r;
+ match self.ambient_variance {
+ ty::Invariant => return Ok(r),
+ ty::Bivariant | ty::Covariant | ty::Contravariant => (),
}
}
}
// FIXME: This is non-ideal because we don't give a
// very descriptive origin for this region variable.
- self.infcx.next_region_var(MiscVariable(self.span))
+ Ok(self.infcx.next_region_var(MiscVariable(self.span)))
}
}
use super::combine::{CombineFields, RelationDir};
use super::{Subtype};
+use hir::def_id::DefId;
+
use ty::{self, Ty, TyCtxt};
use ty::TyVar;
-use ty::relate::{Relate, RelateResult, TypeRelation};
+use ty::subst::Substs;
+use ty::relate::{self, Relate, RelateResult, TypeRelation};
/// Ensures `a` is made equal to `b`. Returns `a` on success.
pub struct Equate<'combine, 'infcx: 'combine, 'gcx: 'infcx+'tcx, 'tcx: 'infcx> {
fn a_is_expected(&self) -> bool { self.a_is_expected }
+ fn relate_item_substs(&mut self,
+ _item_def_id: DefId,
+ a_subst: &'tcx Substs<'tcx>,
+ b_subst: &'tcx Substs<'tcx>)
+ -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+ {
+ // NB: Once we are equating types, we don't care about
+ // variance, so don't try to lookup the variance here. This
+ // also avoids some cycles (e.g. #41849) since looking up
+ // variance requires computing types which can require
+ // performing trait matching (which then performs equality
+ // unification).
+
+ relate::relate_substs(self, None, a_subst, b_subst)
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
_: ty::Variance,
a: &T,
// except according to those terms.
use infer::{self, InferCtxt, SubregionOrigin};
-use ty::Region;
+use ty::{self, Region};
use ty::error::TypeError;
use errors::DiagnosticBuilder;
"the type `{}` does not fulfill the required \
lifetime",
self.ty_to_string(ty));
- self.tcx.note_and_explain_region(&mut err, "type must outlive ", sub, "");
+ match *sub {
+ ty::ReStatic => {
+ self.tcx.note_and_explain_region(&mut err, "type must satisfy ", sub, "")
+ }
+ _ => {
+ self.tcx.note_and_explain_region(&mut err, "type must outlive ", sub, "")
+ }
+ }
err
}
infer::RelateRegionParamBound(span) => {
fn visibility(&self, def: DefId) -> ty::Visibility;
fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>>;
fn item_generics_cloned(&self, def: DefId) -> ty::Generics;
- fn item_attrs(&self, def_id: DefId) -> Rc<[ast::Attribute]>;
- fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>;
// trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>;
// impl info
fn impl_defaultness(&self, def: DefId) -> hir::Defaultness;
- fn impl_parent(&self, impl_def_id: DefId) -> Option<DefId>;
// trait/impl-item info
- fn trait_of_item(&self, def_id: DefId) -> Option<DefId>;
fn associated_item_cloned(&self, def: DefId) -> ty::AssociatedItem;
// flags
fn is_const_fn(&self, did: DefId) -> bool;
fn is_default_impl(&self, impl_did: DefId) -> bool;
- fn is_foreign_item(&self, did: DefId) -> bool;
fn is_dllimport_foreign_item(&self, def: DefId) -> bool;
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool;
- fn is_exported_symbol(&self, def_id: DefId) -> bool;
// crate metadata
fn dylib_dependency_formats(&self, cnum: CrateNum)
}
fn item_generics_cloned(&self, def: DefId) -> ty::Generics
{ bug!("item_generics_cloned") }
- fn item_attrs(&self, def_id: DefId) -> Rc<[ast::Attribute]> { bug!("item_attrs") }
- fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name> { bug!("fn_arg_names") }
// trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] }
// impl info
fn impl_defaultness(&self, def: DefId) -> hir::Defaultness { bug!("impl_defaultness") }
- fn impl_parent(&self, def: DefId) -> Option<DefId> { bug!("impl_parent") }
// trait/impl-item info
- fn trait_of_item(&self, def_id: DefId) -> Option<DefId> { bug!("trait_of_item") }
fn associated_item_cloned(&self, def: DefId) -> ty::AssociatedItem
{ bug!("associated_item_cloned") }
// flags
fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") }
fn is_default_impl(&self, impl_did: DefId) -> bool { bug!("is_default_impl") }
- fn is_foreign_item(&self, did: DefId) -> bool { bug!("is_foreign_item") }
fn is_dllimport_foreign_item(&self, id: DefId) -> bool { false }
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool { false }
- fn is_exported_symbol(&self, def_id: DefId) -> bool { false }
// crate metadata
fn dylib_dependency_formats(&self, cnum: CrateNum)
} else if match self.tcx.hir.get_if_local(def_id) {
Some(hir::map::NodeForeignItem(..)) => true,
Some(..) => false,
- None => self.tcx.sess.cstore.is_foreign_item(def_id),
+ None => self.tcx.is_foreign_item(def_id),
} {
self.require_unsafe_ext(expr.id, expr.span, "use of extern static", true);
}
StorageDead(Lvalue<'tcx>),
InlineAsm {
- asm: InlineAsm,
+ asm: Box<InlineAsm>,
outputs: Vec<Lvalue<'tcx>>,
inputs: Vec<Operand<'tcx>>
},
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum Operand<'tcx> {
Consume(Lvalue<'tcx>),
- Constant(Constant<'tcx>),
+ Constant(Box<Constant<'tcx>>),
}
impl<'tcx> Debug for Operand<'tcx> {
substs: &'tcx Substs<'tcx>,
span: Span,
) -> Self {
- Operand::Constant(Constant {
+ Operand::Constant(box Constant {
span: span,
ty: tcx.type_of(def_id).subst(tcx, substs),
literal: Literal::Value { value: ConstVal::Function(def_id, substs) },
/// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case
/// that `Foo` has a destructor. These rvalues can be optimized
/// away after type-checking and before lowering.
- Aggregate(AggregateKind<'tcx>, Vec<Operand<'tcx>>),
+ Aggregate(Box<AggregateKind<'tcx>>, Vec<Operand<'tcx>>),
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
tuple_fmt.finish()
}
- match *kind {
+ match **kind {
AggregateKind::Array(_) => write!(fmt, "{:?}", lvs),
AggregateKind::Tuple => {
Discriminant(ref lval) => Discriminant(lval.fold_with(folder)),
Box(ty) => Box(ty.fold_with(folder)),
Aggregate(ref kind, ref fields) => {
- let kind = match *kind {
+ let kind = box match **kind {
AggregateKind::Array(ty) => AggregateKind::Array(ty.fold_with(folder)),
AggregateKind::Tuple => AggregateKind::Tuple,
AggregateKind::Adt(def, v, substs, n) =>
Discriminant(ref lval) => lval.visit_with(visitor),
Box(ty) => ty.visit_with(visitor),
Aggregate(ref kind, ref fields) => {
- (match *kind {
+ (match **kind {
AggregateKind::Array(ty) => ty.visit_with(visitor),
AggregateKind::Tuple => false,
AggregateKind::Adt(_, _, substs, _) => substs.visit_with(visitor),
tcx.mk_box(t)
}
Rvalue::Aggregate(ref ak, ref ops) => {
- match *ak {
+ match **ak {
AggregateKind::Array(ty) => {
tcx.mk_array(ty, ops.len())
}
Rvalue::Aggregate(ref $($mutability)* kind,
ref $($mutability)* operands) => {
+ let kind = &$($mutability)* **kind;
match *kind {
AggregateKind::Array(ref $($mutability)* ty) => {
self.visit_ty(ty);
use std::rc::Rc;
use syntax_pos::{Span, DUMMY_SP};
use syntax::attr;
+use syntax::ast;
use syntax::symbol::Symbol;
pub trait Key: Clone + Hash + Eq + Debug {
}
}
+impl<'tcx> QueryDescription for queries::item_attrs<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("item_attrs")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_exported_symbol<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("is_exported_symbol")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::fn_arg_names<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("fn_arg_names")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::impl_parent<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("impl_parent")
+ }
+}
+
+impl<'tcx> QueryDescription for queries::trait_of_item<'tcx> {
+ fn describe(_: TyCtxt, _: DefId) -> String {
+ bug!("trait_of_item")
+ }
+}
+
impl<'tcx> QueryDescription for queries::item_body_nested_bodies<'tcx> {
fn describe(tcx: TyCtxt, def_id: DefId) -> String {
format!("nested item bodies of `{}`", tcx.item_path_str(def_id))
[] def_span: DefSpan(DefId) -> Span,
[] stability: Stability(DefId) -> Option<attr::Stability>,
[] deprecation: Deprecation(DefId) -> Option<attr::Deprecation>,
- [] item_body_nested_bodies: metadata_dep_node(DefId) -> Rc<BTreeMap<hir::BodyId, hir::Body>>,
- [] const_is_rvalue_promotable_to_static: metadata_dep_node(DefId) -> bool,
- [] is_mir_available: metadata_dep_node(DefId) -> bool,
+ [] item_attrs: ItemAttrs(DefId) -> Rc<[ast::Attribute]>,
+ [] fn_arg_names: FnArgNames(DefId) -> Vec<ast::Name>,
+ [] impl_parent: ImplParent(DefId) -> Option<DefId>,
+ [] trait_of_item: TraitOfItem(DefId) -> Option<DefId>,
+ [] is_exported_symbol: IsExportedSymbol(DefId) -> bool,
+ [] item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> Rc<BTreeMap<hir::BodyId, hir::Body>>,
+ [] const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
+ [] is_mir_available: IsMirAvailable(DefId) -> bool,
}
fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
DepNode::Reachability
}
-fn metadata_dep_node(def_id: DefId) -> DepNode<DefId> {
- DepNode::MetaData(def_id)
-}
-
fn mir_shim_dep_node(instance: ty::InstanceDef) -> DepNode<DefId> {
instance.dep_node()
}
pub empty_variance: Rc<Vec<ty::Variance>>,
}
+impl Variance {
+ /// `a.xform(b)` combines the variance of a context with the
+ /// variance of a type with the following meaning. If we are in a
+ /// context with variance `a`, and we encounter a type argument in
+ /// a position with variance `b`, then `a.xform(b)` is the new
+ /// variance with which the argument appears.
+ ///
+ /// Example 1:
+ ///
+ /// *mut Vec<i32>
+ ///
+ /// Here, the "ambient" variance starts as covariant. `*mut T` is
+ /// invariant with respect to `T`, so the variance in which the
+ /// `Vec<i32>` appears is `Covariant.xform(Invariant)`, which
+ /// yields `Invariant`. Now, the type `Vec<T>` is covariant with
+ /// respect to its type argument `T`, and hence the variance of
+ /// the `i32` here is `Invariant.xform(Covariant)`, which results
+ /// (again) in `Invariant`.
+ ///
+ /// Example 2:
+ ///
+ /// fn(*const Vec<i32>, *mut Vec<i32>)
+ ///
+ /// The ambient variance is covariant. A `fn` type is
+ /// contravariant with respect to its parameters, so the variance
+ /// within which both pointer types appear is
+ /// `Covariant.xform(Contravariant)`, or `Contravariant`. `*const
+ /// T` is covariant with respect to `T`, so the variance within
+ /// which the first `Vec<i32>` appears is
+ /// `Contravariant.xform(Covariant)` or `Contravariant`. The same
+ /// is true for its `i32` argument. In the `*mut T` case, the
+ /// variance of `Vec<i32>` is `Contravariant.xform(Invariant)`,
+ /// and hence the outermost type is `Invariant` with respect to
+ /// `Vec<i32>` (and its `i32` argument).
+ ///
+ /// Source: Figure 1 of "Taming the Wildcards:
+ /// Combining Definition- and Use-Site Variance" published in PLDI'11.
+ pub fn xform(self, v: ty::Variance) -> ty::Variance {
+ match (self, v) {
+ // Figure 1, column 1.
+ (ty::Covariant, ty::Covariant) => ty::Covariant,
+ (ty::Covariant, ty::Contravariant) => ty::Contravariant,
+ (ty::Covariant, ty::Invariant) => ty::Invariant,
+ (ty::Covariant, ty::Bivariant) => ty::Bivariant,
+
+ // Figure 1, column 2.
+ (ty::Contravariant, ty::Covariant) => ty::Contravariant,
+ (ty::Contravariant, ty::Contravariant) => ty::Covariant,
+ (ty::Contravariant, ty::Invariant) => ty::Invariant,
+ (ty::Contravariant, ty::Bivariant) => ty::Bivariant,
+
+ // Figure 1, column 3.
+ (ty::Invariant, _) => ty::Invariant,
+
+ // Figure 1, column 4.
+ (ty::Bivariant, _) => ty::Bivariant,
+ }
+ }
+}
+
#[derive(Clone, Copy, Debug, RustcDecodable, RustcEncodable)]
pub struct MethodCallee<'tcx> {
/// Impl method ID, for inherent methods, or trait method ID, otherwise.
if let Some(id) = self.hir.as_local_node_id(did) {
Attributes::Borrowed(self.hir.attrs(id))
} else {
- Attributes::Owned(self.sess.cstore.item_attrs(did))
+ Attributes::Owned(self.item_attrs(did))
}
}
let trait_ref = self.impl_trait_ref(impl_def_id).unwrap();
// Record the trait->implementation mapping.
- let parent = self.sess.cstore.impl_parent(impl_def_id).unwrap_or(trait_id);
+ let parent = self.impl_parent(impl_def_id).unwrap_or(trait_id);
def.record_remote_impl(self, impl_def_id, trait_ref, parent);
}
}
}
- /// If the given def ID describes an item belonging to a trait,
- /// return the ID of the trait that the trait item belongs to.
- /// Otherwise, return `None`.
- pub fn trait_of_item(self, def_id: DefId) -> Option<DefId> {
- if def_id.krate != LOCAL_CRATE {
- return self.sess.cstore.trait_of_item(def_id);
- }
- self.opt_associated_item(def_id)
- .and_then(|associated_item| {
- match associated_item.container {
- TraitContainer(def_id) => Some(def_id),
- ImplContainer(_) => None
- }
- })
- }
-
/// Construct a parameter environment suitable for static contexts or other contexts where there
/// are no free type/lifetime parameters in scope.
pub fn empty_parameter_environment(self) -> ParameterEnvironment<'tcx> {
tcx.hir.span_if_local(def_id).unwrap()
}
+/// If the given def ID describes an item belonging to a trait,
+/// return the ID of the trait that the trait item belongs to.
+/// Otherwise, return `None`.
+fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> {
+ tcx.opt_associated_item(def_id)
+ .and_then(|associated_item| {
+ match associated_item.container {
+ TraitContainer(def_id) => Some(def_id),
+ ImplContainer(_) => None
+ }
+ })
+}
+
+
pub fn provide(providers: &mut ty::maps::Providers) {
*providers = ty::maps::Providers {
associated_item,
adt_sized_constraint,
adt_dtorck_constraint,
def_span,
+ trait_of_item,
..*providers
};
}
Relate::relate(self, a, b)
}
+ /// Relate the two substitutions for the given item. The default
+ /// is to look up the variance for the item and proceed
+ /// accordingly.
+ fn relate_item_substs(&mut self,
+ item_def_id: DefId,
+ a_subst: &'tcx Substs<'tcx>,
+ b_subst: &'tcx Substs<'tcx>)
+ -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+ {
+ debug!("relate_item_substs(item_def_id={:?}, a_subst={:?}, b_subst={:?})",
+ item_def_id,
+ a_subst,
+ b_subst);
+
+ let opt_variances = self.tcx().variances_of(item_def_id);
+ relate_substs(self, Some(&opt_variances), a_subst, b_subst)
+ }
+
/// Switch variance for the purpose of relating `a` and `b`.
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
variance: ty::Variance,
}
}
-// substitutions are not themselves relatable without more context,
-// but they is an important subroutine for things that ARE relatable,
-// like traits etc.
-fn relate_item_substs<'a, 'gcx, 'tcx, R>(relation: &mut R,
- item_def_id: DefId,
- a_subst: &'tcx Substs<'tcx>,
- b_subst: &'tcx Substs<'tcx>)
- -> RelateResult<'tcx, &'tcx Substs<'tcx>>
- where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a
-{
- debug!("substs: item_def_id={:?} a_subst={:?} b_subst={:?}",
- item_def_id,
- a_subst,
- b_subst);
-
- let opt_variances = relation.tcx().variances_of(item_def_id);
- relate_substs(relation, Some(&opt_variances), a_subst, b_subst)
-}
-
pub fn relate_substs<'a, 'gcx, 'tcx, R>(relation: &mut R,
variances: Option<&Vec<ty::Variance>>,
a_subst: &'tcx Substs<'tcx>,
if a.def_id != b.def_id {
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
} else {
- let substs = relate_item_substs(relation, a.def_id, a.substs, b.substs)?;
+ let substs = relation.relate_item_substs(a.def_id, a.substs, b.substs)?;
Ok(ty::TraitRef { def_id: a.def_id, substs: substs })
}
}
if a.def_id != b.def_id {
Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id)))
} else {
- let substs = relate_item_substs(relation, a.def_id, a.substs, b.substs)?;
+ let substs = relation.relate_item_substs(a.def_id, a.substs, b.substs)?;
Ok(ty::ExistentialTraitRef { def_id: a.def_id, substs: substs })
}
}
(&ty::TyAdt(a_def, a_substs), &ty::TyAdt(b_def, b_substs))
if a_def == b_def =>
{
- let substs = relate_item_substs(relation, a_def.did, a_substs, b_substs)?;
+ let substs = relation.relate_item_substs(a_def.did, a_substs, b_substs)?;
Ok(tcx.mk_adt(a_def, substs))
}
}
fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
- Rvalue::Use(Operand::Constant(Constant {
+ Rvalue::Use(Operand::Constant(Box::new(Constant {
span: span,
ty: self.tcx.types.bool,
literal: Literal::Value { value: ConstVal::Bool(val) }
- }))
+ })))
}
fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
// constants, we only try to find the expression for a
// trait-associated const if the caller gives us the
// substitutions for the reference to it.
- if tcx.sess.cstore.trait_of_item(def_id).is_some() {
+ if tcx.trait_of_item(def_id).is_some() {
resolve_trait_associated_const(tcx, def_id, substs)
} else {
Some((def_id, substs))
node: ast::MetaItemKind::Word,
span: DUMMY_SP,
});
- if !allow_unstable_cfg && gated_cfg.is_some() {
- continue;
+
+ // Note that crt-static is a specially recognized cfg
+ // directive that's printed out here as part of
+ // rust-lang/rust#37406, but in general the
+ // `target_feature` cfg is gated under
+ // rust-lang/rust#29717. For now this is just
+ // specifically allowing the crt-static cfg and that's
+ // it, this is intended to get into Cargo and then go
+ // through to build scripts.
+ let value = value.as_ref().map(|s| s.as_str());
+ let value = value.as_ref().map(|s| s.as_ref());
+ if name != "target_feature" || value != Some("crt-static") {
+ if !allow_unstable_cfg && gated_cfg.is_some() {
+ continue;
+ }
}
- cfgs.push(if let &Some(ref value) = value {
+ cfgs.push(if let Some(value) = value {
format!("{}=\"{}\"", name, value)
} else {
format!("{}", name)
use llvm::LLVMRustHasFeature;
use rustc::session::Session;
use rustc_trans::back::write::create_target_machine;
-use syntax::feature_gate::UnstableFeatures;
use syntax::symbol::Symbol;
use libc::c_char;
}
let requested_features = sess.opts.cg.target_feature.split(',');
- let unstable_options = sess.opts.debugging_opts.unstable_options;
- let is_nightly = UnstableFeatures::from_environment().is_nightly_build();
let found_negative = requested_features.clone().any(|r| r == "-crt-static");
let found_positive = requested_features.clone().any(|r| r == "+crt-static");
found_positive
};
- // If we switched from the default then that's only allowed on nightly, so
- // gate that here.
- if (found_positive || found_negative) && (!is_nightly || !unstable_options) {
- sess.fatal("specifying the `crt-static` target feature is only allowed \
- on the nightly channel with `-Z unstable-options` passed \
- as well");
- }
-
if crt_static {
cfg.insert((tf, Some(Symbol::intern("crt-static"))));
}
DepNode::FileMap(def_id, ref name) => {
if def_id.is_local() {
- Some(self.incremental_hashes_map[dep_node])
+ // We will have been able to retrace the DefId (which is
+ // always the local CRATE_DEF_INDEX), but the file with the
+ // given name might have been removed, so we use get() in
+ // order to allow for that case.
+ self.incremental_hashes_map.get(dep_node).map(|x| *x)
} else {
Some(self.metadata_hash(DepNode::FileMap(def_id, name.clone()),
def_id.krate,
cx.span_lint(ILLEGAL_FLOATING_POINT_LITERAL_PATTERN,
l.span,
"floating-point literals cannot be used in patterns");
- error!("span mc spanspam");
},
_ => (),
}
use rustc_back::target::Target;
use rustc::hir;
-use std::collections::BTreeMap;
-
macro_rules! provide {
(<$lt:tt> $tcx:ident, $def_id:ident, $cdata:ident $($name:ident => $compute:block)*) => {
pub fn provide<$lt>(providers: &mut Providers<$lt>) {
def_span => { cdata.get_span(def_id.index, &tcx.sess) }
stability => { cdata.get_stability(def_id.index) }
deprecation => { cdata.get_deprecation(def_id.index) }
- item_body_nested_bodies => {
- let map: BTreeMap<_, _> = cdata.entry(def_id.index).ast.into_iter().flat_map(|ast| {
- ast.decode(cdata).nested_bodies.decode(cdata).map(|body| (body.id(), body))
- }).collect();
-
- Rc::new(map)
- }
+ item_attrs => { cdata.get_item_attrs(def_id.index, &tcx.dep_graph) }
+ // FIXME(#38501) We've skipped a `read` on the `HirBody` of
+ // a `fn` when encoding, so the dep-tracking wouldn't work.
+ // This is only used by rustdoc anyway, which shouldn't have
+ // incremental recompilation ever enabled.
+ fn_arg_names => { cdata.get_fn_arg_names(def_id.index) }
+ impl_parent => { cdata.get_parent_impl(def_id.index) }
+ trait_of_item => { cdata.get_trait_of_item(def_id.index) }
+ is_exported_symbol => {
+ let dep_node = cdata.metadata_dep_node(GlobalMetaDataKind::ExportedSymbols);
+ cdata.exported_symbols.get(&tcx.dep_graph, dep_node).contains(&def_id.index)
+ }
+ item_body_nested_bodies => { Rc::new(cdata.item_body_nested_bodies(def_id.index)) }
const_is_rvalue_promotable_to_static => {
- cdata.entry(def_id.index).ast.expect("const item missing `ast`")
- .decode(cdata).rvalue_promotable_to_static
- }
- is_mir_available => {
- !cdata.is_proc_macro(def_id.index) &&
- cdata.maybe_entry(def_id.index).and_then(|item| item.decode(cdata).mir).is_some()
+ cdata.const_is_rvalue_promotable_to_static(def_id.index)
}
+ is_mir_available => { cdata.is_item_mir_available(def_id.index) }
}
impl CrateStore for cstore::CStore {
self.get_crate_data(def.krate).get_generics(def.index)
}
- fn item_attrs(&self, def_id: DefId) -> Rc<[ast::Attribute]>
- {
- self.get_crate_data(def_id.krate)
- .get_item_attrs(def_id.index, &self.dep_graph)
- }
-
- fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>
- {
- // FIXME(#38501) We've skipped a `read` on the `HirBody` of
- // a `fn` when encoding, so the dep-tracking wouldn't work.
- // This is only used by rustdoc anyway, which shouldn't have
- // incremental recompilation ever enabled.
- assert!(!self.dep_graph.is_fully_enabled());
- self.get_crate_data(did.krate).get_fn_arg_names(did.index)
- }
-
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
{
if let Some(def_id) = filter {
self.get_crate_data(def.krate).get_impl_defaultness(def.index)
}
- fn impl_parent(&self, impl_def: DefId) -> Option<DefId> {
- self.dep_graph.read(DepNode::MetaData(impl_def));
- self.get_crate_data(impl_def.krate).get_parent_impl(impl_def.index)
- }
-
- fn trait_of_item(&self, def_id: DefId) -> Option<DefId> {
- self.dep_graph.read(DepNode::MetaData(def_id));
- self.get_crate_data(def_id.krate).get_trait_of_item(def_id.index)
- }
-
fn associated_item_cloned(&self, def: DefId) -> ty::AssociatedItem
{
self.dep_graph.read(DepNode::MetaData(def));
self.get_crate_data(impl_did.krate).is_default_impl(impl_did.index)
}
- fn is_foreign_item(&self, did: DefId) -> bool {
- self.get_crate_data(did.krate).is_foreign_item(did.index)
- }
-
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool
{
self.do_is_statically_included_foreign_item(def_id)
}
- fn is_exported_symbol(&self, def_id: DefId) -> bool {
- let data = self.get_crate_data(def_id.krate);
- let dep_node = data.metadata_dep_node(GlobalMetaDataKind::ExportedSymbols);
- data.exported_symbols
- .get(&self.dep_graph, dep_node)
- .contains(&def_id.index)
- }
-
fn is_dllimport_foreign_item(&self, def_id: DefId) -> bool {
if def_id.krate == LOCAL_CRATE {
self.dllimport_foreign_items.borrow().contains(&def_id.index)
use std::borrow::Cow;
use std::cell::Ref;
+use std::collections::BTreeMap;
use std::io;
use std::mem;
use std::rc::Rc;
}
impl<'a, 'tcx> CrateMetadata {
- pub fn is_proc_macro(&self, id: DefIndex) -> bool {
+ fn is_proc_macro(&self, id: DefIndex) -> bool {
self.proc_macros.is_some() && id != CRATE_DEF_INDEX
}
- pub fn maybe_entry(&self, item_id: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
+ fn maybe_entry(&self, item_id: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
assert!(!self.is_proc_macro(item_id));
self.root.index.lookup(self.blob.raw_bytes(), item_id)
}
- pub fn entry(&self, item_id: DefIndex) -> Entry<'tcx> {
+ fn entry(&self, item_id: DefIndex) -> Entry<'tcx> {
match self.maybe_entry(item_id) {
None => {
bug!("entry: id not found: {:?} in crate {:?} with number {}",
tcx.alloc_tables(ast.tables.decode((self, tcx)))
}
+ pub fn item_body_nested_bodies(&self, id: DefIndex) -> BTreeMap<hir::BodyId, hir::Body> {
+ self.entry(id).ast.into_iter().flat_map(|ast| {
+ ast.decode(self).nested_bodies.decode(self).map(|body| (body.id(), body))
+ }).collect()
+ }
+
+ pub fn const_is_rvalue_promotable_to_static(&self, id: DefIndex) -> bool {
+ self.entry(id).ast.expect("const item missing `ast`")
+ .decode(self).rvalue_promotable_to_static
+ }
+
+ pub fn is_item_mir_available(&self, id: DefIndex) -> bool {
+ !self.is_proc_macro(id) &&
+ self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some()
+ }
+
pub fn maybe_get_optimized_mir(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
id: DefIndex)
temp: &Lvalue<'tcx>,
constant: Constant<'tcx>) {
self.push_assign(block, source_info, temp,
- Rvalue::Use(Operand::Constant(constant)));
+ Rvalue::Use(Operand::Constant(box constant)));
}
pub fn push_assign_unit(&mut self,
source_info: SourceInfo,
lvalue: &Lvalue<'tcx>) {
self.push_assign(block, source_info, lvalue, Rvalue::Aggregate(
- AggregateKind::Tuple, vec![]
+ box AggregateKind::Tuple, vec![]
));
}
match category {
Category::Constant => {
let constant = this.as_constant(expr);
- block.and(Operand::Constant(constant))
+ block.and(Operand::Constant(box constant))
}
Category::Lvalue |
Category::Rvalue(..) => {
.map(|f| unpack!(block = this.as_operand(block, scope, f)))
.collect();
- block.and(Rvalue::Aggregate(AggregateKind::Array(el_ty), fields))
+ block.and(Rvalue::Aggregate(box AggregateKind::Array(el_ty), fields))
}
ExprKind::Tuple { fields } => { // see (*) above
// first process the set of fields
.map(|f| unpack!(block = this.as_operand(block, scope, f)))
.collect();
- block.and(Rvalue::Aggregate(AggregateKind::Tuple, fields))
+ block.and(Rvalue::Aggregate(box AggregateKind::Tuple, fields))
}
ExprKind::Closure { closure_id, substs, upvars } => { // see (*) above
let upvars =
upvars.into_iter()
.map(|upvar| unpack!(block = this.as_operand(block, scope, upvar)))
.collect();
- block.and(Rvalue::Aggregate(AggregateKind::Closure(closure_id, substs), upvars))
+ block.and(Rvalue::Aggregate(box AggregateKind::Closure(closure_id, substs), upvars))
}
ExprKind::Adt {
adt_def, variant_index, substs, fields, base
field_names.iter().filter_map(|n| fields_map.get(n).cloned()).collect()
};
- let adt = AggregateKind::Adt(adt_def, variant_index, substs, active_field_index);
+ let adt =
+ box AggregateKind::Adt(adt_def, variant_index, substs, active_field_index);
block.and(Rvalue::Aggregate(adt, fields))
}
ExprKind::Assign { .. } |
this.cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::InlineAsm {
- asm: asm.clone(),
+ asm: box asm.clone(),
outputs: outputs,
inputs: inputs
},
let eq_block = self.cfg.start_new_block();
let cleanup = self.diverge_cleanup();
self.cfg.terminate(block, source_info, TerminatorKind::Call {
- func: Operand::Constant(Constant {
+ func: Operand::Constant(box Constant {
span: test.span,
ty: mty,
literal: method
ty: Ty<'tcx>,
literal: Literal<'tcx>)
-> Operand<'tcx> {
- let constant = Constant {
+ let constant = box Constant {
span: span,
ty: ty,
literal: literal,
}
pub fn unit_rvalue(&mut self) -> Rvalue<'tcx> {
- Rvalue::Aggregate(AggregateKind::Tuple, vec![])
+ Rvalue::Aggregate(box AggregateKind::Tuple, vec![])
}
// Returns a zero literal operand for the appropriate type, works for
let free_func = tcx.require_lang_item(lang_items::BoxFreeFnLangItem);
let substs = tcx.intern_substs(&[Kind::from(data.item_ty)]);
TerminatorKind::Call {
- func: Operand::Constant(Constant {
+ func: Operand::Constant(box Constant {
span: data.span,
ty: tcx.type_of(free_func).subst(tcx, substs),
literal: Literal::Value {
let (callee, mut args) = match call_kind {
CallKind::Indirect => (rcvr, vec![]),
CallKind::Direct(def_id) => (
- Operand::Constant(Constant {
+ Operand::Constant(box Constant {
span: span,
ty: tcx.type_of(def_id).subst(tcx, param_env.free_substs),
literal: Literal::Value {
kind: StatementKind::Assign(
Lvalue::Local(RETURN_POINTER),
Rvalue::Aggregate(
- AggregateKind::Adt(adt_def, variant_no, substs, None),
+ box AggregateKind::Adt(adt_def, variant_no, substs, None),
(1..sig.inputs().len()+1).map(|i| {
Operand::Consume(Lvalue::Local(Local::new(i)))
}).collect()
_ => return,
}
- *operand = Operand::Constant(self.constant.clone());
+ *operand = Operand::Constant(box self.constant.clone());
self.uses_replaced += 1
}
}
&Rvalue::Aggregate(ref agg_kind, ref operands) => (agg_kind, operands),
_ => span_bug!(src_info.span, "expected aggregate, not {:?}", rhs),
};
- let (adt_def, variant, substs) = match agg_kind {
- &AggregateKind::Adt(adt_def, variant, substs, None)
+ let (adt_def, variant, substs) = match **agg_kind {
+ AggregateKind::Adt(adt_def, variant, substs, None)
=> (adt_def, variant, substs),
_ => span_bug!(src_info.span, "expected struct, not {:?}", rhs),
};
&Rvalue::Aggregate(ref kind, ref operands) => (kind, operands),
_ => continue,
};
- let (adt_def, variant) = match kind {
- &AggregateKind::Adt(adt_def, variant, _, None) => (adt_def, variant),
+ let (adt_def, variant) = match **kind {
+ AggregateKind::Adt(adt_def, variant, _, None) => (adt_def, variant),
_ => continue,
};
if operands.len() == 0 {
(if self.keep_original {
rhs.clone()
} else {
- let unit = Rvalue::Aggregate(AggregateKind::Tuple, vec![]);
+ let unit = Rvalue::Aggregate(box AggregateKind::Tuple, vec![]);
mem::replace(rhs, unit)
}, statement.source_info)
};
fn promote_candidate(mut self, candidate: Candidate) {
let span = self.promoted.span;
- let new_operand = Operand::Constant(Constant {
+ let new_operand = Operand::Constant(box Constant {
span: span,
ty: self.promoted.return_ty,
literal: Literal::Promoted {
}
Rvalue::Aggregate(ref kind, _) => {
- if let AggregateKind::Adt(def, ..) = *kind {
+ if let AggregateKind::Adt(def, ..) = **kind {
if def.has_dtor(self.tcx) {
self.add(Qualif::NEEDS_DROP);
self.deny_drop();
for block in mir.basic_blocks_mut() {
let terminator = block.terminator_mut();
terminator.kind = match terminator.kind {
- TerminatorKind::SwitchInt { discr: Operand::Constant(Constant {
+ TerminatorKind::SwitchInt { discr: Operand::Constant(box Constant {
literal: Literal::Value { ref value }, ..
}), ref values, ref targets, .. } => {
if let Some(ref constint) = value.to_const_int() {
continue
}
},
- TerminatorKind::Assert { target, cond: Operand::Constant(Constant {
+ TerminatorKind::Assert { target, cond: Operand::Constant(box Constant {
literal: Literal::Value {
value: ConstVal::Bool(cond)
}, ..
}
}
}
-
fn is_box_free(&self, operand: &Operand<'tcx>) -> bool {
match operand {
- &Operand::Constant(Constant {
+ &Operand::Constant(box Constant {
literal: Literal::Value {
value: ConstVal::Function(def_id, _), ..
}, ..
Rvalue::Aggregate(ref kind, ref _operands) => {
// AggregateKind is not distinguished by visit API, so
// record it. (`super_rvalue` handles `_operands`.)
- self.record(match *kind {
+ self.record(match **kind {
AggregateKind::Array(_) => "AggregateKind::Array",
AggregateKind::Tuple => "AggregateKind::Tuple",
AggregateKind::Adt(..) => "AggregateKind::Adt",
_ => false
}
} else {
- tcx.sess.cstore.is_foreign_item(def_id)
+ tcx.is_foreign_item(def_id)
};
if let Some(name) = weak_lang_items::link_name(&attrs) {
}
Some(_) => true,
None => {
- if tcx.sess.cstore.is_exported_symbol(def_id) ||
- tcx.sess.cstore.is_foreign_item(def_id)
+ if tcx.is_exported_symbol(def_id) ||
+ tcx.is_foreign_item(def_id)
{
// We can link to the item in question, no instance needed
// in this crate
llvm::set_thread_local(g, true);
}
}
- if ccx.use_dll_storage_attrs() && !ccx.sess().cstore.is_foreign_item(def_id) {
+ if ccx.use_dll_storage_attrs() && !ccx.tcx().is_foreign_item(def_id) {
// This item is external but not foreign, i.e. it originates from an external Rust
// crate. Since we don't know whether this crate will be linked dynamically or
// statically in the final application, we always mark such symbols as 'dllimport'.
location: Location) {
match *kind {
mir::TerminatorKind::Call {
- func: mir::Operand::Constant(mir::Constant {
+ func: mir::Operand::Constant(box mir::Constant {
literal: Literal::Value {
value: ConstVal::Function(def_id, _), ..
}, ..
}
failure?;
- match *kind {
+ match **kind {
mir::AggregateKind::Array(_) => {
self.const_array(dest_ty, &fields)
}
}
mir::Rvalue::Aggregate(ref kind, ref operands) => {
- match *kind {
+ match **kind {
mir::AggregateKind::Adt(adt_def, variant_index, substs, active_field_index) => {
let discr = adt_def.discriminant_for_variant(bcx.tcx(), variant_index)
.to_u128_unchecked() as u64;
use super::terms::*;
use super::terms::VarianceTerm::*;
-use super::xform::*;
pub struct ConstraintContext<'a, 'tcx: 'a> {
pub terms_cx: TermsContext<'a, 'tcx>,
use rustc::ty;
-pub trait Xform {
- fn xform(self, v: Self) -> Self;
-}
-
-impl Xform for ty::Variance {
- fn xform(self, v: ty::Variance) -> ty::Variance {
- // "Variance transformation", Figure 1 of The Paper
- match (self, v) {
- // Figure 1, column 1.
- (ty::Covariant, ty::Covariant) => ty::Covariant,
- (ty::Covariant, ty::Contravariant) => ty::Contravariant,
- (ty::Covariant, ty::Invariant) => ty::Invariant,
- (ty::Covariant, ty::Bivariant) => ty::Bivariant,
-
- // Figure 1, column 2.
- (ty::Contravariant, ty::Covariant) => ty::Contravariant,
- (ty::Contravariant, ty::Contravariant) => ty::Covariant,
- (ty::Contravariant, ty::Invariant) => ty::Invariant,
- (ty::Contravariant, ty::Bivariant) => ty::Bivariant,
-
- // Figure 1, column 3.
- (ty::Invariant, _) => ty::Invariant,
-
- // Figure 1, column 4.
- (ty::Bivariant, _) => ty::Bivariant,
- }
- }
-}
-
pub fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance {
// Greatest lower bound of the variance lattice as
// defined in The Paper:
let mut names = if cx.tcx.hir.as_local_node_id(did).is_some() {
vec![].into_iter()
} else {
- cx.tcx.sess.cstore.fn_arg_names(did).into_iter()
+ cx.tcx.fn_arg_names(did).into_iter()
}.peekable();
FnDecl {
output: Return(sig.skip_binder().output().clean(cx)),
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::util::{copy, sink, Sink, empty, Empty, repeat, Repeat};
#[stable(feature = "rust1", since = "1.0.0")]
-pub use self::stdio::{stdin, stdout, stderr, _print, Stdin, Stdout, Stderr};
+pub use self::stdio::{stdin, stdout, stderr, Stdin, Stdout, Stderr};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::stdio::{StdoutLock, StderrLock, StdinLock};
+#[unstable(feature = "print_internals", issue = "0")]
+pub use self::stdio::{_print, _eprint};
#[unstable(feature = "libstd_io_internals", issue = "0")]
#[doc(no_inline, hidden)]
pub use self::stdio::{set_panic, set_print};
use sync::{Arc, Mutex, MutexGuard};
use sys::stdio;
use sys_common::remutex::{ReentrantMutex, ReentrantMutexGuard};
-use thread::LocalKeyState;
+use thread::{LocalKey, LocalKeyState};
/// Stdout used by print! and println! macros
thread_local! {
})
}
-#[unstable(feature = "print",
- reason = "implementation detail which may disappear or be replaced at any time",
- issue = "0")]
-#[doc(hidden)]
-pub fn _print(args: fmt::Arguments) {
- // As an implementation of the `println!` macro, we want to try our best to
- // not panic wherever possible and get the output somewhere. There are
- // currently two possible vectors for panics we take care of here:
- //
- // 1. If the TLS key for the local stdout has been destroyed, accessing it
- // would cause a panic. Note that we just lump in the uninitialized case
- // here for convenience, we're not trying to avoid a panic.
- // 2. If the local stdout is currently in use (e.g. we're in the middle of
- // already printing) then accessing again would cause a panic.
- //
- // If, however, the actual I/O causes an error, we do indeed panic.
- let result = match LOCAL_STDOUT.state() {
+/// Write `args` to output stream `local_s` if possible, `global_s`
+/// otherwise. `label` identifies the stream in a panic message.
+///
+/// This function is used to print error messages, so it takes extra
+/// care to avoid causing a panic when `local_s` is unusable.
+/// For instance, if the TLS key for the local stream is uninitialized
+/// or already destroyed, or if the local stream is locked by another
+/// thread, it will just fall back to the global stream.
+///
+/// However, if the actual I/O causes an error, this function does panic.
+fn print_to<T>(args: fmt::Arguments,
+ local_s: &'static LocalKey<RefCell<Option<Box<Write+Send>>>>,
+ global_s: fn() -> T,
+ label: &str) where T: Write {
+ let result = match local_s.state() {
LocalKeyState::Uninitialized |
- LocalKeyState::Destroyed => stdout().write_fmt(args),
+ LocalKeyState::Destroyed => global_s().write_fmt(args),
LocalKeyState::Valid => {
- LOCAL_STDOUT.with(|s| {
+ local_s.with(|s| {
if let Ok(mut borrowed) = s.try_borrow_mut() {
if let Some(w) = borrowed.as_mut() {
return w.write_fmt(args);
}
}
- stdout().write_fmt(args)
+ global_s().write_fmt(args)
})
}
};
if let Err(e) = result {
- panic!("failed printing to stdout: {}", e);
+ panic!("failed printing to {}: {}", label, e);
}
}
+#[unstable(feature = "print_internals",
+ reason = "implementation detail which may disappear or be replaced at any time",
+ issue = "0")]
+#[doc(hidden)]
+pub fn _print(args: fmt::Arguments) {
+ print_to(args, &LOCAL_STDOUT, stdout, "stdout");
+}
+
+#[unstable(feature = "print_internals",
+ reason = "implementation detail which may disappear or be replaced at any time",
+ issue = "0")]
+#[doc(hidden)]
+pub fn _eprint(args: fmt::Arguments) {
+ use panicking::LOCAL_STDERR;
+ print_to(args, &LOCAL_STDERR, stderr, "stderr");
+}
+
#[cfg(test)]
mod tests {
use thread;
/// necessary to use `io::stdout().flush()` to ensure the output is emitted
/// immediately.
///
+/// Use `print!` only for the primary output of your program. Use
+/// `eprint!` instead to print error and progress messages.
+///
/// # Panics
///
/// Panics if writing to `io::stdout()` fails.
/// Use the `format!` syntax to write data to the standard output.
/// See `std::fmt` for more information.
///
+/// Use `println!` only for the primary output of your program. Use
+/// `eprintln!` instead to print error and progress messages.
+///
/// # Panics
///
-/// Panics if writing to `io::stdout()` fails.
+/// Panics if writing to `io::stdout` fails.
///
/// # Examples
///
($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n"), $($arg)*));
}
+/// Macro for printing to the standard error.
+///
+/// Equivalent to the `print!` macro, except that output goes to
+/// `io::stderr` instead of `io::stdout`. See `print!` for
+/// example usage.
+///
+/// Use `eprint!` only for error and progress messages. Use `print!`
+/// instead for the primary output of your program.
+///
+/// # Panics
+///
+/// Panics if writing to `io::stderr` fails.
+#[macro_export]
+#[stable(feature = "eprint", since="1.18.0")]
+#[allow_internal_unstable]
+macro_rules! eprint {
+ ($($arg:tt)*) => ($crate::io::_eprint(format_args!($($arg)*)));
+}
+
+/// Macro for printing to the standard error, with a newline.
+///
+/// Equivalent to the `println!` macro, except that output goes to
+/// `io::stderr` instead of `io::stdout`. See `println!` for
+/// example usage.
+///
+/// Use `eprintln!` only for error and progress messages. Use `println!`
+/// instead for the primary output of your program.
+///
+/// # Panics
+///
+/// Panics if writing to `io::stderr` fails.
+#[macro_export]
+#[stable(feature = "eprint", since="1.18.0")]
+macro_rules! eprintln {
+ () => (eprint!("\n"));
+ ($fmt:expr) => (eprint!(concat!($fmt, "\n")));
+ ($fmt:expr, $($arg:tt)*) => (eprint!(concat!($fmt, "\n"), $($arg)*));
+}
+
/// A macro to select an event from a number of receivers.
///
/// This macro is used to wait for the first event to occur on a number of
//! ```
//! use std::path::PathBuf;
//!
+//! // This way works...
//! let mut path = PathBuf::from("c:\\");
+//!
//! path.push("windows");
//! path.push("system32");
+//!
//! path.set_extension("dll");
+//!
+//! // ... but push is best used if you don't know everything up
+//! // front. If you do, this way is better:
+//! let path: PathBuf = ["c:\\", "windows", "system32.dll"].iter().collect();
//! ```
//!
//! [`Component`]: ../../std/path/enum.Component.html
//! [`Path`]: ../../std/path/struct.Path.html
//! [`push`]: ../../std/path/struct.PathBuf.html#method.push
//! [`String`]: ../../std/string/struct.String.html
+//!
//! [`str`]: ../../std/primitive.str.html
//! [`OsString`]: ../../std/ffi/struct.OsString.html
//! [`OsStr`]: ../../std/ffi/struct.OsStr.html
///
/// # Examples
///
+/// You can use [`push`] to build up a `PathBuf` from
+/// components:
+///
/// ```
/// use std::path::PathBuf;
///
-/// let mut path = PathBuf::from("c:\\");
+/// let mut path = PathBuf::new();
+///
+/// path.push(r"C:\");
/// path.push("windows");
/// path.push("system32");
+///
/// path.set_extension("dll");
/// ```
+///
+/// However, [`push`] is best used for dynamic situations. This is a better way
+/// to do this when you know all of the components ahead of time:
+///
+/// ```
+/// use std::path::PathBuf;
+///
+/// let path: PathBuf = [r"C:\", "windows", "system32.dll"].iter().collect();
+/// ```
+///
+/// We can still do better than this! Since these are all strings, we can use
+/// `From::from`:
+///
+/// ```
+/// use std::path::PathBuf;
+///
+/// let path = PathBuf::from(r"C:\windows\system32.dll");
+/// ```
+///
+/// Which method works best depends on what kind of situation you're in.
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct PathBuf {
pub const INVALID_HANDLE_VALUE: HANDLE = !0 as HANDLE;
+pub const FACILITY_NT_BIT: DWORD = 0x1000_0000;
+
pub const FORMAT_MESSAGE_FROM_SYSTEM: DWORD = 0x00001000;
+pub const FORMAT_MESSAGE_FROM_HMODULE: DWORD = 0x00000800;
pub const FORMAT_MESSAGE_IGNORE_INSERTS: DWORD = 0x00000200;
pub const TLS_OUT_OF_INDEXES: DWORD = 0xFFFFFFFF;
}
/// Gets a detailed string description for the given error number.
-pub fn error_string(errnum: i32) -> String {
+pub fn error_string(mut errnum: i32) -> String {
// This value is calculated from the macro
// MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)
let langId = 0x0800 as c::DWORD;
let mut buf = [0 as c::WCHAR; 2048];
unsafe {
- let res = c::FormatMessageW(c::FORMAT_MESSAGE_FROM_SYSTEM |
+ let mut module = ptr::null_mut();
+ let mut flags = 0;
+
+ // NTSTATUS errors may be encoded as HRESULT, which may be returned from
+ // GetLastError. For more information about Windows error codes, see
+ // `[MS-ERREF]`: https://msdn.microsoft.com/en-us/library/cc231198.aspx
+ if (errnum & c::FACILITY_NT_BIT as i32) != 0 {
+ // format according to https://support.microsoft.com/en-us/help/259693
+ const NTDLL_DLL: &'static [u16] = &['N' as _, 'T' as _, 'D' as _, 'L' as _, 'L' as _,
+ '.' as _, 'D' as _, 'L' as _, 'L' as _, 0];
+ module = c::GetModuleHandleW(NTDLL_DLL.as_ptr());
+
+ if module != ptr::null_mut() {
+ errnum ^= c::FACILITY_NT_BIT as i32;
+ flags = c::FORMAT_MESSAGE_FROM_HMODULE;
+ }
+ }
+
+ let res = c::FormatMessageW(flags | c::FORMAT_MESSAGE_FROM_SYSTEM |
c::FORMAT_MESSAGE_IGNORE_INSERTS,
- ptr::null_mut(),
+ module,
errnum as c::DWORD,
langId,
buf.as_mut_ptr(),
pub fn exit(code: i32) -> ! {
unsafe { c::ExitProcess(code as c::UINT) }
}
+
+#[cfg(test)]
+mod tests {
+ use io::Error;
+ use sys::c;
+
+ // tests `error_string` above
+ #[test]
+ fn ntstatus_error() {
+ const STATUS_UNSUCCESSFUL: u32 = 0xc000_0001;
+ assert!(!Error::from_raw_os_error((STATUS_UNSUCCESSFUL | c::FACILITY_NT_BIT) as _)
+ .to_string().contains("FormatMessageW() returned error"));
+ }
+}
//! The [`thread::current`] function is available even for threads not spawned
//! by the APIs of this module.
//!
-//! ## Blocking support: park and unpark
-//!
-//! Every thread is equipped with some basic low-level blocking support, via the
-//! [`thread::park`][`park`] function and [`thread::Thread::unpark()`][`unpark`]
-//! method. [`park`] blocks the current thread, which can then be resumed from
-//! another thread by calling the [`unpark`] method on the blocked thread's handle.
-//!
-//! Conceptually, each [`Thread`] handle has an associated token, which is
-//! initially not present:
-//!
-//! * The [`thread::park`][`park`] function blocks the current thread unless or until
-//! the token is available for its thread handle, at which point it atomically
-//! consumes the token. It may also return *spuriously*, without consuming the
-//! token. [`thread::park_timeout`] does the same, but allows specifying a
-//! maximum time to block the thread for.
-//!
-//! * The [`unpark`] method on a [`Thread`] atomically makes the token available
-//! if it wasn't already.
-//!
-//! In other words, each [`Thread`] acts a bit like a semaphore with initial count
-//! 0, except that the semaphore is *saturating* (the count cannot go above 1),
-//! and can return spuriously.
-//!
-//! The API is typically used by acquiring a handle to the current thread,
-//! placing that handle in a shared data structure so that other threads can
-//! find it, and then `park`ing. When some desired condition is met, another
-//! thread calls [`unpark`] on the handle.
-//!
-//! The motivation for this design is twofold:
-//!
-//! * It avoids the need to allocate mutexes and condvars when building new
-//! synchronization primitives; the threads already provide basic blocking/signaling.
-//!
-//! * It can be implemented very efficiently on many platforms.
-//!
//! ## Thread-local storage
//!
//! This module also provides an implementation of thread-local storage for Rust
/// thread finishes). The join handle can be used to block on
/// termination of the child thread, including recovering its panics.
///
+ /// For a more complete documentation see [`thread::spawn`][`spawn`].
+ ///
/// # Errors
///
/// Unlike the [`spawn`] free function, this method yields an
/// panics, [`join`] will return an [`Err`] containing the argument given to
/// [`panic`].
///
+/// This will create a thread using default parameters of [`Builder`]. If you
+/// want to specify the stack size or the name of the thread, use [`Builder`]
+/// instead.
+///
/// # Panics
///
/// Panics if the OS fails to create a thread; use [`Builder::spawn`]
/// to recover from such errors.
///
-/// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
-/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
-/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
-/// [`panic`]: ../../std/macro.panic.html
-/// [`Builder::spawn`]: ../../std/thread/struct.Builder.html#method.spawn
-///
/// # Examples
///
+/// Creating a thread.
+///
/// ```
/// use std::thread;
///
///
/// handler.join().unwrap();
/// ```
+///
+/// As mentioned in the module documentation, threads are usually made to
+/// communicate using [`channels`], here is how it usually looks.
+///
+/// This example also shows how to use `move`, in order to give ownership
+/// of values to a thread.
+///
+/// ```
+/// use std::thread;
+/// use std::sync::mpsc::channel;
+///
+/// let (tx, rx) = channel();
+///
+/// let sender = thread::spawn(move || {
+/// let _ = tx.send("Hello, thread".to_owned());
+/// });
+///
+/// let receiver = thread::spawn(move || {
+/// println!("{}", rx.recv().unwrap());
+/// });
+///
+/// let _ = sender.join();
+/// let _ = receiver.join();
+/// ```
+///
+/// A thread can also return a value through its [`JoinHandle`]; you can use
+/// this to make asynchronous computations (futures might be more appropriate
+/// though).
+///
+/// ```
+/// use std::thread;
+///
+/// let computation = thread::spawn(|| {
+/// // Some expensive computation.
+/// 42
+/// });
+///
+/// let result = computation.join().unwrap();
+/// println!("{}", result);
+/// ```
+///
+/// [`channels`]: ../../std/sync/mpsc/index.html
+/// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
+/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
+/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
+/// [`panic`]: ../../std/macro.panic.html
+/// [`Builder::spawn`]: ../../std/thread/struct.Builder.html#method.spawn
+/// [`Builder`]: ../../std/thread/struct.Builder.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn spawn<F, T>(f: F) -> JoinHandle<T> where
F: FnOnce() -> T, F: Send + 'static, T: Send + 'static
/// Blocks unless or until the current thread's token is made available.
///
-/// Every thread is equipped with some basic low-level blocking support, via
-/// the `park()` function and the [`unpark`][unpark] method. These can be
-/// used as a more CPU-efficient implementation of a spinlock.
+/// A call to `park` does not guarantee that the thread will remain parked
+/// forever, and callers should be prepared for this possibility.
///
-/// [unpark]: struct.Thread.html#method.unpark
+/// # park and unpark
+///
+/// Every thread is equipped with some basic low-level blocking support, via the
+/// [`thread::park`][`park`] function and [`thread::Thread::unpark`][`unpark`]
+/// method. [`park`] blocks the current thread, which can then be resumed from
+/// another thread by calling the [`unpark`] method on the blocked thread's
+/// handle.
+///
+/// Conceptually, each [`Thread`] handle has an associated token, which is
+/// initially not present:
+///
+/// * The [`thread::park`][`park`] function blocks the current thread unless or
+/// until the token is available for its thread handle, at which point it
+/// atomically consumes the token. It may also return *spuriously*, without
+/// consuming the token. [`thread::park_timeout`] does the same, but allows
+/// specifying a maximum time to block the thread for.
+///
+/// * The [`unpark`] method on a [`Thread`] atomically makes the token available
+/// if it wasn't already.
+///
+/// In other words, each [`Thread`] acts a bit like a spinlock that can be
+/// locked and unlocked using `park` and `unpark`.
///
/// The API is typically used by acquiring a handle to the current thread,
/// placing that handle in a shared data structure so that other threads can
-/// find it, and then parking (in a loop with a check for the token actually
-/// being acquired).
+/// find it, and then `park`ing. When some desired condition is met, another
+/// thread calls [`unpark`] on the handle.
///
-/// A call to `park` does not guarantee that the thread will remain parked
-/// forever, and callers should be prepared for this possibility.
+/// The motivation for this design is twofold:
///
-/// See the [module documentation][thread] for more detail.
+/// * It avoids the need to allocate mutexes and condvars when building new
+/// synchronization primitives; the threads already provide basic
+/// blocking/signaling.
///
-/// [thread]: index.html
+/// * It can be implemented very efficiently on many platforms.
+///
+/// # Examples
+///
+/// ```
+/// use std::thread;
+/// use std::time::Duration;
+///
+/// let parked_thread = thread::Builder::new()
+/// .spawn(|| {
+/// println!("Parking thread");
+/// thread::park();
+/// println!("Thread unparked");
+/// })
+/// .unwrap();
+///
+/// // Let some time pass for the thread to be spawned.
+/// thread::sleep(Duration::from_millis(10));
+///
+/// println!("Unpark the thread");
+/// parked_thread.thread().unpark();
+///
+/// parked_thread.join().unwrap();
+/// ```
+///
+/// [`Thread`]: ../../std/thread/struct.Thread.html
+/// [`park`]: ../../std/thread/fn.park.html
+/// [`unpark`]: ../../std/thread/struct.Thread.html#method.unpark
+/// [`thread::park_timeout`]: ../../std/thread/fn.park_timeout.html
//
// The implementation currently uses the trivial strategy of a Mutex+Condvar
// with wakeup flag, which does not actually allow spurious wakeups. In the
*guard = false;
}
-/// Use [park_timeout].
+/// Use [`park_timeout`].
///
/// Blocks unless or until the current thread's token is made available or
/// the specified duration has been reached (may wake spuriously).
///
-/// The semantics of this function are equivalent to `park()` except that the
-/// thread will be blocked for roughly no longer than `ms`. This method
-/// should not be used for precise timing due to anomalies such as
+/// The semantics of this function are equivalent to [`park`] except
+/// that the thread will be blocked for roughly no longer than `ms`. This
+/// method should not be used for precise timing due to anomalies such as
/// preemption or platform differences that may not cause the maximum
/// amount of time waited to be precisely `ms` long.
///
-/// See the [module documentation][thread] for more detail.
+/// See the [park documentation][`park`] for more detail.
///
-/// [thread]: index.html
-/// [park_timeout]: fn.park_timeout.html
+/// [`park_timeout`]: fn.park_timeout.html
+/// [`park`]: ../../std/thread/fn.park.html
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(since = "1.6.0", reason = "replaced by `std::thread::park_timeout`")]
pub fn park_timeout_ms(ms: u32) {
/// Blocks unless or until the current thread's token is made available or
/// the specified duration has been reached (may wake spuriously).
///
-/// The semantics of this function are equivalent to `park()` except that the
-/// thread will be blocked for roughly no longer than `dur`. This method
-/// should not be used for precise timing due to anomalies such as
+/// The semantics of this function are equivalent to [`park`][park] except
+/// that the thread will be blocked for roughly no longer than `dur`. This
+/// method should not be used for precise timing due to anomalies such as
/// preemption or platform differences that may not cause the maximum
/// amount of time waited to be precisely `dur` long.
///
-/// See the module doc for more detail.
+/// See the [park documentation][park] for more details.
///
/// # Platform behavior
///
/// park_timeout(timeout);
/// }
/// ```
+///
+/// [park]: fn.park.html
#[stable(feature = "park_timeout", since = "1.4.0")]
pub fn park_timeout(dur: Duration) {
let thread = current();
/// Atomically makes the handle's token available if it is not already.
///
- /// See the module doc for more detail.
+ /// Every thread is equipped with some basic low-level blocking support, via
+ /// the [`park`][park] function and the `unpark()` method. These can be
+ /// used as a more CPU-efficient implementation of a spinlock.
+ ///
+ /// See the [park documentation][park] for more details.
///
/// # Examples
///
/// ```
/// use std::thread;
+ /// use std::time::Duration;
///
- /// let handler = thread::Builder::new()
+ /// let parked_thread = thread::Builder::new()
/// .spawn(|| {
- /// let thread = thread::current();
- /// thread.unpark();
+ /// println!("Parking thread");
+ /// thread::park();
+ /// println!("Thread unparked");
/// })
/// .unwrap();
///
- /// handler.join().unwrap();
+ /// // Let some time pass for the thread to be spawned.
+ /// thread::sleep(Duration::from_millis(10));
+ ///
+ /// println!("Unpark the thread");
+ /// parked_thread.thread().unpark();
+ ///
+ /// parked_thread.join().unwrap();
/// ```
+ ///
+ /// [park]: fn.park.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn unpark(&self) {
let mut guard = self.inner.lock.lock().unwrap();
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-C target-feature=+crt-static
-// error-pattern: specifying the `crt-static` target feature is only allowed
-
-fn main() {}
// over time, but this test used to exhibit some pretty bogus messages
// that were not remotely helpful.
-// error-pattern:cannot infer
-// error-pattern:cannot outlive the lifetime 'a
-// error-pattern:must be valid for the static lifetime
+// error-pattern:the lifetime 'a
+// error-pattern:the static lifetime
struct Invariant<'a>(Option<&'a mut &'a mut ()>);
fn foo3<'a,'b>(x: &'a mut Dummy) -> &'b mut Dummy {
// Without knowing 'a:'b, we can't coerce
- x //~ ERROR cannot infer an appropriate lifetime
+ x //~ ERROR lifetime bound not satisfied
//~^ ERROR cannot infer an appropriate lifetime
}
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
where 'max : 'min
{
// Previously OK:
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn main() { }
where 'max : 'min
{
// Previously OK, now an error as traits are invariant.
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn main() { }
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR cannot infer an appropriate lifetime
+ v //~ ERROR mismatched types
}
fn main() { }
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub fn print_hello() {
+ println!("hello");
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test case makes sure that the compiler doesn't crash due to a failing
+// table lookup when a source file is removed.
+
+// revisions:rpass1 rpass2
+
+// Note that we specify -g so that the FileMaps actually get referenced by the
+// incr. comp. cache:
+// compile-flags: -Z query-dep-graph -g
+
+#[cfg(rpass1)]
+mod auxiliary;
+
+#[cfg(rpass1)]
+fn main() {
+ auxiliary::print_hello();
+}
+
+#[cfg(rpass2)]
+fn main() {
+ println!("hello");
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41677. The local variable was winding up with
+// a type `Receiver<?T, H>` where `?T` was unconstrained, because we
+// failed to enforce the WF obligations and `?T` is a bivariant type
+// parameter position.
+
+#![allow(unused_variables, dead_code)]
+
+use std::marker::PhantomData;
+
+trait Handle {
+ type Inner;
+}
+
+struct ResizingHandle<H>(PhantomData<H>);
+impl<H> Handle for ResizingHandle<H> {
+ type Inner = H;
+}
+
+struct Receiver<T, H: Handle<Inner=T>>(PhantomData<H>);
+
+fn channel<T>(size: usize) -> Receiver<T, ResizingHandle<T>> {
+ let rx = Receiver(PhantomData);
+ rx
+}
+
+fn main() {
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41849.
+
+use std::ops::Mul;
+
+const C: usize = 1;
+const CAPACITY: usize = 1 * C;
+
+struct A<X> {
+ f: [X; CAPACITY],
+}
+
+struct B<T> {
+ f: T,
+}
+
+impl<T> Mul for B<T> {
+ type Output = Self;
+ fn mul(self, _rhs: B<T>) -> Self::Output {
+ self
+ }
+}
+
+impl<T> Mul<usize> for B<T> {
+ type Output = Self;
+ fn mul(self, _rhs: usize) -> Self::Output {
+ self
+ }
+}
+
+fn main() {
+ let a = A { f: [1] };
+ let _ = B { f: a };
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-emscripten spawning processes is not supported
+
+use std::{env, process};
+
+fn child() {
+ print!("[stdout 0]");
+ print!("[stdout {}]", 1);
+ println!("[stdout {}]", 2);
+ println!();
+ eprint!("[stderr 0]");
+ eprint!("[stderr {}]", 1);
+ eprintln!("[stderr {}]", 2);
+ eprintln!();
+}
+
+fn parent() {
+ let this = env::args().next().unwrap();
+ let output = process::Command::new(this).arg("-").output().unwrap();
+ assert!(output.status.success());
+
+ let stdout = String::from_utf8(output.stdout).unwrap();
+ let stderr = String::from_utf8(output.stderr).unwrap();
+
+ assert_eq!(stdout, "[stdout 0][stdout 1][stdout 2]\n\n");
+ assert_eq!(stderr, "[stderr 0][stderr 1][stderr 2]\n\n");
+}
+
+fn main() {
+ if env::args().count() == 2 { child() } else { parent() }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub trait Arbitrary: Sized + 'static {}
+
+impl<'a, A: Clone> Arbitrary for ::std::borrow::Cow<'a, A> {}
+
+fn main() {
+}
\ No newline at end of file
--- /dev/null
+error[E0477]: the type `std::borrow::Cow<'a, A>` does not fulfill the required lifetime
+ --> $DIR/static-lifetime.rs:13:20
+ |
+13 | impl<'a, A: Clone> Arbitrary for ::std::borrow::Cow<'a, A> {}
+ | ^^^^^^^^^
+ |
+ = note: type must satisfy the static lifetime
+
+error: aborting due to previous error
+
format!("-command={}", debugger_script.to_str().unwrap())];
let mut gdb_path = tool_path;
- gdb_path.push_str(&format!("/bin/{}-gdb", self.config.target));
+ gdb_path.push_str("/bin/gdb");
let procsrv::Result {
out,
err,
exe_file.to_str().unwrap()
.replace(r"\", r"\\")));
+ // Force GDB to print values in the Rust format.
+ if self.config.gdb_native_rust {
+ script_str.push_str("set language rust\n");
+ }
+
// Add line breakpoints
for line in &breakpoint_lines {
script_str.push_str(&format!("break '{}':{}\n",
-Subproject commit 67babd2d63710444a3071dfd9184648fd85a6a3d
+Subproject commit 207c18da7d73faf0217fa433cce3a9d075f2fe25